lang
stringclasses
3 values
file_path
stringlengths
5
150
repo_name
stringlengths
6
110
commit
stringlengths
40
40
file_code
stringlengths
1.52k
18.9k
prefix
stringlengths
82
16.5k
suffix
stringlengths
0
15.1k
middle
stringlengths
121
8.18k
strategy
stringclasses
8 values
context_items
listlengths
0
100
Rust
src/bin/main.rs
DanielJoyce/websocket-serial-server
27eee51e364d8852ad4f995272a30241d2ac6ab9
#[macro_use] extern crate log; use std::io::Write; use std::net::TcpStream; use std::sync::mpsc::{channel, Sender}; use std::thread; use hyper::net::Fresh; use hyper::server::request::Request; use hyper::server::response::Response; use hyper::Server as HttpServer; use rand::{thread_rng, Rng}; use websocket::client::Writer; use websocket::message::Type; use websocket::result::WebSocketError; use websocket::server::upgrade::WsUpgrade; use websocket::{Message, Server}; use lib::cfg::*; use lib::dynamic_sleep::DynamicSleep; use lib::errors as e; use lib::manager::Manager; use lib::messages::*; pub const MAX_SEND_ERROR_COUNT: u32 = 5; pub fn main() { env_logger::init().expect("Initialization of logging system failed!"); let cfg = WsssConfig::load(); let websocket_html = include_str!("websockets.html").replace( "__WS_PORT__ = 8081", &format!("__WS_PORT__ = {}", cfg.ws_port), ); let http_handler = move |_: Request, response: Response<Fresh>| { let mut response = response.start().expect(&"Could not start response"); response .write_all(websocket_html.as_bytes()) .expect(&"Could not get template as bytes"); response.end().expect(&"Send response failed"); }; info!("Using ports {} {}", cfg.http_port, cfg.ws_port); let (sub_tx, sub_rx) = channel::<SubscriptionRequest>(); let (sreq_tx, sreq_rx) = channel::<(String, SerialRequest)>(); Manager::spawn(sreq_rx, sub_rx); let http_server = HttpServer::http(format!("{}:{}", cfg.bind_address, cfg.http_port)).expect( &format!("Failed to create http server on port {}", cfg.http_port), ); thread::spawn(move || { http_server.handle(http_handler).expect(&"Failed to listen"); }); let ws_server = Server::bind(format!("{}:{}", cfg.bind_address, cfg.ws_port)) .expect(&format!("Failed bind on websocket port {}", cfg.ws_port)); for connection in ws_server.filter_map(Result::ok) { let prefix: String = thread_rng().gen_ascii_chars().take(8).collect(); let sub_id = format!("thread-{}-{}", prefix, rand::random::<u16>()); debug!("{}: spawned.", 
sub_id); let sub_tx_clone = sub_tx.clone(); let sreq_tx_clone = sreq_tx.clone(); spawn_ws_handler(sub_id, sub_tx_clone, sreq_tx_clone, connection); } } fn spawn_ws_handler( sub_id: String, sub_tx_clone: Sender<SubscriptionRequest>, sreq_tx_clone: Sender<(String, SerialRequest)>, connection: WsUpgrade<TcpStream>, ) { thread::spawn(move || ws_handler(sub_id, &sub_tx_clone, &sreq_tx_clone, connection)); } fn ws_handler( sub_id: String, sub_tx: &Sender<SubscriptionRequest>, sreq_tx: &Sender<(String, SerialRequest)>, connection: WsUpgrade<TcpStream>, ) { if !connection .protocols() .contains(&"websocket-serial-json".to_string()) { connection.reject().expect(&"Connection rejection failed."); return; } connection .tcp_stream() .set_nonblocking(true) .expect(&"Setting stream non-blocking failed."); let (sub_resp_tx, sub_resp_rx) = channel::<SerialResponse>(); sub_tx .send(SubscriptionRequest { sub_id: sub_id.clone(), subscriber: sub_resp_tx, }) .expect(&format!("{}: Registering with manager failed.", sub_id)); let client = connection .use_protocol(format!("websocket-serial-json")) .accept() .expect(&format!("{}: Accept protocol failed.", sub_id)); let ip = client .peer_addr() .expect(&format!("{}: Could not get peer address", sub_id)); info!("{}: Connection from {}", sub_id, ip); let (mut receiver, mut sender) = client .split() .expect(&format!("{}: WS client error", sub_id)); let mut send_error_count = 0; let mut dynamic_sleep = DynamicSleep::new("main"); 'msg_loop: loop { dynamic_sleep.sleep(); match receiver.recv_message::<Message, _, _>() { Ok(message) => { match message.opcode { Type::Close => { let message = Message::close(); sender .send_message(&message) .unwrap_or(info!("{}: Client {} hung up!", sub_id, ip)); sreq_tx .send((sub_id.clone(), SerialRequest::Close { port: None })) .unwrap_or_else(|e| { warn!( "Client exit cleanup failed for sub_id '{}', cause '{}'", sub_id, e ) }); info!("{}: Client {} disconnected", sub_id, ip); break 'msg_loop; } Type::Ping => { let 
message = Message::pong(message.payload); sender .send_message(&message) .unwrap_or(info!("{}: Could not ping client {}!", sub_id, ip)); } _ => { let msg = String::from_utf8_lossy(&message.payload); match serde_json::from_str(&msg) { Ok(req) => { match sreq_tx.send((sub_id.clone(), req)) { Err(err) => { let error = e::ErrorKind::SendRequest(err).into(); send_serial_response_error(&sub_id, &mut sender, error); } _ => {} }; } Err(err) => { let error = e::ErrorKind::Json(err).into(); send_serial_response_error(&sub_id, &mut sender, error); } }; } }; } Err(e) => { match e { WebSocketError::NoDataAvailable => { /*Logging?*/ } _ => { /*Logging?*/ } }; } } match sub_resp_rx.try_recv() { Ok(resp) => match serde_json::to_string(&resp) { Ok(json) => { let reply = Message::text(json.clone()); sender.send_message(&reply).unwrap_or_else(|e| { send_error_count += 1; info!( "{}: Could not send message '{}' to client '{}', cause '{}'", sub_id, json, ip, e ) }); } Err(_) => {} }, _ => { /*Logging*/ } }; if send_error_count > MAX_SEND_ERROR_COUNT { warn!( "{}: Client send error count exceeded! Shutting down msg loop.", &sub_id ); break 'msg_loop; } } info!("{}: Shutting down!", sub_id); } fn send_serial_response_error(sub_id: &String, sender: &mut Writer<TcpStream>, error: e::Error) { let error = e::to_serial_response_error(error); serde_json::to_string(&error) .map_err(|err| e::ErrorKind::Json(err)) .map(|json| Message::text(json)) .map(|msg| { sender .send_message::<Message, _>(&msg) .map_err::<e::Error, _>(|err| e::ErrorKind::SendWsMessage(err).into()) }) .unwrap_or_else(|_| { warn!("{}: Problem sending bad json error response", sub_id); Ok(()) }) .is_ok(); }
#[macro_use] extern crate log; use std::io::Write; use std::net::TcpStream; use std::sync::mpsc::{channel, Sender};
pStream>, ) { thread::spawn(move || ws_handler(sub_id, &sub_tx_clone, &sreq_tx_clone, connection)); } fn ws_handler( sub_id: String, sub_tx: &Sender<SubscriptionRequest>, sreq_tx: &Sender<(String, SerialRequest)>, connection: WsUpgrade<TcpStream>, ) { if !connection .protocols() .contains(&"websocket-serial-json".to_string()) { connection.reject().expect(&"Connection rejection failed."); return; } connection .tcp_stream() .set_nonblocking(true) .expect(&"Setting stream non-blocking failed."); let (sub_resp_tx, sub_resp_rx) = channel::<SerialResponse>(); sub_tx .send(SubscriptionRequest { sub_id: sub_id.clone(), subscriber: sub_resp_tx, }) .expect(&format!("{}: Registering with manager failed.", sub_id)); let client = connection .use_protocol(format!("websocket-serial-json")) .accept() .expect(&format!("{}: Accept protocol failed.", sub_id)); let ip = client .peer_addr() .expect(&format!("{}: Could not get peer address", sub_id)); info!("{}: Connection from {}", sub_id, ip); let (mut receiver, mut sender) = client .split() .expect(&format!("{}: WS client error", sub_id)); let mut send_error_count = 0; let mut dynamic_sleep = DynamicSleep::new("main"); 'msg_loop: loop { dynamic_sleep.sleep(); match receiver.recv_message::<Message, _, _>() { Ok(message) => { match message.opcode { Type::Close => { let message = Message::close(); sender .send_message(&message) .unwrap_or(info!("{}: Client {} hung up!", sub_id, ip)); sreq_tx .send((sub_id.clone(), SerialRequest::Close { port: None })) .unwrap_or_else(|e| { warn!( "Client exit cleanup failed for sub_id '{}', cause '{}'", sub_id, e ) }); info!("{}: Client {} disconnected", sub_id, ip); break 'msg_loop; } Type::Ping => { let message = Message::pong(message.payload); sender .send_message(&message) .unwrap_or(info!("{}: Could not ping client {}!", sub_id, ip)); } _ => { let msg = String::from_utf8_lossy(&message.payload); match serde_json::from_str(&msg) { Ok(req) => { match sreq_tx.send((sub_id.clone(), req)) { Err(err) => 
{ let error = e::ErrorKind::SendRequest(err).into(); send_serial_response_error(&sub_id, &mut sender, error); } _ => {} }; } Err(err) => { let error = e::ErrorKind::Json(err).into(); send_serial_response_error(&sub_id, &mut sender, error); } }; } }; } Err(e) => { match e { WebSocketError::NoDataAvailable => { /*Logging?*/ } _ => { /*Logging?*/ } }; } } match sub_resp_rx.try_recv() { Ok(resp) => match serde_json::to_string(&resp) { Ok(json) => { let reply = Message::text(json.clone()); sender.send_message(&reply).unwrap_or_else(|e| { send_error_count += 1; info!( "{}: Could not send message '{}' to client '{}', cause '{}'", sub_id, json, ip, e ) }); } Err(_) => {} }, _ => { /*Logging*/ } }; if send_error_count > MAX_SEND_ERROR_COUNT { warn!( "{}: Client send error count exceeded! Shutting down msg loop.", &sub_id ); break 'msg_loop; } } info!("{}: Shutting down!", sub_id); } fn send_serial_response_error(sub_id: &String, sender: &mut Writer<TcpStream>, error: e::Error) { let error = e::to_serial_response_error(error); serde_json::to_string(&error) .map_err(|err| e::ErrorKind::Json(err)) .map(|json| Message::text(json)) .map(|msg| { sender .send_message::<Message, _>(&msg) .map_err::<e::Error, _>(|err| e::ErrorKind::SendWsMessage(err).into()) }) .unwrap_or_else(|_| { warn!("{}: Problem sending bad json error response", sub_id); Ok(()) }) .is_ok(); }
use std::thread; use hyper::net::Fresh; use hyper::server::request::Request; use hyper::server::response::Response; use hyper::Server as HttpServer; use rand::{thread_rng, Rng}; use websocket::client::Writer; use websocket::message::Type; use websocket::result::WebSocketError; use websocket::server::upgrade::WsUpgrade; use websocket::{Message, Server}; use lib::cfg::*; use lib::dynamic_sleep::DynamicSleep; use lib::errors as e; use lib::manager::Manager; use lib::messages::*; pub const MAX_SEND_ERROR_COUNT: u32 = 5; pub fn main() { env_logger::init().expect("Initialization of logging system failed!"); let cfg = WsssConfig::load(); let websocket_html = include_str!("websockets.html").replace( "__WS_PORT__ = 8081", &format!("__WS_PORT__ = {}", cfg.ws_port), ); let http_handler = move |_: Request, response: Response<Fresh>| { let mut response = response.start().expect(&"Could not start response"); response .write_all(websocket_html.as_bytes()) .expect(&"Could not get template as bytes"); response.end().expect(&"Send response failed"); }; info!("Using ports {} {}", cfg.http_port, cfg.ws_port); let (sub_tx, sub_rx) = channel::<SubscriptionRequest>(); let (sreq_tx, sreq_rx) = channel::<(String, SerialRequest)>(); Manager::spawn(sreq_rx, sub_rx); let http_server = HttpServer::http(format!("{}:{}", cfg.bind_address, cfg.http_port)).expect( &format!("Failed to create http server on port {}", cfg.http_port), ); thread::spawn(move || { http_server.handle(http_handler).expect(&"Failed to listen"); }); let ws_server = Server::bind(format!("{}:{}", cfg.bind_address, cfg.ws_port)) .expect(&format!("Failed bind on websocket port {}", cfg.ws_port)); for connection in ws_server.filter_map(Result::ok) { let prefix: String = thread_rng().gen_ascii_chars().take(8).collect(); let sub_id = format!("thread-{}-{}", prefix, rand::random::<u16>()); debug!("{}: spawned.", sub_id); let sub_tx_clone = sub_tx.clone(); let sreq_tx_clone = sreq_tx.clone(); spawn_ws_handler(sub_id, sub_tx_clone, 
sreq_tx_clone, connection); } } fn spawn_ws_handler( sub_id: String, sub_tx_clone: Sender<SubscriptionRequest>, sreq_tx_clone: Sender<(String, SerialRequest)>, connection: WsUpgrade<Tc
random
[ { "content": "//! The serial support library contains all\n\n//! the functionality to read ports, and send data\n\n//! between threads reading serial port data\n\n//! and threads handling websocket requests\n\n\n\n#![recursion_limit = \"1024\"]\n\n#![allow(dead_code)]\n\n#![allow(unused_variables)]\n\nextern crate argparse;\n\n#[macro_use]\n\nextern crate error_chain;\n\n#[macro_use]\n\nextern crate log;\n\n#[macro_use]\n\nextern crate serde_derive;\n\n\n\npub mod cfg;\n\npub mod common;\n\npub mod dynamic_sleep;\n\npub mod errors;\n\npub mod manager;\n\npub mod messages;\n\npub mod port_manager;\n\npub mod sub_manager;\n\npub mod writelock_manager;\n", "file_path": "src/lib/mod.rs", "rank": 0, "score": 11.072375174273073 }, { "content": "use std::collections::{HashMap, HashSet};\n\nuse std::sync::mpsc::Sender;\n\n\n\nuse crate::errors::*;\n\nuse crate::messages::*;\n\n\n\n/// Subscription\n", "file_path": "src/lib/sub_manager.rs", "rank": 1, "score": 10.190582766913122 }, { "content": "use std::sync::mpsc::Receiver;\n\n\n\nuse crate::messages::SubscriptionRequest;\n\n\n\n/// Convenience type for a listener\n\n/// that accepts weak refs of Senders of Serial Reponses\n\n/// This is how the manager will communicate\n\n/// results back to the websockets\n\npub type SubscReceiver = Receiver<SubscriptionRequest>;\n", "file_path": "src/lib/common.rs", "rank": 2, "score": 8.161247833747613 }, { "content": "//! Manages serial port state and communication with clients,\n\n//! 
and handling requests / responses\n\n\n\nuse std::collections::HashSet;\n\nuse std::sync::mpsc::{Receiver, TryRecvError};\n\nuse std::thread;\n\n\n\nuse base64;\n\n\n\nuse crate::common::*;\n\nuse crate::dynamic_sleep::DynamicSleep;\n\nuse crate::errors::*;\n\nuse crate::messages::*;\n\nuse crate::port_manager::*;\n\nuse crate::sub_manager::*;\n\nuse crate::writelock_manager::*;\n\n\n\n/// Serial port management module supporting one\n\n/// writer and multiple readers\n\n///\n", "file_path": "src/lib/manager.rs", "rank": 3, "score": 7.814761359548802 }, { "content": "//! The dynamic sleep module implements a timer\n\n//! to try and maintain a given timining interval\n\n//! that adheres as close as possible to a\n\n//! specified update rate in a loop\n\n\n\nuse std::thread;\n\nuse std::time::{Duration, Instant};\n\n\n\nuse log::LogLevel::Warn;\n\n\n\n/// Dynamic Sleep\n\n/// The dynamic sleep struct\n\n/// provides a sleep method that\n\n/// does its best to allow a loop\n\n/// to run with the specified frequency\n\npub struct DynamicSleep {\n\n /// Tag, used for logging\n\n tag: String,\n\n /// Update frequency in Hz\n\n _freq: u32,\n", "file_path": "src/lib/dynamic_sleep.rs", "rank": 4, "score": 6.619675298294488 }, { "content": "use std::collections::{HashMap, HashSet};\n\nuse std::error::Error;\n\nuse std::iter::FromIterator;\n\nuse std::time::Duration;\n\n\n\nuse serialport as sp;\n\n\n\nuse crate::errors::*;\n\n\n\n/// Struct for containing Port information\n", "file_path": "src/lib/port_manager.rs", "rank": 5, "score": 6.471837320777211 }, { "content": "//! This module contains all of the enums used\n\n//! to represent messages that can be send by clients\n\n//! ( SerialRequest::* ) and their responses by\n\n//! 
by the server ( SerialReponse::* )\n\n\n\nuse serde_json;\n\n\n\n// TODO: use when new version drops\n\n//use serialport::{SerialPortInfo,UsbPortInfo,SerialPortType};\n\n\n\nuse std::fmt;\n\nuse std::sync::mpsc::Sender;\n\n\n\n#[derive(Clone, Debug)]\n\npub struct SubscriptionRequest {\n\n pub sub_id: String,\n\n pub subscriber: Sender<SerialResponse>,\n\n}\n\n\n\n/// Represents the valid json requests that can be made\n", "file_path": "src/lib/messages.rs", "rank": 6, "score": 6.361853067708806 }, { "content": "#### Linux\n\n\n\n1. `sudo apt-get install libudev-dev`\n\n1. `sudo apt-get install libssl-dev`\n\n1. `sudo apt-get install pkg-config`\n\n\n\n#### Windows\n\n\n\nUnknown, help appreciated.\n\n\n\n#### OSX\n\n\n\nUnknown, help appreciated.\n\n\n\n\n\n### Vscode-Rust setup\n\n\n\nIf you are using the vscode rust plugin, here is an example of\n\nthe settings I am using\n\n\n\nThe rust.rls is the most important one.\n\n\n\n``` json\n\n// Place your settings in this file to overwrite the default settings\n\n{\n\n \"rust.rls\": {\n\n \"executable\": \"rustup\",\n\n \"args\": [\n\n \"run\",\n\n \"nightly\",\n\n \"rls\"\n\n ]\n\n },\n\n //\"editor.formatOnSave\": true,\n\n //\"editor.fontFamily\": \"Fira Code\"\n\n}\n\n```\n\n\n\n### Running\n\n\n\nbuild:\n\n\n\n`cargo build` or `cargo build --release`\n\n\n\nrun:\n\n\n\n`./target/debug/wsss`, or if built with `--release`, `./target/release/wsss`\n\n\n\nuse `-p` or `--port` to set http port (default is 8080)\n\n\n\nthen browse to `http://localhost:PORT` to find the test page.\n\n\n\n### Logging\n\n\n\nWsss makes extensive use of logging and the [env_logger](https://crates.io/crates/env_logger) crate\n\n \n\nPlease read the env_logger docs for more information, but here is a quick example:\n\n\n\n```RUST_LOG=debug ./target/debug/wsss```\n\n\n\n## TODO\n\n\n\n* [ ] Break this out into bugs/features :)\n\n* [ ] TLS Support\n\n* [ ] Determine settings to help shrink file size\n\n* [ ] Add command to reset entire 
serial port managment subsystem\n\nif it looks like things are wedged\n\n* [x] Switch to dynamic timing loops for all msg handling threads\n\n * [ ] Allow users to specify desired update frequency\n\n * [x] Log if time per loop is exceeded\n\n* [x] Configuration file support\n\n * [x] Use [toml](https://github.com/toml-lang/toml)\n\n * [ ] serial port whitelist/blacklist/regex\n\n * [x] Specify ip address to bind to besides local host\n\n* [ ] Add HTTPS/WSS support\n\n * [ ] Specify cert locations\n\n* [ ] Add method to reinitialize serial port subsystem if things\n\ntotally go south\n\n* [x] Remove sub_id from SerialRequest and send it as tuple\n\nwith sub_id to handler method\n\n* [ ] Reduce the usage of String in favor of &str?\n\n* [ ] \"Wrote\" response message, should we return a hash of the data that was written so integrity can be verified?\n", "file_path": "README.md", "rank": 7, "score": 6.341934656564506 }, { "content": "//! Loads and centralizes configuration from\n\n//! config files, env, and command line\n\n//!\n\n//! For information on command line switches, config files,\n\n//! 
or env names, check the documentation for [WsssConfig](struct.WsssConfig.html)\n\n\n\nuse std::convert::Into;\n\nuse std::default::Default;\n\nuse std::env;\n\nuse std::fs::File;\n\nuse std::io::prelude::*;\n\nuse std::net::Ipv4Addr;\n\nuse std::str::FromStr;\n\n\n\nuse argparse::{ArgumentParser, StoreOption};\n\nuse toml;\n\n\n\nuse crate::errors::*;\n\n\n\n/// Default HTTP port to bind to if none given\n", "file_path": "src/lib/cfg.rs", "rank": 8, "score": 5.723258428548261 }, { "content": " /// Period in microseconds\n\n _period_nanos: u32,\n\n /// Last instand\n\n last_instant: Option<Instant>,\n\n /// How many cycles\n\n cycles: u32,\n\n /// How many slips / second\n\n slips: u32,\n\n}\n\n\n\nimpl DynamicSleep {\n\n /// Create a new instance with default frequency of 30hz.\n\n /// If logging is enabled, tag is used to mark the log messages\n\n pub fn new<S>(tag: S) -> DynamicSleep\n\n where\n\n S: Into<String>,\n\n {\n\n let hz = 30;\n\n DynamicSleep {\n\n tag: tag.into(),\n", "file_path": "src/lib/dynamic_sleep.rs", "rank": 9, "score": 4.904592708124534 }, { "content": " _freq: hz,\n\n _period_nanos: hz_to_nanos(hz),\n\n last_instant: None,\n\n cycles: 0,\n\n slips: 0,\n\n }\n\n }\n\n\n\n /// Create a new instance with a custom frequency.\n\n /// If logging is enabled, tag is used to mark the log messages\n\n pub fn with_freq<S>(tag: S, freq_in_hz: u32) -> DynamicSleep\n\n where\n\n S: Into<String>,\n\n {\n\n DynamicSleep {\n\n tag: tag.into(),\n\n _freq: freq_in_hz,\n\n _period_nanos: hz_to_nanos(freq_in_hz),\n\n last_instant: None,\n\n cycles: 0,\n", "file_path": "src/lib/dynamic_sleep.rs", "rank": 10, "score": 4.806256274222189 }, { "content": "use std::collections::HashMap;\n\n\n\nuse crate::errors::*;\n\n\n\n/// Manages tracking of write locks\n\npub struct WriteLockManager {\n\n /// Map of port to subscription ids\n\n write_locks: HashMap<String, String>,\n\n}\n\n\n\nimpl WriteLockManager {\n\n /// Create a new WriteLockManager instance\n\n pub fn new() 
-> WriteLockManager {\n\n WriteLockManager {\n\n write_locks: HashMap::new(),\n\n }\n\n }\n\n\n\n /// Is the port write locked by the given sub_id\n\n pub fn is_port_write_locked_by(&self, port_name: &String, sub_id: &String) -> bool {\n", "file_path": "src/lib/writelock_manager.rs", "rank": 11, "score": 4.387082494711376 }, { "content": "mod tests {\n\n\n\n use std::io::Read;\n\n use std::io::Write;\n\n\n\n use serialport::posix::TTYPort;\n\n use serialport::SerialPort;\n\n\n\n use super::*;\n\n\n\n #[test]\n\n #[cfg(unix)]\n\n fn test_unix_serialports() {\n\n let (mut master, mut slave) = TTYPort::pair().expect(\"Failed to create pseudoterminal pair!\");\n\n\n\n slave\n\n .set_exclusive(false)\n\n .expect(\"Failed to set exclusive false\");\n\n\n\n let serial_msg = \"abcdefg\";\n", "file_path": "src/lib/port_manager.rs", "rank": 12, "score": 3.609547793898707 }, { "content": "use crate::messages::{SerialRequest, SerialResponse};\n\n\n\nerror_chain! {\n\n\n\n foreign_links {\n\n // Wrapped Format error\n\n Fmt(::std::fmt::Error);\n\n // Wrapped IO error\n\n Io(::std::io::Error) #[cfg(unix)];\n\n // Wrapped serial port error\n\n Serialport(::serialport::Error) #[cfg(unix)];\n\n // Wrapped Ut8 decode error\n\n Utf8(::std::string::FromUtf8Error);\n\n // Wrapped json error\n\n Json(::serde_json::error::Error);\n\n // Wrapped toml deserialization error\n\n TomlDeserialize(::toml::de::Error);\n\n // Wrapped toml serialization error\n\n TomlSerialize(::toml::ser::Error);\n\n // Wrapped sync send response error\n", "file_path": "src/lib/errors.rs", "rank": 13, "score": 3.3664514031074457 }, { "content": " use std::sync::mpsc::{channel, Receiver};\n\n\n\n use super::*;\n\n\n\n #[test]\n\n fn test_subscriptions() {\n\n fn should_get_msg(\n\n rcvr: &Receiver<SerialResponse>,\n\n serial_resp: &SerialResponse,\n\n fail_tag: &str,\n\n ) {\n\n if let Ok(resp) = rcvr.try_recv() {\n\n assert_eq!(\n\n resp,\n\n serial_resp.clone(),\n\n \"{} messages '{:?}' '{:?}' should be 
equal\",\n\n fail_tag,\n\n resp,\n\n serial_resp.clone()\n\n )\n", "file_path": "src/lib/sub_manager.rs", "rank": 14, "score": 2.927942206753235 }, { "content": " }\n\n }\n\n res\n\n }\n\n\n\n /// Get a list of ports that currently have subscriptions\n\n pub fn subscribed_ports(&mut self) -> HashSet<String> {\n\n let mut subscribed_ports = HashSet::<String>::new();\n\n for subs in self.subscriptions.values() {\n\n subscribed_ports.extend(subs.ports.clone());\n\n }\n\n subscribed_ports\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use std::collections::HashSet;\n\n use std::iter::FromIterator;\n", "file_path": "src/lib/sub_manager.rs", "rank": 15, "score": 2.893006562770835 }, { "content": " Some(last) => {\n\n self.cycles += 1;\n\n let now = Instant::now();\n\n let dur = now.duration_since(last);\n\n let subsec_nanos = dur.subsec_nanos();\n\n self.last_instant = Some(now);\n\n if dur.as_secs() > 0 || subsec_nanos > self._period_nanos {\n\n self.slips += 1;\n\n return;\n\n } else {\n\n thread::sleep(Duration::new(0, self._period_nanos - subsec_nanos));\n\n }\n\n if self.cycles == self._freq && self.slips > 0 {\n\n // If we have had slippage within the last second ( approx )\n\n // then we log it.\n\n if log_enabled!(Warn) {\n\n warn!(\"'{}' slipped {} times in last second\", self.tag, self.slips);\n\n }\n\n self.slips = 0;\n\n self.cycles = 0;\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\n// Convert frequency to nanos\n", "file_path": "src/lib/dynamic_sleep.rs", "rank": 16, "score": 2.770225852089368 }, { "content": " self\n\n .write_locks\n\n .insert(port_name.to_string(), sub_id.to_string());\n\n Ok(())\n\n }\n\n true => Err(ErrorKind::AlreadyWriteLocked(port_name.to_string()).into()),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_locking() {\n\n /// Utility method\n\n fn check_not_locked_by_anyone(\n\n wl_manager: &WriteLockManager,\n\n port: &String,\n", "file_path": "src/lib/writelock_manager.rs", "rank": 17, 
"score": 2.1868671047888664 }, { "content": " }\n\n }\n\n\n\n //Handle write requests\n\n let mut recv_count = 0;\n\n while recv_count < 50 {\n\n recv_count += 1;\n\n match self.subsc_receiver.try_recv() {\n\n Ok(sub_request) => self.sub_manager.add_subscription(sub_request),\n\n Err(e) => {\n\n match e {\n\n TryRecvError::Disconnected => {\n\n // Does this mean all senders have disconnected?\n\n // Or just one?\n\n debug!(\"Got disconnected when trying to get serial request\");\n\n }\n\n TryRecvError::Empty => break,\n\n }\n\n }\n\n }\n", "file_path": "src/lib/manager.rs", "rank": 18, "score": 2.167988353771298 }, { "content": "///\n\n/// On the server side, every client is associated with\n\n/// a unique subscription id which is used\n\n/// to associate a given connection with their\n\n/// operations\n\n///\n\n/// Requests that fail or can not be met will result\n\n/// in SerialResponse::Error responses\n\n#[derive(Serialize, Deserialize, Clone, Debug, PartialEq)]\n\npub enum SerialRequest {\n\n /// Open a port for reading\n\n ///\n\n /// Opening the same port more than once is\n\n /// okay and has no ill effects\n\n ///\n\n ///``` json\n\n /// JSON:\n\n /// {\"Open\":{\"port\":\"/dev/ttyUSB\"}}\n\n ///```\n\n Open { port: String },\n", "file_path": "src/lib/messages.rs", "rank": 19, "score": 2.005942471966609 }, { "content": "# Documentation\n\n\n\n## Configuration\n\n\n\nwsss supports configuration in several different ways, by commandline parameters, env variables, and config files\n\n\n\ncommandline options override env variables, which overrides file based config.\n\n\n\nCurrently the following values may be specified\n\n\n\n* `http_port` The HTTP port to bind to, defaults to 10080\n\n* `ws_port` The port the websocket listens on, defaults to 10081\n\n* `bind_address` The ip address the server binds to, defaults to 127.0.0.1 ( localhost )\n\n\n\nWhen wsss starts, it first tries to load configuration information from the following files: \n\n\n\n1. 
The file specified by the environment variable `WSS_CONF_FILE`\n\n1. It then tries to load the file in `/etc/wsss/wsss_conf.toml`\n\n1. It then tries to load a `wsss_conf.toml` file located in the same directory as the wsss executable\n\n\n\n**Only the first file found is loaded.**\n\n\n\nThe configuration file makes use of [TOML](https://github.com/toml-lang/toml). Here is a sample config:\n\n\n\n``` toml\n\n# Sample config. Hashes mark comments\n\n\n\nhttp_port = 10090\n\nws_port = 10095\n\nbind_address = \"10.1.101.26\"\n\n```\n\n\n\nNext, it tries to pull in config from the environment. These values will override any values found in any loaded configuration files.\n\n\n\nThe following env variable names are searched:\n\n\n\n* `WSSS_HTTP_PORT` Specifies the HTTP port\n\n* `WSSS_WS_PORT` Specifies the Websocket port\n\n* `WSSS_BIND_ADDRESS` Specifies the ip address to bind to\n\n\n\nFinally it parses and uses any configuration passed in via commandline arguments\n\n\n\nAvailable commandline arguments can be found via running `wsss -h` or `wsss --help`\n\n\n\nsample output:\n\n\n\n```\n\nUsage:\n\n ./target/debug/wsss [OPTIONS]\n\n\n\nProvide access to serial ports over JSON Websockets\n\n\n\noptional arguments:\n\n -h,--help show this help message and exit\n\n -p,--http_port HTTP_PORT\n\n Http Port\n\n -w,--ws_port WS_PORT Websocket Port\n\n -a,--bind_address BIND_ADDRESS\n\n Bind Address\n\n```\n\n\n\nFinally, any item not specified in any of these steps is given the default value mentioned at the beginning of this section.\n\n\n", "file_path": "DOCUMENTATION.md", "rank": 20, "score": 1.4363028935411686 }, { "content": "# websocket-serial-server (wsss)\n\n\n\n[![Build Status](https://travis-ci.org/DanielJoyce/websocket-serial-server.svg?branch=master)](https://travis-ci.org/DanielJoyce/websocket-serial-server)\n\n\n\nConnect to and read / write serial ports over websockets. 
In Rust\n\n\n\nThere is a need for a way to interface with hardware for software running in web browsers. WebUSB and the HTML5 serial spec are still immature.\n\n\n\n**Alpha, but works for me**\n\n\n\n**Currently there is no security, the connection is NOT encrypted**\n\n\n\n## Features\n\n\n\n1. Written in Rust, so robust and memory safe.\n\n1. All requests and responses are JSON based\n\n1. Supports sending and receiving binary data, as base64 encoded strings\n\n1. Clients can subscribe to multiple ports\n\n1. Clients can write lock ports, so they are the only one\n\nwho can send data to it. Writing to a port can not happen\n\ntill port is write locked. This prevents corruption\n\n1. Ports are only closed when all clients have closed it\n\n1. Data read from port is broadcast to all clients who opeoned it.\n\n1. Ports are automatically cleaned up if read/write errors occur\n\n1. Opening the same port twice will not cause corruption of data\n\nsend to a client ( as seen in SPJS ).\n\n1. Supports port enumeration.\n\n1. simple programming model consisting of threads and event loops, which is fine for dozens of clients and ports.\n\n 1. As the async paradigm in rust matures, will move to that model\n\n1. Simple architecture and code base.\n\n\n\n## Documentation\n\n\n\n[Documentation](DOCUMENTATION.md)\n\n\n\n## Limitations\n\n\n\nCurrently Websocket-rs is not tokio based, so it spawns a thread per connection.\n\nFor having a few clients talk to a 3D printer, CNC machine, or other \n\nsuch use case, this is fine. \n\n\n\nOnce Websocket-rs moves to tokio, this limitation can be removed\n\n\n\n*There is no support for custom protocol or buffer handlers.* That should be handled by client libraries. The purpose of wsss is to simply get data from a serial port to clients and vice-versa.\n\n\n\n## Developing\n\n\n\n### Dependencies\n\n\n", "file_path": "README.md", "rank": 21, "score": 0.898433202542698 } ]
Rust
src/shader/service.rs
vojd/skuggbox
724ddb025623345f634634f4ea70cb022fc20e2b
use log::{error, info}; use std::path::PathBuf; use crate::shader::VERTEX_SHADER; use crate::shader::{find_included_files, PreProcessor, Shader, ShaderError}; use crate::uniforms::{read_uniforms, Uniform}; use crate::utils::cstr_with_len; pub fn create_program(fragment_src: String) -> Result<ShaderProgram, ShaderError> { let vertex_shader = Shader::from_source(String::from(VERTEX_SHADER), gl::VERTEX_SHADER)?; let frag_shader = Shader::from_source(fragment_src, gl::FRAGMENT_SHADER)?; info!( "Creating shader program: {} {}", vertex_shader.id, frag_shader.id ); Ok(ShaderProgram::new(vertex_shader, frag_shader)) } pub struct ShaderProgram { pub id: gl::types::GLuint, } impl ShaderProgram { pub fn new(vert_shader: Shader, frag_shader: Shader) -> Self { let id = unsafe { gl::CreateProgram() }; unsafe { gl::AttachShader(id, vert_shader.id); gl::AttachShader(id, frag_shader.id); gl::LinkProgram(id); } let mut success: gl::types::GLint = 1; unsafe { gl::GetProgramiv(id, gl::LINK_STATUS, &mut success); } if success == 0 { let mut len: gl::types::GLint = 0; unsafe { gl::GetProgramiv(id, gl::INFO_LOG_LENGTH, &mut len); } let error = cstr_with_len(len as usize); unsafe { gl::GetProgramInfoLog( id, len, std::ptr::null_mut(), error.as_ptr() as *mut gl::types::GLchar, ); } error!("linker error {}", error.to_string_lossy()); panic!("linker error"); } unsafe { gl::DetachShader(id, vert_shader.id); gl::DetachShader(id, frag_shader.id); } Self { id } } } impl Drop for ShaderProgram { fn drop(&mut self) { unsafe { gl::DeleteProgram(self.id); } } } pub struct ShaderService { pre_processor: Box<PreProcessor>, fs: PathBuf, pub program: Option<ShaderProgram>, pub files: Vec<PathBuf>, pub use_camera_integration: bool, uniforms: Vec<Uniform>, } impl ShaderService { pub fn new(fs: PathBuf) -> Self { let mut pre_processor = PreProcessor::new(fs.clone()); pre_processor.reload(); let program = create_program(pre_processor.shader_src.clone()).unwrap(); let files = if let Some(f) = 
find_included_files(fs.clone()) { vec![fs.clone(), f.iter().collect()] } else { vec![fs.clone()] }; Self { pre_processor: pre_processor.into(), fs, program: Some(program), files, use_camera_integration: false, uniforms: vec![], } } pub fn reload(&mut self) { self.pre_processor.use_camera_integration = self.use_camera_integration; self.pre_processor.reload(); match create_program(self.pre_processor.shader_src.clone()) { Ok(new_program) => { self.program = Some(new_program); self.uniforms = read_uniforms(self.fs.clone()); info!("Shader recreated without errors") } _ => { error!("Compilation failed - not binding failed program"); } }; } }
use log::{error, info}; use std::path::PathBuf; use crate::shader::VERTEX_SHADER; use crate::shader::{find_included_files, PreProcessor, Shader, ShaderError}; use crate::uniforms::{read_uniforms, Uniform}; use crate::utils::cstr_with_len; pub fn create_program(fragment_src: String) -> Result<ShaderProgram, ShaderError> { let vertex_shader = Shader::from_source(String::from(VERTEX_SHADER), gl::VERTEX_SHADER)?; let frag_shader = Shader::from_source(fragment_src, gl::FRAGMENT_SHADER)?; info!( "Creating shader program: {} {}", vertex_shader.id, frag_shader.id ); Ok(ShaderProgram::new(vertex_shader, frag_shader)) } pub struct ShaderProgram { pub id: gl::types::GLuint, } impl ShaderProgram { pub fn new(vert_shader: Shader, frag_shader: Shader) -> Self { let id = unsafe { gl::CreateProgram() }; unsafe { gl::AttachShader(id, vert_shader.id); gl::AttachShader(id, frag_shader.id); gl::LinkProgram(id); } let mut success: gl::types::GLint = 1; unsafe { gl::GetProgramiv(id, gl::LINK_STATUS, &mut success); } if success == 0 { let mut len: gl::types::GLint = 0; unsafe { gl::GetProgramiv(id, gl::INFO_LOG_LENGTH, &mut len); } let error = cstr_with_len(len as usize); unsafe { gl::GetProgramInfoLog( id, len, std::ptr::null_mut(), error.as_ptr() as *mut gl::types::GLchar, ); } error!("linker error {}", error.to_string_lossy()); panic!("linker error"); } unsafe { gl::DetachShader(id, vert
= self.use_camera_integration; self.pre_processor.reload(); match create_program(self.pre_processor.shader_src.clone()) { Ok(new_program) => { self.program = Some(new_program); self.uniforms = read_uniforms(self.fs.clone()); info!("Shader recreated without errors") } _ => { error!("Compilation failed - not binding failed program"); } }; } }
_shader.id); gl::DetachShader(id, frag_shader.id); } Self { id } } } impl Drop for ShaderProgram { fn drop(&mut self) { unsafe { gl::DeleteProgram(self.id); } } } pub struct ShaderService { pre_processor: Box<PreProcessor>, fs: PathBuf, pub program: Option<ShaderProgram>, pub files: Vec<PathBuf>, pub use_camera_integration: bool, uniforms: Vec<Uniform>, } impl ShaderService { pub fn new(fs: PathBuf) -> Self { let mut pre_processor = PreProcessor::new(fs.clone()); pre_processor.reload(); let program = create_program(pre_processor.shader_src.clone()).unwrap(); let files = if let Some(f) = find_included_files(fs.clone()) { vec![fs.clone(), f.iter().collect()] } else { vec![fs.clone()] }; Self { pre_processor: pre_processor.into(), fs, program: Some(program), files, use_camera_integration: false, uniforms: vec![], } } pub fn reload(&mut self) { self.pre_processor.use_camera_integration
random
[ { "content": "pub fn cstr_with_len(len: usize) -> CString {\n\n let mut buffer: Vec<u8> = Vec::with_capacity(len + 1);\n\n buffer.extend([b' '].iter().cycle().take(len));\n\n unsafe { CString::from_vec_unchecked(buffer) }\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 1, "score": 136967.376116496 }, { "content": "/// Read a shader from disk, return String or ShaderError\n\npub fn read_shader_src(shader_path: PathBuf) -> anyhow::Result<String, ShaderError> {\n\n let mut file = File::open(shader_path.clone()).map_err(|e| ShaderError::FileError {\n\n error: format!(\n\n \"Err: {:?}, {:?} is invalid or does not exit\",\n\n e, shader_path\n\n ),\n\n })?;\n\n\n\n let mut s = String::new();\n\n file.read_to_string(&mut s).unwrap();\n\n Ok(s)\n\n}\n\n\n", "file_path": "src/shader/read.rs", "rank": 2, "score": 127914.7000469684 }, { "content": "/// Parse a uniform from the shader and return an internal Rust representation.\n\n/// Uniforms holds no initial values.\n\nfn extract_uniform(line: String) -> Result<Uniform, UniformError> {\n\n if !is_uniform(line.clone()) {\n\n return Err(UniformError::ParseError);\n\n }\n\n\n\n let parts: Vec<String> = line\n\n .trim()\n\n .split_whitespace()\n\n .take(3)\n\n .map(|s| s.to_string())\n\n .collect();\n\n // TODO: Should not be necessary. 
Do the check in line above\n\n if !parts.len() == 3 {\n\n return Err(UniformError::ParseError);\n\n }\n\n\n\n let mut uniform_name = parts.get(2).ok_or(UniformError::ParseError)?.to_string();\n\n if uniform_name.ends_with(';') {\n\n uniform_name.truncate(uniform_name.len() - 1)\n\n }\n", "file_path": "src/uniforms.rs", "rank": 3, "score": 122339.65774374563 }, { "content": "pub fn include_statement_from_string(shader_name: String) -> String {\n\n format!(\"#pragma include({});\", shader_name)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{pragma_shader_name, string_between};\n\n\n\n #[test]\n\n fn is_string_between() {\n\n let line = \"#pragma include('some-shader.glsl')\";\n\n assert_eq!(string_between(line, \"include(\", \")\"), \"'some-shader.glsl'\");\n\n }\n\n\n\n #[test]\n\n fn is_shader_name() {\n\n let line_single_quotes = \"#pragma include('some-shader.glsl')\";\n\n assert_eq!(\n\n pragma_shader_name(line_single_quotes),\n\n \"some-shader.glsl\".to_string()\n", "file_path": "src/utils.rs", "rank": 4, "score": 121504.43672655114 }, { "content": "#[allow(temporary_cstring_as_ptr)]\n\nfn get_uniform_location(program: &ShaderProgram, uniform_name: &str) -> i32 {\n\n unsafe { gl::GetUniformLocation(program.id, CString::new(uniform_name).unwrap().as_ptr()) }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 5, "score": 117815.97941435273 }, { "content": "/// Read the given shader and extract all existing uniform values\n\n/// Append these to the uniform handler\n\npub fn read_uniforms(shader_path: PathBuf) -> Vec<Uniform> {\n\n let mut uniforms: Vec<Uniform> = vec![];\n\n\n\n let file = File::open(shader_path.as_os_str());\n\n if let Ok(f) = file {\n\n let f = BufReader::new(f);\n\n\n\n for line in f.lines().flatten() {\n\n if is_uniform(line.clone()) {\n\n if let Ok(uniform) = extract_uniform(line) {\n\n uniforms.push(uniform);\n\n };\n\n }\n\n }\n\n }\n\n\n\n uniforms\n\n}\n\n\n", "file_path": "src/uniforms.rs", "rank": 6, "score": 112532.86189024597 
}, { "content": "/// Returns the shader name to be imported on a line of shader code\n\n/// TODO: Shall return &str\n\npub fn pragma_shader_name(line: &str) -> String {\n\n let shader_name = string_between(line, \"include(\", \")\");\n\n shader_name.replace(\"'\", \"\").replace(\"\\\"\", \"\")\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 7, "score": 107139.7113830366 }, { "content": "pub fn cstr_to_str(cstr: &CString) -> String {\n\n cstr.to_string_lossy().to_string()\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 8, "score": 97364.7795224661 }, { "content": "fn is_uniform(line: String) -> bool {\n\n line.trim().starts_with(\"uniform\")\n\n}\n\n\n", "file_path": "src/uniforms.rs", "rank": 9, "score": 94418.81208211386 }, { "content": "/// Search for included files from the supplied `shader`\n\n/// TODO: Replace with the new includer\n\npub fn find_included_files(shader: PathBuf) -> Option<Vec<PathBuf>> {\n\n let s = match read_shader_src(shader.clone()) {\n\n Ok(src) => src,\n\n _ => return None,\n\n };\n\n\n\n // Find potential include files\n\n let mut includes = Vec::new();\n\n for line in s.lines() {\n\n if is_include_line(line.trim_start()) {\n\n let shader_name = pragma_shader_name(line);\n\n let path = shader\n\n .parent()\n\n .expect(\"Could not read path from shader source file\");\n\n let s = path.join(Path::new(shader_name.as_str()));\n\n includes.push(s);\n\n }\n\n }\n\n Some(includes)\n\n}\n", "file_path": "src/shader/read.rs", "rank": 10, "score": 83473.95109082297 }, { "content": "/// Perform the required OpenGL dance to create an OpenGL shader\n\nfn shader_from_string(\n\n source: String,\n\n shader_type: gl::types::GLuint,\n\n) -> anyhow::Result<gl::types::GLuint, ShaderError> {\n\n let c_src = CString::new(source).expect(\"Could not convert source to CString\");\n\n\n\n // check for includes\n\n\n\n let id = unsafe { gl::CreateShader(shader_type) };\n\n\n\n unsafe {\n\n gl::ShaderSource(id, 1, &c_src.as_ptr(), std::ptr::null());\n\n 
gl::CompileShader(id);\n\n }\n\n\n\n let mut success: gl::types::GLint = 1;\n\n unsafe {\n\n gl::GetShaderiv(id, gl::COMPILE_STATUS, &mut success);\n\n }\n\n\n", "file_path": "src/shader/glsl.rs", "rank": 11, "score": 80962.65334566268 }, { "content": "pub fn string_between<'v>(value: &'v str, start: &str, end: &str) -> &'v str {\n\n if let Some(start_idx) = value.rfind(start) {\n\n if let Some(end_idx) = value.rfind(end) {\n\n let from_byte = start_idx + start.len();\n\n let to_byte = end_idx;\n\n if from_byte < value.len() {\n\n return &value[from_byte..to_byte];\n\n }\n\n }\n\n }\n\n \"\"\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 12, "score": 64314.48506243226 }, { "content": "pub fn handle_events<T>(\n\n event: Event<'_, T>,\n\n control_flow: &mut ControlFlow,\n\n world_state: &mut AppState,\n\n timer: &mut Timer,\n\n context: &ContextWrapper<PossiblyCurrent, Window>,\n\n buffer: &Buffer,\n\n shader: &mut ShaderService,\n\n) {\n\n *control_flow = ControlFlow::Poll;\n\n context.swap_buffers().unwrap();\n\n\n\n match event {\n\n Event::WindowEvent { event, .. 
} => {\n\n match event {\n\n WindowEvent::CloseRequested => {\n\n info!(\"Bye now...\");\n\n world_state.is_running = false;\n\n buffer.delete();\n\n *control_flow = ControlFlow::Exit\n", "file_path": "src/input.rs", "rank": 13, "score": 63482.64637618586 }, { "content": "pub fn main() {\n\n println!(\"glsl loader\");\n\n\n\n let (sender, _receiver) = channel();\n\n watch_all(sender, vec![PathBuf::from(\"./examples/base.frag\")]);\n\n}\n", "file_path": "components/glsl-watcher/examples/watcher.rs", "rank": 14, "score": 63233.34839004712 }, { "content": "pub fn find_minime_tool() -> Option<Minime> {\n\n match which(\"minime-preprocess\") {\n\n Ok(path) => Some(Minime::new(path)),\n\n Err(e) => {\n\n warn!(\"WARNING: Can't find the minime toolchain: {}\", e);\n\n None\n\n }\n\n }\n\n}\n", "file_path": "src/minime.rs", "rank": 15, "score": 58440.83479707342 }, { "content": "#[test]\n\nfn test_shader_reload() {\n\n let shader_path = PathBuf::from(\"./tests/files/main_test.glsl\");\n\n let mut pre_processor = PreProcessor::new(shader_path);\n\n pre_processor.reload();\n\n dbg!(&pre_processor.shader_src);\n\n assert_eq!(pre_processor.shader_src.contains(\"#pragma include\"), false);\n\n}\n\n\n", "file_path": "tests/shader_include.rs", "rank": 16, "score": 56550.85223388334 }, { "content": "pub fn seek(playback_time: f32, playback_control: PlaybackControl) -> f32 {\n\n match playback_control {\n\n PlaybackControl::Forward(t) => playback_time + t,\n\n PlaybackControl::Rewind(t) => playback_time - t,\n\n }\n\n}\n", "file_path": "src/state.rs", "rank": 17, "score": 49469.26741166129 }, { "content": "#[test]\n\nfn test_camera_integration() {\n\n let shader_path = PathBuf::from(\"./tests/files/camera_integration_test.glsl\");\n\n let mut pre_processor = PreProcessor::new(shader_path);\n\n pre_processor.reload();\n\n dbg!(&pre_processor.shader_src);\n\n\n\n let lines = pre_processor.shader_src.lines().count();\n\n assert!(lines > 3, \"No integration added\");\n\n}\n", 
"file_path": "tests/shader_include.rs", "rank": 18, "score": 49443.66657346698 }, { "content": "fn is_include_line(s: &str) -> bool {\n\n s.starts_with(\"#pragma\") && s.contains(\"include\")\n\n}\n\n\n", "file_path": "src/shader/read.rs", "rank": 19, "score": 45853.18163857194 }, { "content": "/// Using `notify` to watch for changes to any defined shader file\n\n/// `sender` - std::sync::mpsc::channel\n\n/// `files` - Which files to watch\n\n///\n\npub fn watch_all(sender: Sender<PathBuf>, files: Vec<PathBuf>) {\n\n let (watch_sender, watch_receiver) = channel();\n\n let mut watcher = raw_watcher(watch_sender).unwrap();\n\n\n\n let directories: Vec<PathBuf> = files\n\n .iter()\n\n .filter_map(|p| fs::canonicalize(p).ok())\n\n .collect();\n\n\n\n println!(\"Watching files shaders in:\");\n\n for dir in &directories {\n\n watcher\n\n .watch(dir.as_path(), RecursiveMode::Recursive)\n\n .unwrap();\n\n println!(\" {:?}\", dir);\n\n }\n\n\n\n watch_loop(sender, watch_receiver, directories);\n\n}\n\n\n", "file_path": "components/glsl-watcher/src/lib.rs", "rank": 20, "score": 45356.561208809144 }, { "content": "/// Using `notify` to watch for changes to any defined shader file\n\n/// `sender` - std::sync::mpsc::channel\n\n/// `dir` - Which dir to find the files in\n\n/// `vs` - vertex shader name located in `dir`\n\n/// `fs` - fragment shader name located in `dir\n\n///\n\npub fn watch(sender: Sender<bool>, dir: &str, vs: &str, fs: &str) {\n\n let (watch_sender, watch_receiver) = channel();\n\n let mut watcher = raw_watcher(watch_sender).unwrap();\n\n watcher.watch(\"./\", RecursiveMode::Recursive).unwrap();\n\n println!(\"Watching shaders in {}\", dir);\n\n\n\n loop {\n\n // NOTE: It's likely that a change to a file will trigger two successive WRITE events\n\n let changed_file = match watch_receiver.recv() {\n\n Ok(RawEvent {\n\n path: Some(path),\n\n op: Ok(op),\n\n ..\n\n }) => {\n\n let file_name = path.file_name().unwrap().to_str().unwrap();\n\n if op == Op::WRITE && 
(file_name == vs || file_name == fs) {\n\n println!(\"change in: {:?}\", file_name);\n\n Some(path)\n\n } else {\n\n None\n", "file_path": "components/glsl-watcher/src/lib.rs", "rank": 21, "score": 42233.15259273699 }, { "content": "/// Generic trait for handling window events.\n\n/// Mostly used to get the event types correct.\n\npub trait WindowEventHandler {\n\n /// Handles window events. The implementation should return true if the event was used\n\n /// otherwise false.\n\n /// * `self` - A mutable reference to the implementation\n\n /// * `event` - The event to react to\n\n fn handle_window_events(&mut self, event: &WindowEvent<'_>) -> bool;\n\n}\n", "file_path": "src/event.rs", "rank": 22, "score": 36348.5820434985 }, { "content": "pub trait CameraModel: WindowEventHandler {\n\n fn handle_mouse(&mut self, mouse: &Mouse, delta_time: f32);\n\n\n\n fn calculate_uniform_data(&mut self) -> Mat4;\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct OrbitCamera {\n\n pos: Vec3,\n\n target: Vec3,\n\n angle: Vec2,\n\n speed: f32,\n\n zoom: f32,\n\n}\n\n\n\nimpl Default for OrbitCamera {\n\n fn default() -> Self {\n\n Self {\n\n pos: Vec3::new(02.0, 2.0, -2.0),\n\n target: Vec3::new(0.0, 0.0, 0.0),\n", "file_path": "src/camera.rs", "rank": 23, "score": 33451.373141644566 }, { "content": "fn render(\n\n context: &ContextWrapper<PossiblyCurrent, Window>,\n\n state: &mut AppState,\n\n shaders: &ShaderService,\n\n buffer: &Buffer,\n\n) {\n\n unsafe {\n\n gl::Viewport(0, 0, state.width, state.height);\n\n gl::ClearColor(0.3, 0.3, 0.5, 1.0);\n\n }\n\n\n\n if let Some(program) = &shaders.program {\n\n unsafe { gl::UseProgram(program.id) };\n\n } else {\n\n unsafe { gl::UseProgram(0) };\n\n }\n\n\n\n unsafe {\n\n let program = shaders.program.as_ref().unwrap();\n\n\n", "file_path": "src/main.rs", "rank": 24, "score": 32866.049845025766 }, { "content": "fn main() {\n\n SimpleLogger::new().init().unwrap();\n\n\n\n // Parse command line arguments using `structopt`\n\n let config = 
Config::parse();\n\n\n\n // verify that all specified file does exist\n\n let mut timer = Timer::new();\n\n\n\n let mut event_loop = EventLoop::new();\n\n let window = WindowBuilder::new().with_title(\"Skuggbox\");\n\n\n\n let window_context = ContextBuilder::new()\n\n .build_windowed(window, &event_loop)\n\n .unwrap();\n\n\n\n let context = unsafe { window_context.make_current().unwrap() };\n\n\n\n gl::load_with(|s| context.get_proc_address(s) as *const _);\n\n\n", "file_path": "src/main.rs", "rank": 25, "score": 32866.049845025766 }, { "content": "fn watch_loop(\n\n sender: Sender<PathBuf>,\n\n watch_receiver: Receiver<RawEvent>,\n\n directories: Vec<PathBuf>,\n\n) {\n\n loop {\n\n // NOTE: It's likely that a change to a file will trigger two successive WRITE events\n\n let changed_file = match watch_receiver.recv() {\n\n Ok(RawEvent {\n\n path: Some(path),\n\n op: Ok(op),\n\n ..\n\n }) => {\n\n let file_name = path.file_name().unwrap().to_str().unwrap();\n\n println!(\"on change in: {:?}\", path.to_str().unwrap());\n\n if op == Op::WRITE && directories.contains(&path) {\n\n println!(\"change in: {:?}\", file_name);\n\n Some(path)\n\n } else {\n\n None\n", "file_path": "components/glsl-watcher/src/lib.rs", "rank": 26, "score": 28513.668680990846 }, { "content": " Int(i32),\n\n Float(f32),\n\n Bool(bool),\n\n}\n\n\n\n#[derive(Debug)]\n\n#[allow(dead_code)]\n\npub struct Uniform {\n\n name: String,\n\n glsl_type: GLSLType,\n\n value: Option<GLSLValue>,\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum UniformError {\n\n ParseError,\n\n TypeError,\n\n}\n\n\n\n/// Read the given shader and extract all existing uniform values\n\n/// Append these to the uniform handler\n", "file_path": "src/uniforms.rs", "rank": 27, "score": 26462.32809426773 }, { "content": " let u_type = parts.get(1).ok_or(UniformError::ParseError)?;\n\n let uniform_type = GLSLType::from_str(u_type)?;\n\n\n\n Ok(Uniform {\n\n name: uniform_name,\n\n glsl_type: uniform_type,\n\n value: None,\n\n 
})\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::str::FromStr;\n\n\n\n use super::{extract_uniform, is_uniform, GLSLType};\n\n\n\n #[test]\n\n fn is_uniform_line() {\n\n let line = \"uniform vec2 value;\".to_string();\n\n assert!(is_uniform(line));\n", "file_path": "src/uniforms.rs", "rank": 28, "score": 26460.547574520882 }, { "content": " fn extract_uniform_valid_no_trailing_semicolon() {\n\n let line = \"uniform vec2 value;\".to_string();\n\n let uniform = extract_uniform(line).unwrap_or_else(|_| panic!(\"Uniform not ok\"));\n\n assert_eq!(uniform.name, \"value\");\n\n assert_eq!(uniform.glsl_type, GLSLType::from_str(\"vec2\").unwrap());\n\n }\n\n\n\n #[test]\n\n fn extract_uniform_errors() {\n\n let line = \"uniform ; //;;\".to_string();\n\n let uniform = extract_uniform(line);\n\n assert!(uniform.is_err());\n\n // assert_eq!(uniform, Err(UniformError::ParseError));\n\n }\n\n\n\n #[test]\n\n fn glsl_type_from_string() {\n\n assert_eq!(GLSLType::from_str(\"vec2\").unwrap(), GLSLType::Vec2);\n\n assert_eq!(GLSLType::from_str(\"vec3\").unwrap(), GLSLType::Vec3);\n\n assert_eq!(GLSLType::from_str(\"vec4\").unwrap(), GLSLType::Vec4);\n\n }\n\n}\n", "file_path": "src/uniforms.rs", "rank": 29, "score": 26458.955529566312 }, { "content": " type Err = UniformError;\n\n\n\n fn from_str(input: &str) -> Result<GLSLType, Self::Err> {\n\n match input {\n\n \"bool\" => Ok(GLSLType::Bool),\n\n \"int\" => Ok(GLSLType::Int),\n\n \"float\" => Ok(GLSLType::Float),\n\n\n\n \"vec2\" => Ok(GLSLType::Vec2),\n\n \"vec3\" => Ok(GLSLType::Vec3),\n\n \"vec4\" => Ok(GLSLType::Vec4),\n\n _ => Err(UniformError::TypeError),\n\n }\n\n }\n\n}\n\n\n\n/// hmm... 
might be overkill\n\n#[derive(Debug, PartialEq)]\n\n#[allow(dead_code)]\n\npub enum GLSLValue {\n", "file_path": "src/uniforms.rs", "rank": 30, "score": 26458.415167997005 }, { "content": "\n\n let line_with_spaces = \" uniform vec2 my_uniform;\".to_string();\n\n assert!(is_uniform(line_with_spaces));\n\n\n\n let line_with_comments = \"// uniform;\".to_string();\n\n assert!(!is_uniform(line_with_comments));\n\n\n\n let line_with_suffix_comment = \"uniform vec2 value; // a comment\".to_string();\n\n assert!(is_uniform(line_with_suffix_comment));\n\n }\n\n\n\n #[test]\n\n fn extract_uniform_valid() {\n\n let line = \"uniform vec2 value ; //;;\".to_string();\n\n let uniform = extract_uniform(line).unwrap_or_else(|_| panic!(\"Uniform not ok\"));\n\n assert_eq!(uniform.name, \"value\");\n\n assert_eq!(uniform.glsl_type, GLSLType::from_str(\"vec2\").unwrap());\n\n }\n\n\n\n #[test]\n", "file_path": "src/uniforms.rs", "rank": 31, "score": 26457.704185714592 }, { "content": "use std::fs::File;\n\nuse std::io::{BufRead, BufReader};\n\nuse std::path::PathBuf;\n\nuse std::str::FromStr;\n\n\n\n#[derive(Debug, PartialEq)]\n\n#[allow(dead_code)]\n\npub enum GLSLType {\n\n // primitives\n\n Int,\n\n Float,\n\n Bool,\n\n\n\n // collections\n\n Vec2,\n\n Vec3,\n\n Vec4,\n\n}\n\n\n\nimpl FromStr for GLSLType {\n", "file_path": "src/uniforms.rs", "rank": 32, "score": 26457.4387692179 }, { "content": " // in case something failed we want to extract the error and return it\n\n if success == 0 {\n\n // fetch the required buffer length\n\n let mut len: gl::types::GLint = 0;\n\n unsafe {\n\n gl::GetShaderiv(id, gl::INFO_LOG_LENGTH, &mut len);\n\n }\n\n\n\n let error = cstr_with_len(len as usize);\n\n\n\n unsafe {\n\n gl::GetShaderInfoLog(\n\n id,\n\n len,\n\n std::ptr::null_mut(),\n\n error.as_ptr() as *mut gl::types::GLchar,\n\n )\n\n }\n\n\n\n // Prints the compilation error to console\n\n error!(\"{}\", error.to_string_lossy());\n\n return Err(ShaderError::CompilationError {\n\n error: 
cstr_to_str(&error),\n\n });\n\n }\n\n\n\n Ok(id)\n\n}\n", "file_path": "src/shader/glsl.rs", "rank": 35, "score": 23358.964029676663 }, { "content": "use std::ffi::CString;\n\nuse std::path::PathBuf;\n\n\n\nuse log::error;\n\n\n\nuse crate::shader::PreProcessor;\n\nuse crate::utils::{cstr_to_str, cstr_with_len};\n\n\n\n#[derive(Debug)]\n\npub enum ShaderError {\n\n CompilationError { error: String },\n\n FileError { error: String },\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Shader {\n\n pub(crate) id: gl::types::GLuint,\n\n}\n\n\n\nimpl Shader {\n", "file_path": "src/shader/glsl.rs", "rank": 36, "score": 23356.623896175475 }, { "content": " #[allow(unused)]\n\n pub fn from_file(\n\n source_file: PathBuf,\n\n shader_type: gl::types::GLuint,\n\n ) -> anyhow::Result<Shader, ShaderError> {\n\n // TODO: move to root\n\n let mut includer = PreProcessor::new(source_file);\n\n includer.process_includes();\n\n let shader_src = includer.shader_src;\n\n let id = shader_from_string(shader_src, shader_type)?;\n\n Ok(Shader { id })\n\n }\n\n\n\n pub fn from_source(\n\n source: String,\n\n shader_type: gl::types::GLuint,\n\n ) -> anyhow::Result<Shader, ShaderError> {\n\n let id = shader_from_string(source, shader_type)?;\n\n Ok(Shader { id })\n\n }\n\n}\n\n\n\n/// Perform the required OpenGL dance to create an OpenGL shader\n", "file_path": "src/shader/glsl.rs", "rank": 41, "score": 23352.16390051357 }, { "content": " // if the camera integration should be used or not\n\n pub use_camera_integration: bool,\n\n\n\n // contains the final shader\n\n pub shader_src: String,\n\n\n\n // The files which makes up the content of this shader\n\n pub files: Vec<PathBuf>,\n\n}\n\n\n\nimpl PreProcessor {\n\n pub fn new(shader_path: PathBuf) -> Self {\n\n let shader_src = read_shader_src(shader_path.clone()).unwrap();\n\n Self {\n\n main_shader_src: shader_src,\n\n main_shader_path: shader_path,\n\n parts: Default::default(),\n\n use_camera_integration: false,\n\n shader_src: 
Default::default(),\n\n files: vec![],\n", "file_path": "src/shader/read.rs", "rank": 42, "score": 23350.853940195753 }, { "content": "use log::{debug, error};\n\nuse regex::Regex;\n\n/// Utility functions to read shader content\n\n/// and produce the necessary pieces to construct a\n\nuse std::collections::{BTreeMap, HashMap};\n\nuse std::fs::File;\n\nuse std::io::Read;\n\nuse std::path::{Path, PathBuf};\n\n\n\nuse crate::shader::ShaderError;\n\nuse crate::utils::{include_statement_from_string, pragma_shader_name};\n\nuse crate::SKUGGBOX_CAMERA;\n\n\n\n/// Read a shader from disk, return String or ShaderError\n", "file_path": "src/shader/read.rs", "rank": 43, "score": 23348.797725672066 }, { "content": " }\n\n }\n\n\n\n pub fn reload(&mut self) {\n\n match read_shader_src(self.main_shader_path.clone()) {\n\n Ok(src) => self.process(src),\n\n Err(e) => error!(\"Could not re-compile shader {:?}\", e),\n\n };\n\n }\n\n\n\n pub fn process(&mut self, shader_src: String) {\n\n self.main_shader_src = shader_src;\n\n self.process_includes();\n\n self.process_integrations();\n\n self.recreate_file_list();\n\n }\n\n\n\n /// Handle pragma directives which are not\n\n fn process_pragma(&self, line: &str) -> Option<PragmaDirective> {\n\n let camera_regex = Regex::new(r\"^\\s*#pragma\\s+skuggbox\\s*\\(\\s*camera\\s*\\)\\s*$\").unwrap();\n", "file_path": "src/shader/read.rs", "rank": 44, "score": 23348.112255462802 }, { "content": "\n\n if line.contains(\"#pragma\") && camera_regex.is_match(line) {\n\n debug!(\"Found camera integration: {:?}\", line);\n\n\n\n let pragma_content = match self.use_camera_integration {\n\n true => \"#define USE_SKUGGBOX_CAMERA\\n\".to_string() + SKUGGBOX_CAMERA,\n\n _ => SKUGGBOX_CAMERA.to_string(),\n\n };\n\n return Some(PragmaDirective::Camera(pragma_content));\n\n }\n\n None\n\n }\n\n\n\n pub fn process_integrations(&mut self) {\n\n self.shader_src = self\n\n .shader_src\n\n .lines()\n\n .map(|line| match self.process_pragma(line) {\n\n 
Some(PragmaDirective::Camera(content)) => content,\n\n _ => line.to_string(),\n", "file_path": "src/shader/read.rs", "rank": 45, "score": 23347.413925444285 }, { "content": "\n\n#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd)]\n\npub enum PragmaDirective {\n\n Camera(String),\n\n}\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd)]\n\npub struct Part {\n\n pub shader_path: PathBuf,\n\n pub shader_src: String,\n\n pub shader_name: String,\n\n pub parent_path: PathBuf,\n\n pub parent_src: String,\n\n}\n\n\n\npub struct PreProcessor {\n\n main_shader_path: PathBuf,\n\n main_shader_src: String,\n\n parts: BTreeMap<String, Part>,\n\n\n", "file_path": "src/shader/read.rs", "rank": 46, "score": 23347.26790487669 }, { "content": " })\n\n .collect::<Vec<String>>()\n\n .join(\"\\n\");\n\n }\n\n\n\n pub fn process_includes(&mut self) {\n\n self.build_include_map(self.main_shader_path.clone(), self.main_shader_src.clone());\n\n\n\n let mut include_map: HashMap<String, String> = HashMap::new();\n\n // Go through all parts to be included and build up a new map\n\n for (include_name, part) in self.parts.iter() {\n\n let part_src = part.shader_src.clone();\n\n\n\n // find the includes in this particular part src\n\n let s = part_src\n\n .lines()\n\n .map(|line| {\n\n if is_include_line(line) {\n\n let pragma_statement = pragma_shader_name(line);\n\n let include_string =\n", "file_path": "src/shader/read.rs", "rank": 47, "score": 23346.59142718755 }, { "content": " include_map.get(&pragma_statement).unwrap().to_owned()\n\n } else {\n\n line.to_string()\n\n }\n\n })\n\n .collect::<Vec<String>>()\n\n .join(\"\\n\");\n\n\n\n self.shader_src = final_shader;\n\n }\n\n\n\n pub fn build_include_map(&mut self, shader_path: PathBuf, shader_src: String) {\n\n self.included_files(shader_path.clone(), shader_src.clone())\n\n .iter()\n\n .for_each(|(inc_path, inc_name)| {\n\n let inc_src = read_shader_src(inc_path.to_owned()).unwrap();\n\n\n\n let part = Part {\n\n 
shader_path: inc_path.to_owned(),\n\n shader_src: inc_src,\n", "file_path": "src/shader/read.rs", "rank": 48, "score": 23346.485369334652 }, { "content": "pub mod constants;\n\npub mod glsl;\n\npub mod read;\n\npub mod service;\n\n\n\npub use constants::*;\n\npub use glsl::*;\n\npub use read::*;\n\npub use service::*;\n", "file_path": "src/shader/mod.rs", "rank": 49, "score": 23345.381427781464 }, { "content": " .parent()\n\n .unwrap()\n\n .join(Path::new(shader_name.as_str()))\n\n .canonicalize()\n\n .unwrap(),\n\n shader_name,\n\n )\n\n })\n\n .collect()\n\n }\n\n\n\n pub fn recreate_file_list(&mut self) {\n\n self.files = self\n\n .parts\n\n .iter()\n\n .map(|(_, part)| part.shader_path.clone())\n\n .collect::<Vec<PathBuf>>();\n\n }\n\n}\n", "file_path": "src/shader/read.rs", "rank": 50, "score": 23344.87792613013 }, { "content": " shader_name: inc_name.to_owned(),\n\n parent_path: shader_path.to_owned(),\n\n parent_src: shader_src.to_owned(),\n\n };\n\n\n\n self.parts.insert(inc_name.to_owned(), part.clone());\n\n\n\n self.build_include_map(part.to_owned().shader_path, part.shader_src);\n\n });\n\n }\n\n\n\n /// Find #pragma include directives in a shader and return path and shader name to included files\n\n pub fn included_files(&self, parent_path: PathBuf, source: String) -> Vec<(PathBuf, String)> {\n\n source\n\n .lines()\n\n .filter(|line| is_include_line(line.trim_start()))\n\n .map(pragma_shader_name)\n\n .map(|shader_name| {\n\n (\n\n parent_path\n", "file_path": "src/shader/read.rs", "rank": 51, "score": 23344.456995249166 }, { "content": " include_statement_from_string(pragma_statement.clone());\n\n let k = self.parts.get(&pragma_statement).unwrap();\n\n k.shader_src.replace(&include_string, &k.shader_src)\n\n } else {\n\n line.to_string()\n\n }\n\n })\n\n .collect::<Vec<String>>()\n\n .join(\"\\n\");\n\n\n\n include_map.insert(include_name.to_owned(), s);\n\n }\n\n\n\n let final_shader = self\n\n .main_shader_src\n\n .lines()\n\n .map(|line| {\n\n if 
is_include_line(line) {\n\n let pragma_statement = pragma_shader_name(line);\n\n debug!(\"including: {:?}\", pragma_statement);\n", "file_path": "src/shader/read.rs", "rank": 52, "score": 23344.1673573147 }, { "content": "use skuggbox::shader::*;\n\nuse std::path::PathBuf;\n\n\n\n#[test]\n", "file_path": "tests/shader_include.rs", "rank": 53, "score": 23344.10398248338 }, { "content": "pub const SKUGGBOX_CAMERA: &str = \"#ifdef USE_SKUGGBOX_CAMERA\n\n uniform mat4 sbCameraTransform;\n\n void skuggbox_camera(vec2 uv, inout vec3 ro, inout vec3 rd) {\n\n ro = sbCameraTransform[3].xyz;\n\n rd = mat3(sbCameraTransform) * normalize(vec3(uv, 1));\n\n }\n\n #else\n\n void skuggbox_camera(vec2 uv, inout vec3 ro, inout vec3 rd) {\n\n // empty\n\n }\n\n #endif\";\n\n\n\npub const VERTEX_SHADER: &str = \"#version 330 core\n\n layout (location = 0) in vec3 position;\n\n void main() {\n\n gl_Position = vec4(position, 1.0);\n\n }\n\n \";\n", "file_path": "src/shader/constants.rs", "rank": 54, "score": 23344.03451436422 }, { "content": "fn get_buffer_type(buffer_type: BufferType) -> gl::types::GLenum {\n\n match buffer_type {\n\n BufferType::VertexBuffer => gl::ARRAY_BUFFER,\n\n BufferType::IndexBuffer => gl::ELEMENT_ARRAY_BUFFER,\n\n }\n\n}\n\n\n\nimpl Buffer {\n\n pub fn new_vertex_buffer() -> Self {\n\n let vertices: Vec<f32> = vec![\n\n 1.0, 1.0, 0.0, // 0\n\n -1.0, 1.0, 0.0, // 1\n\n 1.0, -1.0, 0.0, // 2\n\n -1.0, -1.0, 0.0, // 3\n\n ];\n\n Buffer::vertex_buffer(BufferType::VertexBuffer, &vertices)\n\n }\n\n\n\n pub fn vertex_buffer(buffer_type: BufferType, vertices: &[f32]) -> Self {\n\n let buf_type = get_buffer_type(buffer_type);\n", "file_path": "src/buffer.rs", "rank": 55, "score": 21742.66630406265 }, { "content": "use std::path::PathBuf;\n\nuse std::process::Command;\n\n\n\nuse log::{error, warn};\n\nuse which::which;\n\n\n\npub struct Minime {\n\n preprocessor: PathBuf,\n\n}\n\n\n\nimpl Minime {\n\n pub fn new(preprocessor: PathBuf) -> Minime {\n\n Minime { 
preprocessor }\n\n }\n\n\n\n pub fn preprocess(&self, source: PathBuf, camera_integration: bool) -> Option<String> {\n\n let mut cmd = Command::new(&self.preprocessor);\n\n\n\n if camera_integration {\n\n cmd.arg(\"-D\");\n", "file_path": "src/minime.rs", "rank": 56, "score": 13.903742578565236 }, { "content": "pub use input::*;\n\npub use macros::*;\n\npub use minime::*;\n\npub use mouse::*;\n\npub use shader::*;\n\npub use state::*;\n\npub use timer::*;\n\npub use uniforms::*;\n\npub use utils::*;\n", "file_path": "src/lib.rs", "rank": 57, "score": 10.672504316341797 }, { "content": " cmd.arg(\"USE_SKUGGBOX_CAMERA\");\n\n }\n\n\n\n let output = cmd\n\n .arg(\"-stdout\")\n\n .arg(source)\n\n .output()\n\n .expect(\"Failed to invoke minime-preprocessor\");\n\n\n\n if !output.status.success() {\n\n error!(\n\n \"ERROR: Minime failed with error: {}\",\n\n String::from_utf8(output.stderr).unwrap()\n\n );\n\n return None;\n\n }\n\n\n\n Some(String::from_utf8(output.stdout).unwrap())\n\n }\n\n}\n\n\n", "file_path": "src/minime.rs", "rank": 58, "score": 10.315382740188573 }, { "content": "use std::time::Instant;\n\n\n\npub struct Timer {\n\n /// current frame\n\n time: Instant,\n\n /// last frame\n\n last_time: Instant,\n\n\n\n pub delta_time: f32,\n\n}\n\n\n\nimpl Default for Timer {\n\n fn default() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n\nimpl Timer {\n\n pub fn new() -> Self {\n\n Self {\n", "file_path": "src/timer.rs", "rank": 59, "score": 10.238621475170818 }, { "content": "\n\n // push the camera transform to the shader\n\n let transform = state.camera.calculate_uniform_data();\n\n let position = transform.w_axis;\n\n\n\n let location = get_uniform_location(program, \"sbCameraPosition\");\n\n gl::Uniform3f(location, position.x, position.y, position.z);\n\n\n\n let location = get_uniform_location(program, \"sbCameraTransform\");\n\n gl::UniformMatrix4fv(location, 1, gl::FALSE, &transform.to_cols_array()[0]);\n\n\n\n gl::Clear(gl::COLOR_BUFFER_BIT);\n\n 
buffer.bind();\n\n gl::DrawArrays(gl::TRIANGLE_STRIP, 0, 4);\n\n\n\n gl::UseProgram(0);\n\n }\n\n\n\n unsafe { gl::UseProgram(0) };\n\n context.swap_buffers().unwrap();\n\n}\n", "file_path": "src/main.rs", "rank": 60, "score": 9.226330103419649 }, { "content": "use glam::Vec2;\n\nuse winit::event::{ElementState, MouseButton, WindowEvent};\n\n\n\nuse crate::event::WindowEventHandler;\n\n\n\n#[derive(Debug)]\n\npub struct Mouse {\n\n pub pos: Vec2,\n\n pub last_pos: Vec2,\n\n pub delta: Vec2,\n\n\n\n pub is_lmb_down: bool,\n\n pub is_mmb_down: bool,\n\n pub is_rmb_down: bool,\n\n pub is_first_rmb_click: bool,\n\n}\n\n\n\nimpl Default for Mouse {\n\n fn default() -> Self {\n\n Self {\n", "file_path": "src/mouse.rs", "rank": 61, "score": 9.169372828344107 }, { "content": " gl::UseProgram(program.id);\n\n\n\n // viewport resolution in pixels\n\n let location = get_uniform_location(program, \"iResolution\");\n\n gl::Uniform2f(location, state.width as f32, state.height as f32);\n\n\n\n let location = get_uniform_location(program, \"iTime\");\n\n gl::Uniform1f(location, state.playback_time);\n\n let location = get_uniform_location(program, \"iTimeDelta\");\n\n gl::Uniform1f(location, state.delta_time);\n\n\n\n // push mouse location to the shader\n\n let location = get_uniform_location(program, \"iMouse\");\n\n gl::Uniform4f(\n\n location,\n\n state.mouse.pos.x,\n\n state.mouse.pos.y,\n\n if state.mouse.is_lmb_down { 1.0 } else { 0.0 },\n\n if state.mouse.is_rmb_down { 1.0 } else { 0.0 },\n\n );\n", "file_path": "src/main.rs", "rank": 62, "score": 8.780131661855606 }, { "content": "#![warn(clippy::all, future_incompatible, nonstandard_style, rust_2018_idioms)]\n\n\n\npub mod buffer;\n\npub mod camera;\n\npub mod config;\n\npub mod event;\n\npub mod input;\n\npub mod macros;\n\npub mod minime;\n\npub mod mouse;\n\npub mod shader;\n\npub mod state;\n\npub mod timer;\n\npub mod uniforms;\n\npub mod utils;\n\n\n\npub use buffer::*;\n\npub use camera::*;\n\npub use 
config::*;\n\npub use event::*;\n", "file_path": "src/lib.rs", "rank": 63, "score": 8.550418586737134 }, { "content": "use crate::{CameraModel, Mouse, OrbitCamera};\n\nuse serde::{Deserialize, Serialize};\n\n\n\npub struct AppState {\n\n pub width: i32,\n\n pub height: i32,\n\n /// App state - is the application running?\n\n pub is_running: bool,\n\n pub delta_time: f32,\n\n pub playback_time: f32,\n\n pub mouse: Mouse,\n\n /// Running or paused?\n\n pub play_mode: PlayMode,\n\n pub camera: Box<dyn CameraModel>,\n\n}\n\n\n\nimpl Default for AppState {\n\n fn default() -> Self {\n\n Self {\n\n width: 1024,\n", "file_path": "src/state.rs", "rank": 64, "score": 8.458907127340698 }, { "content": " )\n\n }\n\n\n\n // Feature controls\n\n VirtualKeyCode::Key1 => {\n\n if shader.use_camera_integration {\n\n info!(\"Disabling camera integration\");\n\n shader.use_camera_integration = false;\n\n shader.reload();\n\n }\n\n }\n\n VirtualKeyCode::Key2 => {\n\n if !shader.use_camera_integration {\n\n info!(\"Enabling camera integration. 
Please use '#pragma skuggbox(camera)' in your shader\");\n\n shader.use_camera_integration = true;\n\n shader.reload();\n\n }\n\n }\n\n VirtualKeyCode::Period => {\n\n // reset all camera settings\n", "file_path": "src/input.rs", "rank": 65, "score": 7.839814601041526 }, { "content": "extern crate gl;\n\nextern crate glutin;\n\nextern crate winit;\n\n\n\nuse clap::Parser;\n\nuse std::ffi::CString;\n\nuse std::sync::mpsc::channel;\n\nuse std::thread;\n\n\n\nuse glutin::{ContextBuilder, ContextWrapper, PossiblyCurrent};\n\n\n\nuse simple_logger::SimpleLogger;\n\nuse winit::{\n\n event_loop::EventLoop,\n\n platform::run_return::EventLoopExtRunReturn,\n\n window::{Window, WindowBuilder},\n\n};\n\n\n\nuse skuggbox::{\n\n buffer::Buffer,\n\n config::Config,\n\n handle_events,\n\n shader::{ShaderProgram, ShaderService},\n\n state::{AppState, PlayMode},\n\n timer::Timer,\n\n};\n\n\n", "file_path": "src/main.rs", "rank": 66, "score": 7.089304966044145 }, { "content": "use std::path::PathBuf;\n\n\n\n#[derive(clap::Parser, Debug)]\n\n#[clap(name = \"skuggbox\", about = \"Skuggbox GLSL shader viewer\")]\n\npub struct Config {\n\n /// GLSL shader file to load\n\n #[clap(name = \"FILE\", parse(from_os_str))]\n\n pub file: PathBuf,\n\n}\n", "file_path": "src/config.rs", "rank": 67, "score": 6.313440832108344 }, { "content": " height: 768,\n\n is_running: true,\n\n delta_time: 0.0,\n\n playback_time: 0.0,\n\n mouse: Mouse::default(),\n\n play_mode: PlayMode::Playing,\n\n camera: Box::from(OrbitCamera::default()),\n\n }\n\n }\n\n}\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub enum PlayMode {\n\n Playing,\n\n Paused,\n\n}\n\n\n\nimpl Default for PlayMode {\n\n fn default() -> Self {\n\n Self::Paused\n\n }\n\n}\n\n\n\npub enum PlaybackControl {\n\n Forward(f32),\n\n Rewind(f32),\n\n}\n\n\n", "file_path": "src/state.rs", "rank": 68, "score": 6.026690714915564 }, { "content": " pub fn bind(&self) {\n\n unsafe {\n\n gl::BindVertexArray(self.gl_buffer);\n\n }\n\n }\n\n\n\n pub fn 
delete(&self) {\n\n unsafe { gl::DeleteBuffers(0, &self.gl_buffer as *const _) }\n\n }\n\n}\n", "file_path": "src/buffer.rs", "rank": 69, "score": 5.8771789034276996 }, { "content": "use crate::{\n\n buffer::Buffer,\n\n state::{seek, AppState, PlayMode, PlaybackControl},\n\n timer::Timer,\n\n OrbitCamera, ShaderService, WindowEventHandler,\n\n};\n\nuse glam::Vec2;\n\nuse glutin::{ContextWrapper, PossiblyCurrent};\n\nuse log::info;\n\nuse winit::{\n\n event::{ElementState, Event, VirtualKeyCode, WindowEvent},\n\n event_loop::ControlFlow,\n\n window::Window,\n\n};\n", "file_path": "src/input.rs", "rank": 70, "score": 5.592727848304429 }, { "content": " let size = (vertices.len() * std::mem::size_of::<f32>()) as gl::types::GLsizeiptr;\n\n\n\n let mut vbo: gl::types::GLuint = 0;\n\n unsafe {\n\n gl::GenBuffers(1, &mut vbo);\n\n gl::BindBuffer(gl::ARRAY_BUFFER, vbo);\n\n gl::BufferData(\n\n buf_type, // target\n\n size, // size of data in bytes\n\n vertices.as_ptr() as *const gl::types::GLvoid, // pointer to data\n\n gl::STATIC_DRAW, // usage\n\n );\n\n gl::BindBuffer(buf_type, 0);\n\n }\n\n\n\n // set up vertex array object\n\n let mut vao: gl::types::GLuint = 0;\n\n unsafe {\n\n gl::GenVertexArrays(1, &mut vao);\n\n gl::BindVertexArray(vao);\n", "file_path": "src/buffer.rs", "rank": 71, "score": 5.520906840126204 }, { "content": "use std::ffi::CString;\n\n\n", "file_path": "src/utils.rs", "rank": 72, "score": 5.499331444024101 }, { "content": " );\n\n\n\n let line_double_quotes = \"#pragma include(\\\"some-shader.glsl\\\")\";\n\n assert_eq!(\n\n pragma_shader_name(line_double_quotes),\n\n \"some-shader.glsl\".to_string()\n\n );\n\n\n\n let line_no_quotes = \"#pragma include(some-shader.glsl)\";\n\n assert_eq!(\n\n pragma_shader_name(line_no_quotes),\n\n \"some-shader.glsl\".to_string()\n\n );\n\n }\n\n}\n", "file_path": "src/utils.rs", "rank": 73, "score": 5.423229232390819 }, { "content": " let mut state = AppState::default();\n\n\n\n // shader compiler channel\n\n 
let (sender, receiver) = channel();\n\n\n\n let mut shader = ShaderService::new(config.file);\n\n\n\n // TODO: Ensure we only watch the files currently in the shader\n\n let files = shader.files.clone();\n\n let _ = thread::spawn(move || {\n\n glsl_watcher::watch_all(sender, files);\n\n });\n\n\n\n let vertex_buffer = Buffer::new_vertex_buffer();\n\n\n\n while state.is_running {\n\n if receiver.try_recv().is_ok() {\n\n shader.reload();\n\n }\n\n\n", "file_path": "src/main.rs", "rank": 74, "score": 4.9674458312218945 }, { "content": "#![warn(clippy::all)]\n\n#![warn(rust_2018_idioms)]\n\n\n\nuse std::fs;\n\nuse std::path::PathBuf;\n\nuse std::sync::mpsc::{channel, Receiver, Sender};\n\nuse std::time::Duration;\n\n\n\nuse notify::{raw_watcher, Op, RawEvent, RecursiveMode, Watcher};\n\n\n\n/// Using `notify` to watch for changes to any defined shader file\n\n/// `sender` - std::sync::mpsc::channel\n\n/// `dir` - Which dir to find the files in\n\n/// `vs` - vertex shader name located in `dir`\n\n/// `fs` - fragment shader name located in `dir\n\n///\n", "file_path": "components/glsl-watcher/src/lib.rs", "rank": 75, "score": 4.9326721808153104 }, { "content": " time: Instant::now(),\n\n last_time: Instant::now(),\n\n delta_time: 0.0,\n\n }\n\n }\n\n\n\n /// Start of frame\n\n pub fn start(&mut self) {\n\n self.time = Instant::now();\n\n self.delta_time = (self.time - self.last_time).as_secs_f32();\n\n }\n\n\n\n /// End of frame\n\n pub fn stop(&mut self) {\n\n self.last_time = self.time;\n\n }\n\n}\n", "file_path": "src/timer.rs", "rank": 76, "score": 4.9094743799113285 }, { "content": " if matches!(state.play_mode, PlayMode::Playing) {\n\n timer.start();\n\n state.delta_time = timer.delta_time;\n\n }\n\n\n\n event_loop.run_return(|event, _, control_flow| {\n\n handle_events(\n\n event,\n\n control_flow,\n\n &mut state,\n\n &mut timer,\n\n &context,\n\n &vertex_buffer,\n\n &mut shader,\n\n );\n\n });\n\n\n\n render(&context, &mut state, &shader, &vertex_buffer);\n\n\n\n 
timer.stop();\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 77, "score": 4.8710809393505485 }, { "content": " fn calculate_uniform_data(&mut self) -> Mat4 {\n\n self.pos.x = (self.angle.x * PI).sin() * self.zoom;\n\n self.pos.y = (self.angle.y * 1.53).sin() * self.zoom;\n\n self.pos.z = (self.angle.x * PI).cos() * self.zoom;\n\n\n\n let up = Vec3::new(0.0, 1.0, 0.0);\n\n let forward = (self.target - self.pos).normalize();\n\n let side = Vec3::cross(up, forward);\n\n\n\n Mat4::from_cols(\n\n Vec4::new(side.x, side.y, side.z, 0.0),\n\n Vec4::new(up.x, up.y, up.z, 0.0),\n\n Vec4::new(forward.x, forward.y, forward.z, 0.0),\n\n Vec4::new(self.pos.x, self.pos.y, self.pos.z, 1.0),\n\n )\n\n }\n\n}\n\n\n\nimpl WindowEventHandler for OrbitCamera {\n\n fn handle_window_events(&mut self, event: &WindowEvent<'_>) -> bool {\n", "file_path": "src/camera.rs", "rank": 78, "score": 4.487992608828366 }, { "content": " angle: Vec2::ZERO,\n\n speed: 1.0,\n\n zoom: 5.0,\n\n }\n\n }\n\n}\n\n\n\nimpl CameraModel for OrbitCamera {\n\n fn handle_mouse(&mut self, mouse: &Mouse, delta_time: f32) {\n\n // scale the x and y differently since the movement range is different\n\n self.angle += mouse.delta * Vec2::new(0.75, -1.5) * delta_time;\n\n\n\n if self.angle.x < -1.0 {\n\n self.angle.x = -1.0\n\n }\n\n if self.angle.y < -1.0 {\n\n self.angle.y = -1.0\n\n }\n\n }\n\n\n", "file_path": "src/camera.rs", "rank": 79, "score": 3.4607090465693697 }, { "content": "use glam::{Mat4, Vec2, Vec3, Vec4};\n\nuse glutin::event::WindowEvent;\n\nuse std::f32::consts::PI;\n\nuse winit::event::{ElementState, MouseScrollDelta, VirtualKeyCode};\n\n\n\nuse crate::event::WindowEventHandler;\n\nuse crate::mouse::Mouse;\n\n\n", "file_path": "src/camera.rs", "rank": 80, "score": 3.158013184077781 }, { "content": "use std::path::PathBuf;\n\nuse std::sync::mpsc::channel;\n\n\n\nuse glsl_watcher::watch_all;\n\n\n", "file_path": "components/glsl-watcher/examples/watcher.rs", "rank": 81, "score": 
3.1412637314790306 }, { "content": "#[allow(dead_code)]\n\n#[derive(Clone, Debug)]\n\npub enum BufferType {\n\n VertexBuffer,\n\n IndexBuffer,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct Buffer {\n\n pub gl_buffer: gl::types::GLuint,\n\n pub size: gl::types::GLsizeiptr,\n\n}\n\n\n", "file_path": "src/buffer.rs", "rank": 82, "score": 3.0712099016722494 }, { "content": " pos: Vec2::new(0.0, 0.0),\n\n last_pos: Vec2::ZERO,\n\n delta: Vec2::new(0.0, 0.0),\n\n\n\n is_lmb_down: false,\n\n is_mmb_down: false,\n\n is_rmb_down: false,\n\n is_first_rmb_click: false,\n\n }\n\n }\n\n}\n\n\n\nimpl WindowEventHandler for Mouse {\n\n fn handle_window_events(&mut self, event: &WindowEvent<'_>) -> bool {\n\n match event {\n\n WindowEvent::CursorMoved { position, .. } => {\n\n if self.is_rmb_down {\n\n self.delta = Vec2::new(\n\n position.x as f32 - self.pos.x,\n\n self.pos.y - position.y as f32,\n", "file_path": "src/mouse.rs", "rank": 83, "score": 2.8837611531172587 }, { "content": "use winit::event::WindowEvent;\n\n\n\n/// Generic trait for handling window events.\n\n/// Mostly used to get the event types correct.\n", "file_path": "src/event.rs", "rank": 84, "score": 2.8209368502472536 }, { "content": "#[allow(unused_macros)]\n\nmacro_rules! 
gl_error {\n\n ($($s:stmt;)*) => {\n\n $(\n\n $s;\n\n if cfg!(debug_assertions) {\n\n let err = gl::GetError();\n\n if err != gl::NO_ERROR {\n\n let err_str = match err {\n\n gl::INVALID_ENUM => \"GL_INVALID_ENUM\",\n\n gl::INVALID_VALUE => \"GL_INVALID_VALUE\",\n\n gl::INVALID_OPERATION => \"GL_INVALID_OPERATION\",\n\n gl::INVALID_FRAMEBUFFER_OPERATION => \"GL_INVALID_FRAMEBUFFER_OPERATION\",\n\n gl::OUT_OF_MEMORY => \"GL_OUT_OF_MEMORY\",\n\n gl::STACK_UNDERFLOW => \"GL_STACK_UNDERFLOW\",\n\n gl::STACK_OVERFLOW => \"GL_STACK_OVERFLOW\",\n\n _ => \"unknown error\"\n\n };\n\n println!(\"{}:{} - {} caused {}\",\n\n file!(),\n\n line!(),\n\n stringify!($s),\n\n err_str);\n\n }\n\n }\n\n )*\n\n }\n\n}\n", "file_path": "src/macros.rs", "rank": 85, "score": 2.4928127896085615 }, { "content": " gl::BindBuffer(buf_type, vbo);\n\n gl::EnableVertexAttribArray(0); // this is \"layout (location = 0)\" in vertex shader\n\n gl::VertexAttribPointer(\n\n 0, // index of the generic vertex attribute (\"layout (location = 0)\")\n\n 3, // the number of components per generic vertex attribute\n\n gl::FLOAT, // data type\n\n gl::FALSE, // normalized (int-to-float conversion)\n\n (3 * std::mem::size_of::<f32>()) as gl::types::GLint, // stride (byte offset between consecutive attributes)\n\n std::ptr::null(), // offset of the first component\n\n );\n\n gl::BindBuffer(buf_type, 0);\n\n gl::BindVertexArray(0);\n\n }\n\n\n\n Self {\n\n gl_buffer: vao,\n\n size,\n\n }\n\n }\n\n\n", "file_path": "src/buffer.rs", "rank": 86, "score": 2.202000606152815 }, { "content": " }\n\n }\n\n\n\n Ok(event) => {\n\n println!(\"broken event: {:?}\", event);\n\n None\n\n }\n\n Err(e) => {\n\n println!(\"watch error: {:?}\", e);\n\n None\n\n }\n\n };\n\n\n\n if changed_file.is_some() {\n\n sender.send(true).unwrap();\n\n }\n\n\n\n std::thread::sleep(Duration::from_millis(10));\n\n }\n\n}\n\n\n", "file_path": "components/glsl-watcher/src/lib.rs", "rank": 87, "score": 2.03091485413339 }, { "content": " }\n\n 
}\n\n Ok(event) => {\n\n println!(\"broken event: {:?}\", event);\n\n None\n\n }\n\n Err(e) => {\n\n println!(\"watch error: {:?}\", e);\n\n None\n\n }\n\n };\n\n\n\n if let Some(cf) = changed_file {\n\n sender.send(cf).unwrap();\n\n }\n\n\n\n std::thread::sleep(Duration::from_millis(10));\n\n }\n\n}\n", "file_path": "components/glsl-watcher/src/lib.rs", "rank": 88, "score": 1.9991691930808955 }, { "content": "# skuggbox\n\n\n\n```bash\n\n\n\ncargo run --release -- ./path/to/shader.glsl\n\n\n\n## with cargo watch\n\n\n\ncargo install cargo-watch\n\ncargo watch -x \"run --release\"\n\n\n\n```\n\n\n\n### Run\n\n\n\n`cargo run --release`\n\n\n\n### Run tests\n\n\n\n`cargo test`\n\n\n\n### Misc\n\nSee file `.ignore` for directories and files ignored by `cargo watch`\n", "file_path": "README.md", "rank": 89, "score": 1.5231794638929923 }, { "content": " }\n\n\n\n WindowEvent::Resized(size) => {\n\n let size = size.to_logical::<i32>(1.0);\n\n // bind size\n\n world_state.width = size.width;\n\n world_state.height = size.height;\n\n }\n\n\n\n WindowEvent::KeyboardInput { input, .. } => {\n\n if input.state == ElementState::Pressed {\n\n if let Some(keycode) = input.virtual_keycode {\n\n match keycode {\n\n VirtualKeyCode::Escape => {\n\n info!(\"Bye now...\");\n\n world_state.is_running = false;\n\n buffer.delete();\n\n *control_flow = ControlFlow::Exit;\n\n }\n\n\n", "file_path": "src/input.rs", "rank": 90, "score": 1.421631211007973 } ]
Rust
libsplinter/src/peer/notification.rs
rbuysse/splinter
1864eb39be8c44f910dc0ce79693fea7f136fd5a
use std::collections::{HashMap, VecDeque}; use std::sync::mpsc::{Receiver, TryRecvError}; use super::error::PeerManagerError; use super::PeerTokenPair; #[derive(Debug, PartialEq, Clone)] pub enum PeerManagerNotification { Connected { peer: PeerTokenPair }, Disconnected { peer: PeerTokenPair }, } pub struct PeerNotificationIter { pub(super) recv: Receiver<PeerManagerNotification>, } impl PeerNotificationIter { pub fn try_next(&self) -> Result<Option<PeerManagerNotification>, PeerManagerError> { match self.recv.try_recv() { Ok(notifications) => Ok(Some(notifications)), Err(TryRecvError::Empty) => Ok(None), Err(TryRecvError::Disconnected) => Err(PeerManagerError::SendMessageError( "The peer manager is no longer running".into(), )), } } } impl Iterator for PeerNotificationIter { type Item = PeerManagerNotification; fn next(&mut self) -> Option<Self::Item> { match self.recv.recv() { Ok(notification) => Some(notification), Err(_) => { None } } } } pub type SubscriberId = usize; pub(super) type Subscriber = Box<dyn Fn(PeerManagerNotification) -> Result<(), Box<dyn std::error::Error>> + Send>; pub(super) struct SubscriberMap { queue: VecDeque<PeerManagerNotification>, queue_limit: usize, subscribers: HashMap<SubscriberId, Subscriber>, next_id: SubscriberId, } impl SubscriberMap { pub fn new() -> Self { Self::new_with_queue_limit(std::u16::MAX as usize) } pub fn new_with_queue_limit(limit: usize) -> Self { Self { queue: VecDeque::new(), queue_limit: limit, subscribers: HashMap::new(), next_id: 0, } } pub fn broadcast(&mut self, notification: PeerManagerNotification) { self.queue.push_back(notification); if self.queue.len() > self.queue_limit { self.queue.pop_front(); } if self.subscribers.is_empty() { return; } while let Some(notification) = self.queue.pop_front() { let mut failures = vec![]; for (id, callback) in self.subscribers.iter() { if let Err(err) = (*callback)(notification.clone()) { failures.push(*id); debug!("Dropping subscriber ({}): {}", id, err); } } for id in 
failures { self.subscribers.remove(&id); } } } pub fn add_subscriber(&mut self, subscriber: Subscriber) -> SubscriberId { let subscriber_id = self.next_id; self.next_id += 1; if self.subscribers.is_empty() { while let Some(notification) = self.queue.pop_front() { if let Err(err) = (*subscriber)(notification) { debug!("Dropping subscriber on add ({}): {}", subscriber_id, err); return subscriber_id; } } } self.subscribers.insert(subscriber_id, subscriber); subscriber_id } pub fn remove_subscriber(&mut self, subscriber_id: SubscriberId) { self.subscribers.remove(&subscriber_id); } } #[cfg(test)] pub mod tests { use super::*; use std::sync::mpsc::channel; use std::thread; use crate::peer::PeerAuthorizationToken; #[test] fn test_peer_manager_notifications() { let (send, recv) = channel(); let notifcation_iter = PeerNotificationIter { recv }; let join_handle = thread::spawn(move || { for i in 0..5 { send.send(PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: format!("test_peer{}", i), }, PeerAuthorizationToken::Trust { peer_id: "local".into(), }, ), }) .unwrap(); } }); let mut notifications_sent = 0; for notifcation in notifcation_iter { assert_eq!( notifcation, PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: format!("test_peer{}", notifications_sent), }, PeerAuthorizationToken::Trust { peer_id: "local".into(), }, ), } ); notifications_sent += 1; } assert_eq!(notifications_sent, 5); join_handle.join().unwrap(); } #[test] fn test_broadcast_queue() { let mut subscriber_map = SubscriberMap::new(); for i in 0..3 { subscriber_map.broadcast(PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: format!("test_peer_{}", i), }, PeerAuthorizationToken::Trust { peer_id: "local".into(), }, ), }) } let (tx, sub1) = channel(); let _sub1_id = subscriber_map.add_subscriber(Box::new(move |notification| { 
tx.send(notification).map_err(Box::from) })); assert_eq!( sub1.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_0".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); assert_eq!( sub1.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_1".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); assert_eq!( sub1.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_2".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); assert!(matches!( sub1.try_recv(), Err(std::sync::mpsc::TryRecvError::Empty) )); let (tx, sub2) = channel(); let _sub2_id = subscriber_map.add_subscriber(Box::new(move |notification| { tx.send(notification).map_err(Box::from) })); subscriber_map.broadcast(PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_3".into(), }, PeerAuthorizationToken::Trust { peer_id: "local".into(), }, ), }); assert_eq!( sub1.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_3".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); assert_eq!( sub2.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_3".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); } #[test] fn test_broadcast_queue_limit() { let mut subscriber_map = SubscriberMap::new_with_queue_limit(1); for i in 0..3 { subscriber_map.broadcast(PeerManagerNotification::Connected { peer: PeerTokenPair::new( 
PeerAuthorizationToken::Trust { peer_id: format!("test_peer_{}", i), }, PeerAuthorizationToken::Trust { peer_id: "local".into(), }, ), }) } let (tx, sub1) = channel(); let _sub1_id = subscriber_map.add_subscriber(Box::new(move |notification| { tx.send(notification).map_err(Box::from) })); assert_eq!( sub1.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_2".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); assert!(matches!( sub1.try_recv(), Err(std::sync::mpsc::TryRecvError::Empty) )); subscriber_map.broadcast(PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_3".into(), }, PeerAuthorizationToken::Trust { peer_id: "local".into(), }, ), }); subscriber_map.broadcast(PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_4".into(), }, PeerAuthorizationToken::Trust { peer_id: "local".into(), }, ), }); assert_eq!( sub1.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_3".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); assert_eq!( sub1.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_4".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); assert!(matches!( sub1.try_recv(), Err(std::sync::mpsc::TryRecvError::Empty) )); } }
use std::collections::{HashMap, VecDeque}; use std::sync::mpsc::{Receiver, TryRecvError}; use super::error::PeerManagerError; use super::PeerTokenPair; #[derive(Debug, PartialEq, Clone)] pub enum PeerManagerNotification { Connected { peer: PeerTokenPair }, Disconnected { peer: PeerTokenPair }, } pub struct PeerNotificationIter { pub(super) recv: Receiver<PeerManagerNotification>, } impl PeerNotificationIter { pub fn try_next(&self) -> Result<Option<PeerManagerNotification>, PeerManagerError> { match self.recv.try_recv() { Ok(notifications) => Ok(Some(notifications)), Err(TryRecvError::Empty) => Ok(None), Err(TryRecvError::Disconnected) => Err(PeerManagerError::SendMessageError( "The peer manager is no longer running".into(), )), } } } impl Iterator for PeerNotificationIter { type Item = PeerManagerNotification; fn next(&mut self) -> Option<Self::Item> { match self.recv.recv() { Ok(notification) => Some(notification), Err(_) => { None } } } } pub type SubscriberId = usize; pub(super) type Subscriber = Box<dyn Fn(PeerManagerNotification) -> Result<(), Box<dyn std::error::Error>> + Send>; pub(super) struct SubscriberMap { queue: VecDeque<PeerManagerNotification>, queue_limit: usize, subscribers: HashMap<SubscriberId, Subscriber>, next_id: SubscriberId, } impl SubscriberMap { pub fn new() -> Self { Self::new_with_queue_limit(std::u16::MAX as usize) } pub fn new_with_queue_limit(limit: usize) -> Self { Self { queue: VecDeque::new(), queue_limit: limit, subscribers: HashMap::new(), next_id: 0, } } pub fn broadcast(&mut self, notification: PeerManagerNotification) { self.queue.push_back(notification); if self.queue.len() > self.queue_limit { self.queue.pop_front(); } if self.subscribers.is_empty() { return; } while let Some(notification) = self.queue.pop_front() { let mut failures = vec![]; for (id, callback) in self.subscribers.iter() { if let Err(err) = (*callback)(notification.clone()) { failures.push(*id); debug!("Dropping subscriber ({}): {}", id, err); } } for id in 
failures { self.subscribers.remove(&id); } } } pub fn add_subscriber(&mut self, subscriber: Subscriber) -> SubscriberId { let subscriber_id = self.next_id; self.next_id += 1; if self.subscribers.is_empty() { while let Some(notification) = self.queue.pop_front() { if let Err(err) = (*subscriber)(notification) { debug!("Dropping subscriber on add ({}): {}", subscriber_id, err); return subscriber_id; } } } self.subscribers.insert(subscriber_id, subscriber); subscriber_id } pub fn remove_subscriber(&mut self, subscriber_id: SubscriberId) { self.subscribers.remove(&subscriber_id); } } #[cfg(test)] pub mod tests { use super::*; use std::sync::mpsc::channel; use std::thread; use crate::peer::PeerAuthorizationToken; #[test] fn test_peer_manager_notifications() { let (send, recv) = channel(); let notifcation_iter = PeerNotificationIter { recv }; let join_handle = thread::spawn(move || { for i in 0..5 { send.send(PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: format!("test_peer{}", i), }, PeerAuthorizationToken::Trust { peer_id: "local".into(), }, ), }) .unwrap(); } }); let mut notifications_sent = 0; for notifcation in notifcation_iter { assert_eq!( notifcation, PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: format!("test_peer{}", notifications_sent), }, PeerAuthorizationToken::Trust { peer_id: "local".into(), }, ), } ); notifications_sent += 1; } assert_eq!(notifications_sent, 5); join_handle.join().unwrap(); } #[test] fn test_broadcast_queue() { let mut subscriber_map = SubscriberMap::new(); for i in 0..3 { subscriber_map.broadcast(PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: format!("test_peer_{}", i), }, PeerAuthorizationToken::Trust { peer_id: "local".into(), }, ), }) } let (tx, sub1) = channel(); let _sub1_id = subscriber_map.add_subscriber(Box::new(move |notification| { 
tx.send(notification).map_err(Box::from) })); assert_eq!( sub1.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_0".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); assert_eq!( sub1.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_1".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } );
#[test] fn test_broadcast_queue_limit() { let mut subscriber_map = SubscriberMap::new_with_queue_limit(1); for i in 0..3 { subscriber_map.broadcast(PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: format!("test_peer_{}", i), }, PeerAuthorizationToken::Trust { peer_id: "local".into(), }, ), }) } let (tx, sub1) = channel(); let _sub1_id = subscriber_map.add_subscriber(Box::new(move |notification| { tx.send(notification).map_err(Box::from) })); assert_eq!( sub1.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_2".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); assert!(matches!( sub1.try_recv(), Err(std::sync::mpsc::TryRecvError::Empty) )); subscriber_map.broadcast(PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_3".into(), }, PeerAuthorizationToken::Trust { peer_id: "local".into(), }, ), }); subscriber_map.broadcast(PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_4".into(), }, PeerAuthorizationToken::Trust { peer_id: "local".into(), }, ), }); assert_eq!( sub1.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_3".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); assert_eq!( sub1.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_4".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); assert!(matches!( sub1.try_recv(), Err(std::sync::mpsc::TryRecvError::Empty) )); } }
assert_eq!( sub1.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_2".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); assert!(matches!( sub1.try_recv(), Err(std::sync::mpsc::TryRecvError::Empty) )); let (tx, sub2) = channel(); let _sub2_id = subscriber_map.add_subscriber(Box::new(move |notification| { tx.send(notification).map_err(Box::from) })); subscriber_map.broadcast(PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_3".into(), }, PeerAuthorizationToken::Trust { peer_id: "local".into(), }, ), }); assert_eq!( sub1.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_3".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); assert_eq!( sub2.try_recv().expect("Unable to receive value"), PeerManagerNotification::Connected { peer: PeerTokenPair::new( PeerAuthorizationToken::Trust { peer_id: "test_peer_3".into() }, PeerAuthorizationToken::Trust { peer_id: "local".into() }, ) } ); }
function_block-function_prefix_line
[ { "content": "#[cfg(not(any(feature = \"trust-authorization\", feature = \"challenge-authorization\")))]\n\nfn connect_msg_bytes() -> Result<Vec<u8>, AuthorizationManagerError> {\n\n let connect_msg = AuthorizationMessage::ConnectRequest(ConnectRequest::Bidirectional);\n\n\n\n IntoBytes::<network::NetworkMessage>::into_bytes(NetworkMessage::from(connect_msg)).map_err(\n\n |err| AuthorizationManagerError(format!(\"Unable to send connect request: {}\", err)),\n\n )\n\n}\n\n\n", "file_path": "libsplinter/src/network/auth/mod.rs", "rank": 0, "score": 405364.59463423037 }, { "content": "/// Creates NetworkHeartbeat message and serializes it into a byte array.\n\nfn create_heartbeat() -> Result<Vec<u8>, ConnectionManagerError> {\n\n IntoBytes::<network::NetworkMessage>::into_bytes(NetworkMessage::NetworkHeartbeat(\n\n NetworkHeartbeat,\n\n ))\n\n .map_err(|_| {\n\n ConnectionManagerError::HeartbeatError(\"cannot create NetworkHeartbeat message\".to_string())\n\n })\n\n}\n", "file_path": "libsplinter/src/network/connection_manager/builder.rs", "rank": 1, "score": 390022.33275207214 }, { "content": "/// Defines functions to receive messages from connections within the connection matrix\n\npub trait ConnectionMatrixReceiver: Clone + Send {\n\n /// Attempts to receive a message. The envelope returned contains both the payload (message)\n\n /// and the identifier of the connection on which it was received. This function will block\n\n /// until there is a message to receive. The message will come from the first ready connection\n\n /// detected.\n\n ///\n\n /// If the receive failed, a `ConnectionMatrixRecvError` is returned.\n\n fn recv(&self) -> Result<ConnectionMatrixEnvelope, ConnectionMatrixRecvError>;\n\n\n\n /// Attempts to receive a message, with a timeout. The envelope returned contains both the\n\n /// payload (message) and the identifier of the connection on which it was received. 
This\n\n /// function will block until there is a message to receive or the specified timeout expires.\n\n /// The message will come from the first ready connection detected.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `timeout` - `Duration` for the amount of time the function should block waiting on an\n\n /// envelope to arrive\n\n ///\n\n /// If the receive failed or timed out, a `ConnectionMatrixRecvTimeoutError` is returned.\n\n fn recv_timeout(\n\n &self,\n\n timeout: Duration,\n\n ) -> Result<ConnectionMatrixEnvelope, ConnectionMatrixRecvTimeoutError>;\n\n}\n", "file_path": "libsplinter/src/transport/matrix.rs", "rank": 2, "score": 389324.2279697521 }, { "content": "type ShutdownSignalFn = Box<dyn Fn() -> Result<(), ServiceProcessorError> + Send>;\n\n\n\n/// The ServiceProcessor handles the networking for services. This includes talking to the\n\n/// splinter node, connecting for authorization, registering the services, and routing\n\n/// direct messages to the correct service.\n\npub struct ServiceProcessor {\n\n shared_state: Arc<RwLock<SharedState>>,\n\n services: Vec<Box<dyn ServiceInstance>>,\n\n mesh: Mesh,\n\n circuit: String,\n\n node_mesh_id: String,\n\n network_sender: Sender<Vec<u8>>,\n\n network_receiver: Receiver<Vec<u8>>,\n\n inbound_router: InboundRouter<CircuitMessageType>,\n\n inbound_receiver: Receiver<Result<(CircuitMessageType, Vec<u8>), channel::RecvError>>,\n\n channel_capacity: usize,\n\n}\n\n\n\nimpl ServiceProcessor {\n\n pub fn new(\n", "file_path": "libsplinter/src/runtime/service/instance/processor/mod.rs", "rank": 3, "score": 378492.2207054001 }, { "content": "/// Responsible for broadcasting connection manager notifications.\n\nstruct SubscriberMap {\n\n subscribers: HashMap<SubscriberId, Subscriber>,\n\n next_id: SubscriberId,\n\n}\n\n\n\nimpl SubscriberMap {\n\n fn new() -> Self {\n\n Self {\n\n subscribers: HashMap::new(),\n\n next_id: 0,\n\n }\n\n }\n\n\n\n fn broadcast(&mut self, notification: 
ConnectionManagerNotification) {\n\n let mut failures = vec![];\n\n for (id, callback) in self.subscribers.iter() {\n\n if let Err(err) = (*callback)(notification.clone()) {\n\n failures.push(*id);\n\n debug!(\"Dropping subscriber ({}): {}\", id, err);\n\n }\n", "file_path": "libsplinter/src/network/connection_manager/mod.rs", "rank": 4, "score": 376916.3825818316 }, { "content": "pub trait Receiver<T>: Send {\n\n fn recv(&self) -> Result<T, RecvError>;\n\n fn try_recv(&self) -> Result<T, TryRecvError>;\n\n fn recv_timeout(&self, timeout: Duration) -> Result<T, RecvTimeoutError>;\n\n}\n\n\n", "file_path": "libsplinter/src/channel/mod.rs", "rank": 5, "score": 357637.8625712461 }, { "content": "#[cfg(any(feature = \"trust-authorization\", feature = \"challenge-authorization\"))]\n\nfn protocol_msg_bytes() -> Result<Vec<u8>, AuthorizationManagerError> {\n\n let protocol_msg = AuthorizationMessage::AuthProtocolRequest(AuthProtocolRequest {\n\n auth_protocol_min: PEER_AUTHORIZATION_PROTOCOL_MIN,\n\n auth_protocol_max: PEER_AUTHORIZATION_PROTOCOL_VERSION,\n\n });\n\n\n\n IntoBytes::<network::NetworkMessage>::into_bytes(NetworkMessage::from(protocol_msg)).map_err(\n\n |err| AuthorizationManagerError(format!(\"Unable to send protocol request: {}\", err)),\n\n )\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct AuthorizationMessageSender {\n\n sender: mpsc::Sender<Vec<u8>>,\n\n}\n\n\n\nimpl AuthorizationMessageSender {\n\n pub fn send(&self, msg: Vec<u8>) -> Result<(), Vec<u8>> {\n\n self.sender.send(msg).map_err(|err| err.0)\n\n }\n", "file_path": "libsplinter/src/network/auth/mod.rs", "rank": 6, "score": 357431.39320479945 }, { "content": "pub fn into_bytes(payload: web::Payload) -> impl Future<Item = Vec<u8>, Error = ActixError> {\n\n payload\n\n .from_err::<ActixError>()\n\n .fold(web::BytesMut::new(), move |mut body, chunk| {\n\n body.extend_from_slice(&chunk);\n\n Ok::<_, ActixError>(body)\n\n })\n\n .and_then(|body| Ok(body.to_vec()))\n\n .into_future()\n\n}\n\n\n", 
"file_path": "libsplinter/src/rest_api/actix_web_1/resource.rs", "rank": 7, "score": 355431.91659521137 }, { "content": "fn parse_service_peer_group(peer_group: &str) -> Result<Vec<&str>, CliError> {\n\n peer_group\n\n .split(',')\n\n .map(|peer| {\n\n if peer.is_empty() {\n\n Err(CliError::ActionError(\n\n \"Empty service_id detected in '--service-peer-group' list\".into(),\n\n ))\n\n } else {\n\n Ok(peer)\n\n }\n\n })\n\n .collect::<Result<_, _>>()\n\n}\n\n\n", "file_path": "cli/src/action/circuit/mod.rs", "rank": 8, "score": 346878.3119380408 }, { "content": "/// Run all pending database migrations.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `conn` - Connection to SQLite database\n\n///\n\npub fn run_migrations(conn: &SqliteConnection) -> Result<(), InternalError> {\n\n embedded_migrations::run(conn).map_err(|err| InternalError::from_source(Box::new(err)))?;\n\n\n\n debug!(\"Successfully applied Splinter SQLite migrations\");\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "libsplinter/src/migrations/diesel/sqlite/mod.rs", "rank": 9, "score": 345573.9461688418 }, { "content": "/// Run all pending database migrations.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `conn` - Connection to PostgreSQL database\n\n///\n\npub fn run_migrations(conn: &PgConnection) -> Result<(), InternalError> {\n\n embedded_migrations::run(conn).map_err(|err| InternalError::from_source(Box::new(err)))?;\n\n\n\n debug!(\"Successfully applied Splinter PostgreSQL migrations\");\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "libsplinter/src/migrations/diesel/postgres/mod.rs", "rank": 10, "score": 345573.9461688417 }, { "content": "fn run<I: IntoIterator<Item = T>, T: Into<OsString> + Clone>(args: I) -> Result<(), CliError> {\n\n let mut app = clap_app!(myapp =>\n\n (name: APP_NAME)\n\n (version: VERSION)\n\n (author: \"Cargill\")\n\n (about: \"Command line for Splinter\")\n\n (@arg verbose: -v +multiple +global \"Log verbosely\")\n\n (@arg quiet: -q --quiet +global \"Do not display output\")\n\n (@setting 
SubcommandRequiredElseHelp)\n\n );\n\n\n\n app = app\n\n .subcommand(\n\n SubCommand::with_name(\"keygen\")\n\n .about(\"Generates secp256k1 keys\")\n\n .arg(\n\n Arg::with_name(\"key-name\")\n\n .takes_value(true)\n\n .help(\"Name of keys generated; defaults to user name\"),\n\n )\n", "file_path": "cli/src/main.rs", "rank": 11, "score": 344513.5721475212 }, { "content": "/// Defines a function to send a message using a connection identifier\n\npub trait ConnectionMatrixSender: Clone + Send {\n\n /// Sends a message over the specified connection.\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `id` - the identifier of the connection on which the message should be sent\n\n /// * `message` - the bytes of the message\n\n ///\n\n /// If the send failed, a `ConnectionMatrixSendError` will be returned.\n\n fn send(&self, id: String, message: Vec<u8>) -> Result<(), ConnectionMatrixSendError>;\n\n}\n\n\n", "file_path": "libsplinter/src/transport/matrix.rs", "rank": 12, "score": 336891.77276417287 }, { "content": "/// Run all pending database migrations.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `conn` - Connection to SQLite database\n\n///\n\npub fn run_migrations(conn: &SqliteConnection) -> Result<(), InternalError> {\n\n embedded_migrations::run(conn).map_err(|err| InternalError::from_source(Box::new(err)))?;\n\n\n\n run_transact_sqlite_migrations(conn)\n\n .map_err(|err| InternalError::from_source(Box::new(err)))?;\n\n\n\n run_sawtooth_sqlite_migrations(conn)\n\n .map_err(|err| InternalError::from_source(Box::new(err)))?;\n\n\n\n debug!(\"Successfully applied Scabbard SQLite migrations\");\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "services/scabbard/libscabbard/src/migrations/diesel/sqlite/mod.rs", "rank": 13, "score": 336312.792898403 }, { "content": "/// Run all pending database migrations.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `conn` - Connection to PostgreSQL database\n\n///\n\npub fn run_migrations(conn: &PgConnection) -> Result<(), InternalError> {\n\n 
embedded_migrations::run(conn).map_err(|err| InternalError::from_source(Box::new(err)))?;\n\n\n\n Ok(())\n\n}\n", "file_path": "services/echo/libecho/src/migrations/diesel/postgres/mod.rs", "rank": 14, "score": 336312.792898403 }, { "content": "/// Run all pending database migrations.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `conn` - Connection to SQLite database\n\n///\n\npub fn run_migrations(conn: &SqliteConnection) -> Result<(), InternalError> {\n\n embedded_migrations::run(conn).map_err(|err| InternalError::from_source(Box::new(err)))?;\n\n\n\n Ok(())\n\n}\n", "file_path": "services/echo/libecho/src/migrations/diesel/sqlite/mod.rs", "rank": 15, "score": 336312.792898403 }, { "content": "/// Run all pending database migrations.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `conn` - Connection to PostgreSQL database\n\n///\n\npub fn run_migrations(conn: &PgConnection) -> Result<(), InternalError> {\n\n embedded_migrations::run(conn).map_err(|err| InternalError::from_source(Box::new(err)))?;\n\n\n\n run_transact_postgres_migrations(conn)\n\n .map_err(|err| InternalError::from_source(Box::new(err)))?;\n\n\n\n run_sawtooth_postgres_migrations(conn)\n\n .map_err(|err| InternalError::from_source(Box::new(err)))?;\n\n\n\n debug!(\"Successfully applied Scabbard PostgreSQL migrations\");\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "services/scabbard/libscabbard/src/migrations/diesel/postgres/mod.rs", "rank": 16, "score": 336312.792898403 }, { "content": "/// Parse a service argument into a list. Check if the argument is in json or csv format\n\n/// and return the list of strings. 
An error is returned if json fmt cannot be parsed.\n\nfn parse_list(values_list: &str) -> Result<Vec<String>, String> {\n\n if values_list.starts_with('[') {\n\n serde_json::from_str(values_list).map_err(|err| err.to_string())\n\n } else {\n\n Ok(values_list\n\n .split(',')\n\n .map(String::from)\n\n .collect::<Vec<String>>())\n\n }\n\n}\n\n\n", "file_path": "services/scabbard/libscabbard/src/service/factory/mod.rs", "rank": 17, "score": 335715.87094378105 }, { "content": "/// Get whether there are any pending migrations\n\n///\n\n/// # Arguments\n\n///\n\n/// * `conn` - Connection to SQLite database\n\n///\n\npub fn any_pending_migrations(conn: &SqliteConnection) -> Result<bool, InternalError> {\n\n let current_version = conn.latest_run_migration_version().unwrap_or(None);\n\n\n\n // Diesel 1.4 only allows access to the list of migrations via attempting\n\n // to run the migrations, so we'll do that in a test transaction.\n\n let latest_version =\n\n conn.test_transaction::<Result<Option<String>, InternalError>, (), _>(|| {\n\n Ok(match embedded_migrations::run(conn) {\n\n Ok(_) => conn\n\n .latest_run_migration_version()\n\n .map_err(|err| InternalError::from_source(Box::new(err))),\n\n Err(err) => Err(InternalError::from_source(Box::new(err))),\n\n })\n\n })?;\n\n\n\n Ok(current_version == latest_version)\n\n}\n", "file_path": "libsplinter/src/migrations/diesel/sqlite/mod.rs", "rank": 18, "score": 335106.9953379437 }, { "content": "/// Get whether there are any pending migrations\n\n///\n\n/// # Arguments\n\n///\n\n/// * `conn` - Connection to PostgreSQL database\n\n///\n\npub fn any_pending_migrations(conn: &PgConnection) -> Result<bool, InternalError> {\n\n let current_version = conn.latest_run_migration_version().unwrap_or(None);\n\n\n\n // Diesel 1.4 only allows access to the list of migrations via attempting\n\n // to run the migrations, so we'll do that in a test transaction.\n\n let latest_version =\n\n conn.test_transaction::<Result<Option<String>, 
InternalError>, (), _>(|| {\n\n Ok(match embedded_migrations::run(conn) {\n\n Ok(_) => conn\n\n .latest_run_migration_version()\n\n .map_err(|err| InternalError::from_source(Box::new(err))),\n\n Err(err) => Err(InternalError::from_source(Box::new(err))),\n\n })\n\n })?;\n\n\n\n Ok(current_version == latest_version)\n\n}\n", "file_path": "libsplinter/src/migrations/diesel/postgres/mod.rs", "rank": 19, "score": 335106.9953379438 }, { "content": "/// Defines connection lifecycle operations (addition and removal of a `Connection`)\n\n///\n\n/// This trait is distinct from the sender/receiver traits because the lifecycle operations\n\n/// typically occur in a separate component. Thus, we can expose this trait only where the\n\n/// lifecycle operations are performed and nowhere else in the system.\n\npub trait ConnectionMatrixLifeCycle: Clone + Send {\n\n /// Adds a connection to the connection matrix\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `connection` - Connection being added to the connection matrix\n\n /// * `id` - Connection identifier; must be unique within the connection matrix\n\n ///\n\n /// If the add failed, a `ConnectionMatrixAddError` will be returned.\n\n fn add(\n\n &self,\n\n connection: Box<dyn Connection>,\n\n id: String,\n\n ) -> Result<usize, ConnectionMatrixAddError>;\n\n\n\n /// Removes a connection from the connection matrix\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `id` - the connection identifier for the connection being removed\n\n ///\n\n /// If the remove failed, a `ConnectionMatrixRemoveError` will be returned.\n\n fn remove(&self, id: &str) -> Result<Box<dyn Connection>, ConnectionMatrixRemoveError>;\n\n}\n\n\n", "file_path": "libsplinter/src/transport/matrix.rs", "rank": 20, "score": 332160.1403981213 }, { "content": "pub fn parse_hex(hex: &str) -> Result<Vec<u8>, HexError> {\n\n if hex.len() % 2 != 0 {\n\n return Err(HexError {\n\n context: format!(\"{} is not valid hex: odd number of digits\", hex),\n\n source: None,\n\n });\n\n 
}\n\n\n\n let mut res = vec![];\n\n for i in (0..hex.len()).step_by(2) {\n\n res.push(\n\n u8::from_str_radix(&hex[i..i + 2], 16).map_err(|err| HexError {\n\n context: format!(\"{} contains invalid hex\", hex),\n\n source: Some(Box::new(err)),\n\n })?,\n\n );\n\n }\n\n\n\n Ok(res)\n\n}\n\n\n", "file_path": "libsplinter/src/hex.rs", "rank": 21, "score": 331610.95563734206 }, { "content": "type Subscriber =\n\n Box<dyn Fn(ConnectionManagerNotification) -> Result<(), Box<dyn std::error::Error>> + Send>;\n\n\n", "file_path": "libsplinter/src/network/connection_manager/mod.rs", "rank": 22, "score": 331390.0764237144 }, { "content": "/// Execute a test against a postgres database.\n\n///\n\n/// This function will create a database, based on the name of the current test being run, if\n\n/// known. It will then run the migrations against the new database. After the test completes, it\n\n/// will drop the test database previously created, regardless of success or failure of the test.\n\n///\n\n/// The base url for a postgres server is specified by the environment variable\n\n/// `DIESEL_POSTGRES_TEST_URL` or defaults to `\"postgres://postgres:test@localhost:5432\"`.\n\npub fn run_postgres_test<T>(test: T) -> Result<(), Box<dyn Error>>\n\nwhere\n\n T: FnOnce(&str) -> Result<(), Box<dyn Error>> + panic::UnwindSafe,\n\n{\n\n let (drop_tables_res, test_result) = {\n\n let base_url = match env::var(\"DIESEL_POSTGRES_TEST_URL\").ok() {\n\n Some(url) => url,\n\n None => {\n\n println!(\n\n \"Ignoring {}\",\n\n std::thread::current().name().unwrap_or(\"<unknown test>\")\n\n );\n\n return Ok(());\n\n }\n\n };\n\n\n\n let db_name = db_name();\n\n {\n\n let conn = PgConnection::establish(&base_url)?;\n\n conn.batch_execute(&format!(\"create database {};\", db_name))?;\n", "file_path": "services/scabbard/libscabbard/src/store/diesel_postgres_test.rs", "rank": 23, "score": 326943.0236504476 }, { "content": "/// Get whether there are any pending migrations\n\n///\n\n/// # 
Arguments\n\n///\n\n/// * `conn` - Connection to PostgreSQL database\n\n///\n\npub fn any_pending_migrations(conn: &PgConnection) -> Result<bool, InternalError> {\n\n let current_version = conn.latest_run_migration_version().unwrap_or(None);\n\n\n\n // Diesel 1.4 only allows access to the list of migrations via attempting\n\n // to run the migrations, so we'll do that in a test transaction.\n\n let latest_version =\n\n conn.test_transaction::<Result<Option<String>, InternalError>, (), _>(|| {\n\n Ok(match run_migrations(conn) {\n\n Ok(_) => conn\n\n .latest_run_migration_version()\n\n .map_err(|err| InternalError::from_source(Box::new(err))),\n\n Err(err) => Err(InternalError::from_source(Box::new(err))),\n\n })\n\n })?;\n\n\n\n Ok(current_version == latest_version)\n\n}\n", "file_path": "services/scabbard/libscabbard/src/migrations/diesel/postgres/mod.rs", "rank": 24, "score": 326210.27246461244 }, { "content": "/// Get whether there are any pending migrations\n\n///\n\n/// # Arguments\n\n///\n\n/// * `conn` - Connection to SQLite database\n\n///\n\npub fn any_pending_migrations(conn: &SqliteConnection) -> Result<bool, InternalError> {\n\n let current_version = conn.latest_run_migration_version().unwrap_or(None);\n\n\n\n // Diesel 1.4 only allows access to the list of migrations via attempting\n\n // to run the migrations, so we'll do that in a test transaction.\n\n let latest_version =\n\n conn.test_transaction::<Result<Option<String>, InternalError>, (), _>(|| {\n\n Ok(match run_migrations(conn) {\n\n Ok(_) => conn\n\n .latest_run_migration_version()\n\n .map_err(|err| InternalError::from_source(Box::new(err))),\n\n Err(err) => Err(InternalError::from_source(Box::new(err))),\n\n })\n\n })?;\n\n\n\n Ok(current_version == latest_version)\n\n}\n", "file_path": "services/scabbard/libscabbard/src/migrations/diesel/sqlite/mod.rs", "rank": 25, "score": 326210.2724646125 }, { "content": "/// Messages sent to ConnectionState to report on the status of a connection\n\n/// 
authorization attempt.\n\nenum AuthResult {\n\n Outbound {\n\n endpoint: String,\n\n auth_result: AuthorizationResult,\n\n },\n\n Inbound {\n\n endpoint: String,\n\n auth_result: AuthorizationResult,\n\n },\n\n}\n\n\n\n/// Creates, manages, and maintains connections. A connection manager\n\n/// guarantees that the connections it creates will be maintained via\n\n/// reconnections. This is not true for external connections.\n\npub struct ConnectionManager {\n\n pacemaker: pacemaker::Pacemaker,\n\n join_handle: thread::JoinHandle<()>,\n\n sender: Sender<CmMessage>,\n\n}\n\n\n", "file_path": "libsplinter/src/network/connection_manager/mod.rs", "rank": 26, "score": 324833.3112145192 }, { "content": "pub fn parse_hex(hex: &str) -> Result<Vec<u8>, HexError> {\n\n if hex.len() % 2 != 0 {\n\n return Err(HexError {\n\n context: format!(\"{} is not valid hex: odd number of digits\", hex),\n\n source: None,\n\n });\n\n }\n\n\n\n let mut res = vec![];\n\n for i in (0..hex.len()).step_by(2) {\n\n res.push(\n\n u8::from_str_radix(&hex[i..i + 2], 16).map_err(|err| HexError {\n\n context: format!(\"{} contains invalid hex\", hex),\n\n source: Some(Box::new(err)),\n\n })?,\n\n );\n\n }\n\n\n\n Ok(res)\n\n}\n", "file_path": "services/scabbard/libscabbard/src/hex.rs", "rank": 27, "score": 323513.15515966626 }, { "content": "pub fn parse_hex(hex: &str) -> Result<Vec<u8>, CliError> {\n\n if hex.len() % 2 != 0 {\n\n return Err(CliError::ActionError(format!(\n\n \"{} is not valid hex: odd number of digits\",\n\n hex\n\n )));\n\n }\n\n\n\n let mut res = vec![];\n\n for i in (0..hex.len()).step_by(2) {\n\n res.push(\n\n u8::from_str_radix(&hex[i..i + 2], 16)\n\n .map_err(|_| CliError::ActionError(format!(\"{} contains invalid hex\", hex)))?,\n\n );\n\n }\n\n\n\n Ok(res)\n\n}\n\n\n", "file_path": "cli/src/action/circuit/builder.rs", "rank": 28, "score": 323513.1551596662 }, { "content": "#[test]\n\npub fn test_registry_add_node() {\n\n // Start a two node network\n\n let mut network = 
Network::new()\n\n .with_default_rest_api_variant(RestApiVariant::ActixWeb1)\n\n .add_nodes_with_defaults(2)\n\n .expect(\"Unable to start single node ActixWeb1 network\");\n\n // Get the first node\n\n let node_a = network.node(0).expect(\"Unable to get node\");\n\n // Get the first node's registry client\n\n let registry_client = node_a.registry_client();\n\n // List all nodes in the registry\n\n let registry_list_response = registry_client\n\n .list_nodes(None)\n\n .expect(\"Registry get node request failed\");\n\n // Check the length of the returned list is 2\n\n assert_eq!(registry_list_response.data.len(), 2);\n\n let key = node_a\n\n .admin_signer()\n\n .clone_box()\n\n .public_key()\n", "file_path": "splinterd/tests/admin/registry.rs", "rank": 29, "score": 323330.80776544404 }, { "content": "fn log_connect_request_err(\n\n err: ConnectionManagerError,\n\n peer_id: &PeerAuthorizationToken,\n\n endpoint: &str,\n\n) {\n\n match err {\n\n ConnectionManagerError::ConnectionCreationError {\n\n context,\n\n error_kind: None,\n\n } => {\n\n info!(\n\n \"Unable to request connection for peer {}: {}\",\n\n peer_id, context\n\n );\n\n }\n\n ConnectionManagerError::ConnectionCreationError {\n\n context,\n\n error_kind: Some(err_kind),\n\n } => match err_kind {\n\n ErrorKind::ConnectionRefused => info!(\n", "file_path": "libsplinter/src/peer/mod.rs", "rank": 30, "score": 323180.3622503477 }, { "content": "fn handle_peer_manager_notification(\n\n notification: PeerManagerNotification,\n\n admin_shared: &mut AdminServiceShared,\n\n) {\n\n match notification {\n\n PeerManagerNotification::Connected { peer } => {\n\n debug!(\"Peer {} has connected\", peer);\n\n if let Err(err) = admin_shared.on_peer_connected(&peer) {\n\n error!(\"Error occurred while handling Connected: {}\", err);\n\n }\n\n }\n\n PeerManagerNotification::Disconnected { peer } => {\n\n debug!(\"Peer {} has disconnected\", peer);\n\n admin_shared.on_peer_disconnected(peer);\n\n }\n\n }\n\n}\n\n\n", 
"file_path": "libsplinter/src/admin/service/mod.rs", "rank": 31, "score": 316620.1452619737 }, { "content": "#[test]\n\npub fn test_2_party_circuit_duplicate_connection() {\n\n // Start a 2-node network\n\n let mut network = Network::new()\n\n .with_default_rest_api_variant(RestApiVariant::ActixWeb1)\n\n .add_nodes_with_defaults(2)\n\n .expect(\"Unable to start 2-node ActixWeb1 network\");\n\n // Get the first node in the network\n\n let node_a = network.node(0).expect(\"Unable to get first node\");\n\n\n\n // Get the second node in the network\n\n let node_b = network.node(1).expect(\"Unable to get second node\");\n\n let circuit_id = \"ABCDE-01234\";\n\n\n\n let peer_connector_b = node_b.peer_connector();\n\n let (tx, _notification_rx): (mpsc::Sender<TestEnum>, mpsc::Receiver<TestEnum>) =\n\n mpsc::channel();\n\n peer_connector_b\n\n .subscribe_sender(tx)\n\n .expect(\"Unable to get subscriber\");\n\n\n", "file_path": "splinterd/tests/admin/circuit_create.rs", "rank": 32, "score": 314322.12875234464 }, { "content": "/// A bi-directional connection between two nodes\n\npub trait Connection: Send {\n\n /// Attempt to send a message consisting of bytes across the connection.\n\n fn send(&mut self, message: &[u8]) -> Result<(), SendError>;\n\n\n\n /// Attempt to receive a message consisting of bytes from the connection.\n\n fn recv(&mut self) -> Result<Vec<u8>, RecvError>;\n\n\n\n /// Return the remote endpoint address for this connection.\n\n ///\n\n /// For TCP-based connection types, this will contain the remote peer\n\n /// socket address.\n\n fn remote_endpoint(&self) -> String;\n\n\n\n /// Return the local endpoint address for this connection.\n\n ///\n\n /// For TCP-based connection types, this will contain the local\n\n /// socket address.\n\n fn local_endpoint(&self) -> String;\n\n\n\n /// Shut down the connection.\n\n ///\n\n /// After the connection has been disconnected, messages cannot be sent\n\n /// or received.\n\n fn disconnect(&mut self) -> 
Result<(), DisconnectError>;\n\n\n\n /// Returns a `mio::event::Evented` for this connection which can be used for polling.\n\n fn evented(&self) -> &dyn Evented;\n\n}\n\n\n", "file_path": "libsplinter/src/transport/mod.rs", "rank": 33, "score": 313433.0878987238 }, { "content": "/// Interface used by consensus to create, check, accept, and reject proposals\n\npub trait ProposalManager: Send {\n\n /// Informs the manager if consensus will ask for proposals; if not, the manager does not need\n\n /// to build them.\n\n ///\n\n /// Some managers may take arbitrarily long to assemble/process proposals, and some consensus\n\n /// algorithms designate nodes that do not create proposals at all. This allows for optimizing\n\n /// performance in some cases.\n\n ///\n\n /// The default implementation does nothing, since this is only useful for some managers.\n\n fn should_build_proposals(&self, _should_build: bool) -> Result<(), ProposalManagerError> {\n\n Ok(())\n\n }\n\n\n\n /// Generate a new Proposal with the given consensus bytes that’s based on the previous\n\n /// proposal if Some, otherwise the manager will use the last applied proposal.\n\n fn create_proposal(\n\n &self,\n\n previous_proposal_id: Option<ProposalId>,\n\n consensus_data: Vec<u8>,\n\n ) -> Result<(), ProposalManagerError>;\n", "file_path": "libsplinter/src/consensus/mod.rs", "rank": 34, "score": 307972.1347206213 }, { "content": "fn send(mesh: &Mesh, id: String, msg: &[u8]) {\n\n if let Err(err) = mesh.send(Envelope::new(id.clone(), msg.to_vec())) {\n\n eprintln!(\"Error sending to {}: {:?}\", id, err);\n\n }\n\n}\n", "file_path": "libsplinter/examples/mesh-echo-peer.rs", "rank": 35, "score": 307033.75206760044 }, { "content": "#[cfg(feature = \"admin-service\")]\n\npub fn deserialize_hex<'de, D>(deserializer: D) -> Result<Vec<u8>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n struct DeserializeHex;\n\n impl<'de> de::Visitor<'de> for DeserializeHex {\n\n /// Return type of this visitor. 
This visitor computes the max of a\n\n /// sequence of values of type T, so the type of the maximum is T.\n\n type Value = Vec<u8>;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"a hex string\")\n\n }\n\n\n\n fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n match parse_hex(v) {\n\n Ok(vec) => Ok(vec),\n", "file_path": "libsplinter/src/hex.rs", "rank": 36, "score": 306227.9694064851 }, { "content": "/// Run the sqlite migrations against the provided connection string\n\npub fn sqlite_migrations(connection_string: String) -> Result<(), CliError> {\n\n if connection_string != MEMORY {\n\n if let Err(err) = OpenOptions::new()\n\n .read(true)\n\n .write(true)\n\n .create(!Path::new(&connection_string).exists())\n\n .mode(0o640)\n\n .open(&connection_string)\n\n {\n\n match err.kind() {\n\n std::io::ErrorKind::NotFound => (),\n\n _ => {\n\n return Err(CliError::ActionError(format!(\n\n \"While opening: {} received {}\",\n\n &connection_string, err\n\n )))\n\n }\n\n }\n\n }\n\n }\n", "file_path": "cli/src/action/database/sqlite.rs", "rank": 37, "score": 304000.5687670284 }, { "content": "/// Constructs a new Splinter REST client from the CLI arguments.\n\nfn new_client(arg_matches: &Option<&ArgMatches<'_>>) -> Result<SplinterRestClient, CliError> {\n\n let url = arg_matches\n\n .and_then(|args| args.value_of(\"url\"))\n\n .map(ToOwned::to_owned)\n\n .or_else(|| std::env::var(SPLINTER_REST_API_URL_ENV).ok())\n\n .unwrap_or_else(|| DEFAULT_SPLINTER_REST_API_URL.to_string());\n\n\n\n let signer = load_signer(arg_matches.and_then(|args| args.value_of(\"private_key_file\")))?;\n\n\n\n SplinterRestClientBuilder::new()\n\n .with_url(url)\n\n .with_auth(create_cylinder_jwt_auth(signer)?)\n\n .build()\n\n}\n", "file_path": "cli/src/action/rbac/mod.rs", "rank": 38, "score": 303153.77248759405 }, { "content": "/// Manages role-based permissions associated with public 
keys.\n\n///\n\n/// The KeyPermissionManager provides an interface for providing details on whether or not a public\n\n/// key has permissions to act in specific roles.\n\n///\n\n/// Note: the underlying implementation determines how those values are set and modified - these\n\n/// operations are not exposed via this interface.\n\npub trait KeyPermissionManager: Send {\n\n /// Checks to see if a public key is permitted for the given role.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns a `KeyPermissionError` if the underling implementation encountered an error while\n\n /// checking the permissions.\n\n fn is_permitted(&self, public_key: &[u8], role: &str) -> KeyPermissionResult<bool>;\n\n}\n", "file_path": "libsplinter/src/keys/mod.rs", "rank": 39, "score": 302833.04655502806 }, { "content": "// To allow the NetworkMessageSender to not make decissions about the threading model, any channel\n\n// that is used must have the following Sender trait implemented, then the send end of the channel\n\n// can be passed to a Handler.\n\npub trait Sender<T>: Send {\n\n fn send(&self, t: T) -> Result<(), SendError>;\n\n fn box_clone(&self) -> Box<dyn Sender<T>>;\n\n}\n\n\n\nimpl<T> Clone for Box<dyn Sender<T>> {\n\n fn clone(&self) -> Box<dyn Sender<T>> {\n\n self.box_clone()\n\n }\n\n}\n", "file_path": "libsplinter/src/channel/mod.rs", "rank": 40, "score": 302681.800034546 }, { "content": "/// Gets the configured database_uri\n\n///\n\n///\n\n/// # Arguments\n\n///\n\n/// * `arg_matches` - an option of clap ['ArgMatches'](https://docs.rs/clap/2.33.3/clap/struct.ArgMatches.html).\n\nfn get_database_uri(arg_matches: Option<&ArgMatches>) -> Result<ConnectionUri, CliError> {\n\n let database_uri = if let Some(arg_matches) = arg_matches {\n\n match arg_matches.value_of(\"connect\") {\n\n Some(database_uri) => database_uri.to_string(),\n\n #[cfg(feature = \"sqlite\")]\n\n None => get_database_at_state_path(get_state_dir(Some(arg_matches))?)?,\n\n #[cfg(not(feature = 
\"sqlite\"))]\n\n None => get_default_database(),\n\n }\n\n } else if cfg!(feature = \"sqlite\") {\n\n get_database_at_state_path(get_state_dir(arg_matches)?)?\n\n } else {\n\n get_default_database()?\n\n };\n\n let parsed_uri = ConnectionUri::from_str(&database_uri)\n\n .map_err(|e| CliError::ActionError(format!(\"database uri could not be parsed: {}\", e)))?;\n\n if let ConnectionUri::Postgres(_) = parsed_uri {\n\n // Verify database connection.\n\n // If the connection is faulty, we want to abort here instead of\n\n // creating the store, as the store would perform reconnection attempts.\n\n PgConnection::establish(&database_uri[..]).map_err(|err| {\n\n CliError::ActionError(format!(\n\n \"Failed to establish database connection to '{}': {}\",\n\n database_uri, err\n\n ))\n\n })?;\n\n }\n\n Ok(parsed_uri)\n\n}\n\n\n", "file_path": "cli/src/action/database/upgrade/mod.rs", "rank": 41, "score": 302498.70864353055 }, { "content": "#[test]\n\npub fn test_2_party_circuit_creation_challenge_authorization_unidentified_peer() {\n\n // Start a 2-node network\n\n let mut network = Network::new()\n\n .with_default_rest_api_variant(RestApiVariant::ActixWeb1)\n\n .add_nodes_with_defaults(2)\n\n .expect(\"Unable to start 2-node ActixWeb1 network\");\n\n // Get the first node in the network\n\n let node_a = network.node(0).expect(\"Unable to get first node\");\n\n\n\n // Get the second node in the network\n\n let node_b = network.node(1).expect(\"Unable to get second node\");\n\n\n\n let peer_connector = node_a.peer_connector();\n\n let (tx, notification_rx): (mpsc::Sender<TestEnum>, mpsc::Receiver<TestEnum>) = mpsc::channel();\n\n peer_connector\n\n .subscribe_sender(tx)\n\n .expect(\"Unable to get subscriber\");\n\n\n\n let _peer_ref = peer_connector\n\n .add_unidentified_peer(\n", "file_path": "splinterd/tests/admin/circuit_create.rs", "rank": 42, "score": 301751.0737020276 }, { "content": "// This should never return an error since we received a message from this service 
id\n\npub fn get_peer_token_from_service_id(\n\n service_id: &str,\n\n local_node_id: &str,\n\n) -> Result<PeerTokenPair, InternalError> {\n\n let mut iter = service_id.split(\"::\");\n\n\n\n let admin_prefix = iter\n\n .next()\n\n .expect(\"str::split cannot return an empty iterator\")\n\n .to_string();\n\n\n\n if admin_prefix.is_empty() {\n\n return Err(InternalError::with_message(\n\n \"Empty admin_id argument detected\".into(),\n\n ));\n\n }\n\n\n\n let node_id = iter\n\n .next()\n\n .ok_or_else(|| InternalError::with_message(\"Missing node id for recipient\".into()))?;\n", "file_path": "libsplinter/src/admin/service/shared.rs", "rank": 43, "score": 299213.46821999084 }, { "content": "type OnReconnectHandle<T> = dyn Fn(&mut WebSocketClient<T>) + Send + Sync + 'static;\n\n\n\n/// WebSocket client. Configures Websocket connection and produces `Listen` future.\n\npub struct WebSocketClient<T: ParseBytes<T> + 'static = Vec<u8>> {\n\n url: String,\n\n authorization: String,\n\n on_message: Arc<dyn Fn(Context<T>, T) -> WsResponse + Send + Sync + 'static>,\n\n on_open: Option<Arc<dyn Fn(Context<T>) -> WsResponse + Send + Sync + 'static>>,\n\n on_error: Option<Arc<OnErrorHandle<T>>>,\n\n on_reconnect: Option<Arc<OnReconnectHandle<T>>>,\n\n reconnect: bool,\n\n reconnect_limit: u64,\n\n timeout: u64,\n\n additional_headers: HashMap<String, String>,\n\n}\n\n\n\nimpl<T: ParseBytes<T> + 'static> Clone for WebSocketClient<T> {\n\n fn clone(&self) -> Self {\n\n WebSocketClient {\n\n url: self.url.clone(),\n", "file_path": "libsplinter/src/events/ws/web_socket_client.rs", "rank": 44, "score": 294088.1889083575 }, { "content": "fn load_nodes_from_remote(url: &str) -> Result<Vec<Node>, CliError> {\n\n let bytes = reqwest::blocking::get(url)\n\n .and_then(|response| response.error_for_status())\n\n .map_err(|err| {\n\n CliError::ActionError(format!(\n\n \"Failed to fetch remote node file from {}: {}\",\n\n url, err\n\n ))\n\n })?\n\n .bytes()\n\n .map_err(|err| {\n\n 
CliError::ActionError(format!(\n\n \"Failed to get bytes from remote node file HTTP response: {}\",\n\n err\n\n ))\n\n })?;\n\n serde_yaml::from_slice(&bytes).map_err(|_| {\n\n CliError::ActionError(\n\n \"Failed to deserialize remote node file: Not a valid YAML sequence of nodes\".into(),\n\n )\n\n })\n\n}\n\n\n", "file_path": "cli/src/action/circuit/mod.rs", "rank": 45, "score": 293427.2343937081 }, { "content": "pub trait StateSubscriber: Send {\n\n fn handle_event(&self, event: StateChangeEvent) -> Result<(), StateSubscriberError>;\n\n}\n\n\n", "file_path": "services/scabbard/libscabbard/src/service/state/mod.rs", "rank": 46, "score": 293351.8394879522 }, { "content": "#[allow(clippy::too_many_arguments)]\n\nfn add_peer(\n\n peer_id: PeerAuthorizationToken,\n\n endpoints: Vec<String>,\n\n connector: Connector,\n\n unreferenced_peers: &mut UnreferencedPeerState,\n\n peers: &mut PeerMap,\n\n peer_remover: &PeerRemover,\n\n ref_map: &mut RefMap<PeerTokenPair>,\n\n subscribers: &mut SubscriberMap,\n\n required_local_auth: PeerAuthorizationToken,\n\n) -> Result<PeerRef, PeerRefAddError> {\n\n let peer_token_pair = PeerTokenPair::new(peer_id.clone(), required_local_auth.clone());\n\n\n\n if check_for_duplicate_endpoint(&peer_id, &endpoints, peers) {\n\n return Err(PeerRefAddError::AddError(format!(\n\n \"Peer {} contains endpoints that already belong to another peer using trust\",\n\n peer_id\n\n )));\n\n }\n\n\n", "file_path": "libsplinter/src/peer/mod.rs", "rank": 47, "score": 292856.66057952674 }, { "content": "/// Parse a service argument into a list. Check if the argument is in json or csv format\n\n/// and return the list of strings. 
An error is returned if json fmt cannot be parsed.\n\nfn parse_list(values_list: &str) -> Result<Vec<String>, String> {\n\n if values_list.starts_with('[') {\n\n serde_json::from_str(values_list).map_err(|err| err.to_string())\n\n } else {\n\n Ok(values_list\n\n .split(',')\n\n .map(String::from)\n\n .collect::<Vec<String>>())\n\n }\n\n}\n", "file_path": "services/echo/libecho/src/service/arguments_converter.rs", "rank": 48, "score": 291103.0027254112 }, { "content": "fn load_nodes_from_file(node_file: &str) -> Result<Vec<Node>, CliError> {\n\n if node_file.starts_with(\"http://\") || node_file.starts_with(\"https://\") {\n\n load_nodes_from_remote(node_file)\n\n } else {\n\n load_nodes_from_local(node_file)\n\n }\n\n}\n\n\n", "file_path": "cli/src/action/circuit/mod.rs", "rank": 49, "score": 289652.0715709213 }, { "content": "fn load_nodes_from_local(node_file: &str) -> Result<Vec<Node>, CliError> {\n\n let path = if node_file.starts_with(\"file://\") {\n\n node_file.split_at(7).1\n\n } else {\n\n node_file\n\n };\n\n let file = File::open(path).map_err(|err| {\n\n CliError::EnvironmentError(format!(\n\n \"Unable to open node file '{}': {}\",\n\n path,\n\n msg_from_io_error(err)\n\n ))\n\n })?;\n\n serde_yaml::from_reader(file).map_err(|_| {\n\n CliError::ActionError(format!(\n\n \"Failed to read node file '{}': Not a valid YAML sequence of nodes\",\n\n path\n\n ))\n\n })\n\n}\n\n\n", "file_path": "cli/src/action/circuit/mod.rs", "rank": 50, "score": 289652.0715709213 }, { "content": "fn parse_service(service: &str) -> Result<(String, Vec<String>), CliError> {\n\n let mut iter = service.split(\"::\");\n\n\n\n let service_id = iter\n\n .next()\n\n .expect(\"str::split cannot return an empty iterator\")\n\n .to_string();\n\n if service_id.is_empty() {\n\n return Err(CliError::ActionError(\n\n \"Empty '--service' argument detected\".into(),\n\n ));\n\n }\n\n\n\n let allowed_nodes = iter\n\n .next()\n\n .ok_or_else(|| {\n\n CliError::ActionError(format!(\n\n \"Missing 
allowed nodes for service '{}'\",\n\n service_id\n\n ))\n", "file_path": "cli/src/action/circuit/mod.rs", "rank": 51, "score": 288221.3276703044 }, { "content": "pub fn admin_service_id(node_id: &str) -> String {\n\n format!(\"admin::{}\", node_id)\n\n}\n", "file_path": "libsplinter/src/admin/token/mod.rs", "rank": 52, "score": 288187.2300658106 }, { "content": "pub fn admin_service_id(node_id: &str) -> String {\n\n format!(\"admin::{}\", node_id)\n\n}\n\n\n", "file_path": "libsplinter/src/admin/service/mod.rs", "rank": 53, "score": 288187.2300658106 }, { "content": "/// Defines a manager for fetching and/or generating a secret.\n\npub trait SecretManager: Sync + Send {\n\n /// Returns the secret\n\n fn secret(&self) -> Result<String, SecretManagerError>;\n\n\n\n /// Updates the secret\n\n fn update_secret(&mut self) -> Result<(), SecretManagerError>;\n\n}\n", "file_path": "libsplinter/src/rest_api/secrets/mod.rs", "rank": 54, "score": 287799.3417319805 }, { "content": "/// Parse a service argument into a list. Check if the argument is in json or csv format\n\n/// and return the list of strings. 
An error is returned if json fmt cannot be parsed.\n\nfn parse_list(values_list: &str) -> Result<Vec<String>, String> {\n\n if values_list.starts_with('[') {\n\n serde_json::from_str(values_list).map_err(|err| err.to_string())\n\n } else {\n\n Ok(values_list\n\n .split(',')\n\n .map(String::from)\n\n .collect::<Vec<String>>())\n\n }\n\n}\n", "file_path": "services/scabbard/libscabbard/src/service/v3/arguments_converter.rs", "rank": 55, "score": 287449.1486282806 }, { "content": "pub fn make_application_handler_registration_route<A: AdminCommands + Clone + 'static>(\n\n admin_commands: A,\n\n) -> Resource {\n\n let resource = Resource::build(\"/ws/admin/register/{type}\").add_request_guard(\n\n ProtocolVersionRangeGuard::new(\n\n ADMIN_APPLICATION_REGISTRATION_PROTOCOL_MIN,\n\n SPLINTER_PROTOCOL_VERSION,\n\n ),\n\n );\n\n\n\n #[cfg(feature = \"authorization\")]\n\n {\n\n resource.add_method(\n\n Method::Get,\n\n CIRCUIT_READ_PERMISSION,\n\n move |request, payload| {\n\n let status = if let Ok(status) = admin_commands.admin_service_status() {\n\n status\n\n } else {\n\n return Box::new(HttpResponse::InternalServerError().finish().into_future());\n", "file_path": "rest_api/actix_web_1/src/admin/ws_register_type.rs", "rank": 56, "score": 285920.70559778064 }, { "content": "#[derive(Clone, Debug)]\n\nstruct ConnectionMetadata {\n\n connection_id: String,\n\n endpoint: String,\n\n identity: ConnectionAuthorizationType,\n\n extended_metadata: ConnectionMetadataExt,\n\n}\n\n\n\nimpl ConnectionMetadata {\n\n fn is_outbound(&self) -> bool {\n\n matches!(\n\n self.extended_metadata,\n\n ConnectionMetadataExt::Outbound { .. 
}\n\n )\n\n }\n\n\n\n fn connection_id(&self) -> &str {\n\n &self.connection_id\n\n }\n\n\n\n fn endpoint(&self) -> &str {\n\n &self.endpoint\n\n }\n\n\n\n fn identity(&self) -> &ConnectionAuthorizationType {\n\n &self.identity\n\n }\n\n}\n\n\n\n/// Enum describing metadata that is specific to the two different connection\n\n/// types, outbound and inbound.\n", "file_path": "libsplinter/src/network/connection_manager/mod.rs", "rank": 57, "score": 284255.8257676522 }, { "content": "/// Information required to request an outboudn connection\n\nstruct OutboundConnection {\n\n endpoint: String,\n\n connection_id: String,\n\n expected_authorization: Option<ConnectionAuthorizationType>,\n\n local_authorization: Option<ConnectionAuthorizationType>,\n\n}\n\n\n", "file_path": "libsplinter/src/network/connection_manager/mod.rs", "rank": 58, "score": 284255.7318754577 }, { "content": "/// Defines methods for CRUD operations and fetching and listing keys\n\n/// without defining a storage strategy\n\npub trait KeyStore: Sync + Send {\n\n /// Adds a key to the underlying storage\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `key` - The key to be added\n\n fn add_key(&self, key: Key) -> Result<(), KeyStoreError>;\n\n\n\n /// Updates a key information in the underling storage\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `public_key`: The public key of the key record to be updated.\n\n /// * `user_id`: The ID owner of the key record to be updated.\n\n /// * `new_display_name`: The new display name of the key record.\n\n fn update_key(\n\n &self,\n\n public_key: &str,\n\n user_id: &str,\n\n new_display_name: &str,\n", "file_path": "libsplinter/src/biome/key_management/store/mod.rs", "rank": 59, "score": 283313.351691122 }, { "content": "fn parse_node_argument(node_argument: &str) -> Result<(String, Vec<String>), CliError> {\n\n let mut iter = node_argument.split(\"::\");\n\n\n\n let node_id = iter\n\n .next()\n\n .expect(\"str::split cannot return an empty iterator\")\n\n 
.to_string();\n\n if node_id.is_empty() {\n\n return Err(CliError::ActionError(\n\n \"Empty '--node' argument detected\".into(),\n\n ));\n\n }\n\n\n\n let endpoints = iter\n\n .next()\n\n .ok_or_else(|| CliError::ActionError(format!(\"Missing endpoints for node '{}'\", node_id)))?\n\n .to_string();\n\n if endpoints.is_empty() {\n\n return Err(CliError::ActionError(format!(\n\n \"No endpoints detected for node '{}'\",\n", "file_path": "cli/src/action/circuit/mod.rs", "rank": 60, "score": 280820.98740965326 }, { "content": "/// Determines if a service ID is valid. A valid service ID is a 4 character base62 string.\n\npub fn is_valid_service_id(service_id: &str) -> bool {\n\n let is_correct_len = service_id.len() == 4;\n\n let is_base62 = service_id.chars().all(|c| c.is_ascii_alphanumeric());\n\n is_correct_len && is_base62\n\n}\n\n\n\n#[derive(Clone, Serialize, Deserialize, Debug, PartialEq, Eq)]\n\npub struct CircuitProposal {\n\n pub proposal_type: ProposalType,\n\n pub circuit_id: String,\n\n pub circuit_hash: String,\n\n pub circuit: CreateCircuit,\n\n pub votes: Vec<VoteRecord>,\n\n #[serde(serialize_with = \"as_hex\")]\n\n #[serde(deserialize_with = \"deserialize_hex\")]\n\n pub requester: Vec<u8>,\n\n pub requester_node_id: String,\n\n}\n\n\n\nimpl CircuitProposal {\n", "file_path": "libsplinter/src/admin/service/messages/v1/mod.rs", "rank": 61, "score": 280415.04893527814 }, { "content": "/// Determines if a service ID is valid. 
A valid service ID is a 4 character base62 string.\n\npub fn is_valid_service_id(service_id: &str) -> bool {\n\n let is_correct_len = service_id.len() == 4;\n\n let is_base62 = service_id.chars().all(|c| c.is_ascii_alphanumeric());\n\n is_correct_len && is_base62\n\n}\n\n\n\n#[derive(Clone, Serialize, Deserialize, Debug, PartialEq, Eq)]\n\npub struct CircuitProposal {\n\n pub proposal_type: ProposalType,\n\n pub circuit_id: String,\n\n pub circuit_hash: String,\n\n pub circuit: CreateCircuit,\n\n pub votes: Vec<VoteRecord>,\n\n #[serde(serialize_with = \"as_hex\")]\n\n #[serde(deserialize_with = \"deserialize_hex\")]\n\n pub requester: Vec<u8>,\n\n pub requester_node_id: String,\n\n}\n\n\n\nimpl CircuitProposal {\n", "file_path": "libsplinter/src/admin/service/messages/v2/mod.rs", "rank": 62, "score": 280415.0489352782 }, { "content": "/// Determines if a circuit ID is valid. A valid circuit ID is an 11 character string composed of\n\n/// two, 5 character base62 strings joined with a '-' (example: abcDE-F0123).\n\npub fn is_valid_circuit_id(circuit_id: &str) -> bool {\n\n let mut split = circuit_id.splitn(2, '-');\n\n let is_two_parts = split.clone().count() == 2;\n\n let are_parts_valid = split.all(|part| {\n\n let is_correct_len = part.len() == 5;\n\n let is_base62 = part.chars().all(|c| c.is_ascii_alphanumeric());\n\n is_correct_len && is_base62\n\n });\n\n is_two_parts && are_parts_valid\n\n}\n\n\n\n#[derive(Clone, Serialize, Deserialize, Debug, PartialEq, Eq)]\n\npub enum AuthorizationType {\n\n Trust,\n\n Challenge,\n\n}\n\n\n\nimpl From<&store::AuthorizationType> for AuthorizationType {\n\n fn from(store_enum: &store::AuthorizationType) -> Self {\n\n match *store_enum {\n", "file_path": "libsplinter/src/admin/service/messages/v2/mod.rs", "rank": 63, "score": 280414.7187768094 }, { "content": "/// Determines if a circuit ID is valid. 
A valid circuit ID is an 11 character string composed of\n\n/// two, 5 character base62 strings joined with a '-' (example: abcDE-F0123).\n\npub fn is_valid_circuit_id(circuit_id: &str) -> bool {\n\n let mut split = circuit_id.splitn(2, '-');\n\n let is_two_parts = split.clone().count() == 2;\n\n let are_parts_valid = split.all(|part| {\n\n let is_correct_len = part.len() == 5;\n\n let is_base62 = part.chars().all(|c| c.is_ascii_alphanumeric());\n\n is_correct_len && is_base62\n\n });\n\n is_two_parts && are_parts_valid\n\n}\n\n\n\n#[derive(Clone, Serialize, Deserialize, Debug, PartialEq, Eq)]\n\npub enum AuthorizationType {\n\n Trust,\n\n}\n\n\n\n#[derive(Clone, Serialize, Deserialize, Debug, PartialEq, Eq)]\n\npub enum PersistenceType {\n\n Any,\n\n}\n", "file_path": "libsplinter/src/admin/service/messages/v1/mod.rs", "rank": 64, "score": 280414.71877680934 }, { "content": "#[derive(Clone, Debug)]\n\nenum ConnectionMetadataExt {\n\n Outbound {\n\n reconnecting: bool,\n\n retry_frequency: u64,\n\n last_connection_attempt: Instant,\n\n reconnection_attempts: u64,\n\n expected_authorization: ConnectionAuthorizationType,\n\n local_authorization: ConnectionAuthorizationType,\n\n },\n\n Inbound {\n\n disconnected: bool,\n\n local_authorization: ConnectionAuthorizationType,\n\n },\n\n}\n\n\n\nimpl ConnectionMetadataExt {\n\n fn expected_authorization(&self) -> Option<ConnectionAuthorizationType> {\n\n match self {\n\n ConnectionMetadataExt::Outbound {\n\n expected_authorization,\n", "file_path": "libsplinter/src/network/connection_manager/mod.rs", "rank": 65, "score": 279476.06073508866 }, { "content": "fn handle_disconnection(\n\n endpoint: String,\n\n identity: PeerAuthorizationToken,\n\n connection_id: String,\n\n unreferenced_peers: &mut UnreferencedPeerState,\n\n peers: &mut PeerMap,\n\n connector: Connector,\n\n subscribers: &mut SubscriberMap,\n\n) {\n\n if let Some(mut peer_metadata) = peers.get_by_connection_id(&connection_id).cloned() {\n\n if endpoint != 
peer_metadata.active_endpoint {\n\n warn!(\n\n \"Received disconnection notification for peer {} with \\\n\n different endpoint {}\",\n\n peer_metadata.id, endpoint\n\n );\n\n return;\n\n }\n\n\n\n let notification = PeerManagerNotification::Disconnected {\n", "file_path": "libsplinter/src/peer/mod.rs", "rank": 66, "score": 279358.9594918801 }, { "content": "#[allow(clippy::too_many_arguments)]\n\nfn handle_notifications(\n\n notification: ConnectionManagerNotification,\n\n unreferenced_peers: &mut UnreferencedPeerState,\n\n peers: &mut PeerMap,\n\n connector: Connector,\n\n subscribers: &mut SubscriberMap,\n\n max_retry_attempts: u64,\n\n ref_map: &mut RefMap<PeerTokenPair>,\n\n retry_frequency: u64,\n\n) {\n\n match notification {\n\n // If a connection has disconnected, forward notification to subscribers\n\n ConnectionManagerNotification::Disconnected {\n\n endpoint,\n\n identity,\n\n connection_id,\n\n } => handle_disconnection(\n\n endpoint,\n\n PeerAuthorizationToken::from(identity),\n\n connection_id,\n", "file_path": "libsplinter/src/peer/mod.rs", "rank": 67, "score": 279288.8348697824 }, { "content": "#[allow(clippy::too_many_arguments, clippy::cognitive_complexity)]\n\nfn handle_connected(\n\n endpoint: String,\n\n identity: PeerAuthorizationToken,\n\n connection_id: String,\n\n local_authorization: PeerAuthorizationToken,\n\n unreferenced_peers: &mut UnreferencedPeerState,\n\n peers: &mut PeerMap,\n\n connector: Connector,\n\n subscribers: &mut SubscriberMap,\n\n ref_map: &mut RefMap<PeerTokenPair>,\n\n retry_frequency: u64,\n\n) {\n\n let peer_token_pair = PeerTokenPair::new(identity.clone(), local_authorization.clone());\n\n if let Some(mut peer_metadata) = peers.get_by_peer_id(&peer_token_pair).cloned() {\n\n match peer_metadata.status {\n\n PeerStatus::Pending => {\n\n info!(\n\n \"Pending peer {} connected via {}\",\n\n peer_metadata.id, endpoint\n\n );\n", "file_path": "libsplinter/src/peer/mod.rs", "rank": 68, "score": 279269.3414734128 }, { 
"content": "// Request a connection, the resulting connection will be treated as an InboundConnection\n\nfn add_unidentified(\n\n endpoint: String,\n\n connector: Connector,\n\n unreferenced_peers: &mut UnreferencedPeerState,\n\n peer_remover: &PeerRemover,\n\n peers: &PeerMap,\n\n ref_map: &mut RefMap<PeerTokenPair>,\n\n local_authorization: PeerAuthorizationToken,\n\n) -> EndpointPeerRef {\n\n info!(\"Attempting to peer with peer by endpoint {}\", endpoint);\n\n if let Some(peer_metadatas) = peers.get_peer_from_endpoint(&endpoint) {\n\n for peer_metadata in peer_metadatas {\n\n // need to verify that the existing peer has the correct local authorization\n\n if peer_metadata.required_local_auth == local_authorization {\n\n let peer_token_pair = PeerTokenPair::new(\n\n peer_metadata.id.clone(),\n\n peer_metadata.required_local_auth.clone(),\n\n );\n\n // if there is peer in the peer_map, there is reference in the ref map\n\n ref_map.add_ref(peer_token_pair);\n", "file_path": "libsplinter/src/peer/mod.rs", "rank": 69, "score": 279048.0321955827 }, { "content": "// Adds a test profile to the user profile store\n\nfn add_profile(node: &Node) -> Result<(), InternalError> {\n\n let profile = ProfileBuilder::new()\n\n .with_user_id(\"test_user_id\".into())\n\n .with_subject(\"subject\".into())\n\n .with_name(Some(\"name\".into()))\n\n .build()\n\n .expect(\"Unable to build profile\");\n\n\n\n let profile_store = node.user_profile_store();\n\n Ok(profile_store\n\n .add_profile(profile)\n\n .map_err(|err| InternalError::from_source(Box::new(err)))?)\n\n}\n", "file_path": "splinterd/tests/admin/rest_api.rs", "rank": 70, "score": 278755.4034554812 }, { "content": "/// Struct describing the connection manager's internal state and handling\n\n/// requests sent to the connection manager by its Connectors. 
Connection state\n\n/// is responsible for adding, removing, and authorizing connections.\n\nstruct ConnectionManagerState<T, U>\n\nwhere\n\n T: ConnectionMatrixLifeCycle,\n\n U: ConnectionMatrixSender,\n\n{\n\n connections: HashMap<String, ConnectionMetadata>,\n\n life_cycle: T,\n\n matrix_sender: U,\n\n transport: Box<dyn Transport>,\n\n maximum_retry_frequency: u64,\n\n}\n\n\n\nimpl<T, U> ConnectionManagerState<T, U>\n\nwhere\n\n T: ConnectionMatrixLifeCycle,\n\n U: ConnectionMatrixSender,\n\n{\n\n fn new(\n\n life_cycle: T,\n\n matrix_sender: U,\n", "file_path": "libsplinter/src/network/connection_manager/mod.rs", "rank": 71, "score": 278652.2402932485 }, { "content": "pub fn make_add_batches_to_queue_endpoint() -> ServiceEndpoint {\n\n ServiceEndpoint {\n\n service_type: SERVICE_TYPE.into(),\n\n route: \"/batches\".into(),\n\n method: Method::Post,\n\n handler: Arc::new(move |_, payload, service| {\n\n let scabbard = match service.as_any().downcast_ref::<Scabbard>() {\n\n Some(s) => s,\n\n None => {\n\n error!(\"Failed to downcast to scabbard service\");\n\n return Box::new(\n\n HttpResponse::InternalServerError()\n\n .json(ErrorResponse::internal_error())\n\n .into_future(),\n\n );\n\n }\n\n }\n\n .clone();\n\n\n\n Box::new(\n", "file_path": "rest_api/actix_web_1/src/scabbard/batches.rs", "rank": 72, "score": 276344.406629634 }, { "content": "type Callback =\n\n Box<dyn Fn(ConnectionAuthorizationState) -> Result<(), Box<dyn std::error::Error>> + Send>;\n\n\n\npub struct AuthorizationConnector {\n\n local_identity: String,\n\n #[cfg(feature = \"challenge-authorization\")]\n\n signers: Vec<Box<dyn Signer>>,\n\n shared: Arc<Mutex<ManagedAuthorizations>>,\n\n executor: JobExecutor,\n\n #[cfg(feature = \"challenge-authorization\")]\n\n verifier_factory: Arc<Mutex<Box<dyn VerifierFactory>>>,\n\n}\n\n\n\nimpl AuthorizationConnector {\n\n pub fn add_connection(\n\n &self,\n\n connection_id: String,\n\n connection: Box<dyn Connection>,\n\n expected_authorization: 
Option<ConnectionAuthorizationType>,\n\n local_authorization: Option<ConnectionAuthorizationType>,\n", "file_path": "libsplinter/src/network/auth/mod.rs", "rank": 73, "score": 275323.8158563115 }, { "content": "pub fn get_node_id(\n\n passed_in_node_id: Option<String>,\n\n node_id_store: Box<dyn NodeIdStore>,\n\n) -> Result<String, UserError> {\n\n get_from_store(passed_in_node_id, node_id_store)\n\n}\n\n\n", "file_path": "splinterd/src/node_id.rs", "rank": 74, "score": 273616.2976682122 }, { "content": "#[allow(clippy::too_many_arguments)]\n\nfn handle_inbound_connection(\n\n endpoint: String,\n\n identity: PeerAuthorizationToken,\n\n connection_id: String,\n\n local_authorization: PeerAuthorizationToken,\n\n unreferenced_peers: &mut UnreferencedPeerState,\n\n peers: &mut PeerMap,\n\n connector: Connector,\n\n subscribers: &mut SubscriberMap,\n\n retry_frequency: u64,\n\n) {\n\n info!(\n\n \"Received peer connection from {} (remote endpoint: {})\",\n\n identity, endpoint\n\n );\n\n\n\n let peer_token_pair = PeerTokenPair::new(identity.clone(), local_authorization.clone());\n\n // If we got an inbound counnection for an existing peer, replace old connection with\n\n // this new one unless we are already connected.\n\n if let Some(mut peer_metadata) = peers.get_by_peer_id(&peer_token_pair).cloned() {\n", "file_path": "libsplinter/src/peer/mod.rs", "rank": 75, "score": 273600.5728148681 }, { "content": "fn handle_fatal_connection(\n\n connection_id: String,\n\n error: String,\n\n peers: &mut PeerMap,\n\n subscribers: &mut SubscriberMap,\n\n max_retry_frequency: u64,\n\n) {\n\n if let Some(mut peer_metadata) = peers.get_by_connection_id(&connection_id).cloned() {\n\n warn!(\n\n \"Peer {} encountered a fatal connection error: {}\",\n\n peer_metadata.id, error\n\n );\n\n\n\n // Tell subscribers this peer is disconnected\n\n let notification = PeerManagerNotification::Disconnected {\n\n peer: PeerTokenPair::new(\n\n peer_metadata.id.clone(),\n\n 
peer_metadata.required_local_auth.clone(),\n\n ),\n\n };\n", "file_path": "libsplinter/src/peer/mod.rs", "rank": 76, "score": 273600.5728148681 }, { "content": "#[test]\n\npub fn test_registry_creation() {\n\n // Start a single node network\n\n let mut network = Network::new()\n\n .with_default_rest_api_variant(RestApiVariant::ActixWeb1)\n\n .add_nodes_with_defaults(1)\n\n .expect(\"Unable to start single node ActixWeb1 network\");\n\n // Get the node\n\n let node = network.node(0).expect(\"Unable to get node\");\n\n // Get node's registry client\n\n let registry_client = node.registry_client();\n\n // List all nodes in the registry\n\n let registry_list_response = registry_client\n\n .list_nodes(None)\n\n .expect(\"Registry get node request failed\");\n\n // Check the length of the returned list is 1\n\n assert_eq!(registry_list_response.data.len(), 1);\n\n // Check that the fields of the one node in the list are correct\n\n assert_eq!(registry_list_response.data[0].identity, node.node_id());\n\n assert_eq!(\n\n registry_list_response.data[0].endpoints,\n", "file_path": "splinterd/tests/admin/registry.rs", "rank": 77, "score": 273581.94454388943 }, { "content": "CREATE TYPE status_type AS ENUM ('NEW', 'COMPLETE');\n\n\n", "file_path": "libsplinter/src/migrations/diesel/postgres/migrations/2022-02-17-183942_service_lifecycle/up.sql", "rank": 78, "score": 273383.5599251641 }, { "content": "/// Gets the ID of the coordinator. The coordinator is the node with the lowest ID in the set of\n\n/// verifiers.\n\nfn get_coordinator(peers: Vec<ServiceId>) -> Option<ServiceId> {\n\n peers.into_iter().min_by(|x, y| x.as_str().cmp(y.as_str()))\n\n}\n", "file_path": "services/scabbard/libscabbard/src/store/command/finalize_service.rs", "rank": 79, "score": 270801.17385796807 }, { "content": "/// Gets the ID of the coordinator. 
The coordinator is the node with the lowest ID in the set of\n\n/// verifiers.\n\nfn get_coordinator(peers: Vec<ServiceId>) -> Option<ServiceId> {\n\n peers.into_iter().min_by(|x, y| x.as_str().cmp(y.as_str()))\n\n}\n", "file_path": "services/scabbard/libscabbard/src/store/command/prepare_service.rs", "rank": 80, "score": 270801.17385796807 }, { "content": "#[test]\n\npub fn test_registry_update_node() {\n\n // Start two node network\n\n let mut network = Network::new()\n\n .with_default_rest_api_variant(RestApiVariant::ActixWeb1)\n\n .add_nodes_with_defaults(2)\n\n .expect(\"Unable to start single node ActixWeb1 network\");\n\n // Get the first node\n\n let node_a = network.node(0).expect(\"Unable to get node\");\n\n // Get the node's registry client\n\n let registry_client = node_a.registry_client();\n\n // List all nodes in the registry\n\n let registry_list_response = registry_client\n\n .list_nodes(None)\n\n .expect(\"Registry get node request failed\");\n\n // Check the length of the returned list is 2\n\n assert_eq!(registry_list_response.data.len(), 2);\n\n let key = node_a\n\n .admin_signer()\n\n .clone_box()\n\n .public_key()\n", "file_path": "splinterd/tests/admin/registry.rs", "rank": 81, "score": 269956.20945339446 }, { "content": "#[test]\n\npub fn test_registry_delete_node() {\n\n // Start three node network\n\n let mut network = Network::new()\n\n .with_default_rest_api_variant(RestApiVariant::ActixWeb1)\n\n .add_nodes_with_defaults(3)\n\n .expect(\"Unable to start single node ActixWeb1 network\");\n\n // Get the first node\n\n let node_a = network.node(0).expect(\"Unable to get node\");\n\n // Get the second node\n\n let node_b = network.node(1).expect(\"Unable to get node\");\n\n // Get node's registry client\n\n let registry_client = node_a.registry_client();\n\n // List all nodes in the registry\n\n let registry_list_response = registry_client\n\n .list_nodes(None)\n\n .expect(\"Registry get node request failed\");\n\n // Check the length of the 
returned list is 3\n\n assert_eq!(registry_list_response.data.len(), 3);\n\n // Delete `node_b` from the registry\n\n registry_client\n", "file_path": "splinterd/tests/admin/registry.rs", "rank": 82, "score": 269956.20945339446 }, { "content": "/// Messages handled by the connection manager.\n\nenum CmMessage {\n\n Shutdown,\n\n Request(CmRequest),\n\n AuthResult(AuthResult),\n\n SendHeartbeats,\n\n}\n\n\n", "file_path": "libsplinter/src/network/connection_manager/mod.rs", "rank": 83, "score": 269924.0154297624 }, { "content": "/// CmMessages sent by a Connector.\n\nenum CmRequest {\n\n RequestOutboundConnection {\n\n endpoint: String,\n\n connection_id: String,\n\n expected_authorization: Option<ConnectionAuthorizationType>,\n\n local_authorization: Option<ConnectionAuthorizationType>,\n\n sender: Sender<Result<(), ConnectionManagerError>>,\n\n },\n\n RemoveConnection {\n\n endpoint: String,\n\n connection_id: String,\n\n sender: Sender<Result<Option<String>, ConnectionManagerError>>,\n\n },\n\n ListConnections {\n\n sender: Sender<Result<Vec<String>, ConnectionManagerError>>,\n\n },\n\n AddInboundConnection {\n\n connection: Box<dyn Connection>,\n\n sender: Sender<Result<(), ConnectionManagerError>>,\n\n },\n\n Subscribe {\n\n sender: Sender<Result<SubscriberId, ConnectionManagerError>>,\n\n callback: Subscriber,\n\n },\n\n Unsubscribe {\n\n subscriber_id: SubscriberId,\n\n sender: Sender<Result<(), ConnectionManagerError>>,\n\n },\n\n}\n\n\n", "file_path": "libsplinter/src/network/connection_manager/mod.rs", "rank": 84, "score": 269910.5235749336 }, { "content": "type LeafIter<T> = Box<dyn Iterator<Item = IterResult<T>>>;\n\n\n\nimpl MerkleRadixLeafReader for MerkleState {\n\n fn leaves(\n\n &self,\n\n state_id: &Self::StateId,\n\n subtree: Option<&str>,\n\n ) -> IterResult<LeafIter<(Self::Key, Self::Value)>> {\n\n match self {\n\n MerkleState::KeyValue { state, .. 
} => state.leaves(state_id, subtree),\n\n #[cfg(feature = \"postgres\")]\n\n MerkleState::SqlPostgres { state } => state.leaves(state_id, subtree),\n\n #[cfg(feature = \"sqlite\")]\n\n MerkleState::SqlSqlite { state } => state.leaves(state_id, subtree),\n\n }\n\n }\n\n}\n\n\n\nimpl Write for MerkleState {\n\n type StateId = String;\n", "file_path": "services/scabbard/libscabbard/src/service/state/merkle_state.rs", "rank": 85, "score": 269754.39933749934 }, { "content": "/// The `PeerLookup` trait provides an interface for looking up details about individual peer\n\n/// connections.\n\npub trait PeerLookup: Send {\n\n /// Retrieves the connection ID for a given peer ID, if found.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns a `PeerLookupError` if the connection ID cannot be retrieved.\n\n fn connection_id(&self, peer_id: &PeerTokenPair) -> Result<Option<String>, PeerLookupError>;\n\n\n\n /// Retrieves the peer ID for a given connection ID, if found.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns a `PeerLookupError` if the peer ID cannot be retrieved.\n\n fn peer_id(&self, connection_id: &str) -> Result<Option<PeerTokenPair>, PeerLookupError>;\n\n}\n\n\n", "file_path": "libsplinter/src/peer/connector.rs", "rank": 86, "score": 269122.10405422706 }, { "content": "#[test]\n\npub fn test_registry_list_nodes_filter() {\n\n // Start single node network\n\n let mut network = Network::new()\n\n .with_default_rest_api_variant(RestApiVariant::ActixWeb1)\n\n .add_nodes_with_defaults(1)\n\n .expect(\"Unable to start single node ActixWeb1 network\");\n\n // Get the node\n\n let node_a = network.node(0).expect(\"Unable to get node\");\n\n // Get node's registry client\n\n let registry_client = node_a.registry_client();\n\n // List all nodes in the registry\n\n let registry_list_response = registry_client\n\n .list_nodes(None)\n\n .expect(\"Registry get node request failed\");\n\n // Check the length of the returned list is 1\n\n assert_eq!(registry_list_response.data.len(), 
1);\n\n let key = node_a\n\n .admin_signer()\n\n .clone_box()\n\n .public_key()\n", "file_path": "splinterd/tests/admin/registry.rs", "rank": 87, "score": 266480.6517551848 }, { "content": "#[test]\n\npub fn test_3_party_circuit_abandon() {\n\n // Start a 3-node network\n\n let mut network = Network::new()\n\n .with_default_rest_api_variant(RestApiVariant::ActixWeb1)\n\n .add_nodes_with_defaults(3)\n\n .expect(\"Unable to start 3-node ActixWeb1 network\");\n\n // Get the first node in the network\n\n let node_a = network.node(0).expect(\"Unable to get first node\");\n\n // Get the second node in the network\n\n let node_b = network.node(1).expect(\"Unable to get second node\");\n\n // Get the third node from the network\n\n let node_c = network.node(2).expect(\"Unable to get third node\");\n\n\n\n let circuit_id = \"ABCDE-01234\";\n\n // Commit a circuit to state\n\n commit_3_party_circuit(\n\n &circuit_id,\n\n node_a,\n\n node_b,\n\n node_c,\n", "file_path": "splinterd/tests/admin/circuit_abandon.rs", "rank": 88, "score": 266480.6517551848 }, { "content": "#[test]\n\npub fn test_3_party_circuit_purge() {\n\n let mut network = Network::new()\n\n .with_default_rest_api_variant(RestApiVariant::ActixWeb1)\n\n .add_nodes_with_defaults(3)\n\n .expect(\"Unable to start 3-node ActixWeb1 network\");\n\n let node_a = network.node(0).expect(\"Could not get first node\");\n\n let node_b = network.node(1).expect(\"Could not get second node\");\n\n let node_c = network.node(2).expect(\"Could not get third node\");\n\n\n\n let test_circuit_id = \"QWERT-01234\";\n\n commit_3_party_circuit(\n\n test_circuit_id,\n\n node_a,\n\n node_b,\n\n node_c,\n\n AuthorizationType::Trust,\n\n );\n\n\n\n let service_id_a = get_node_service_id(test_circuit_id, node_a);\n\n let service_id_b = get_node_service_id(test_circuit_id, node_b);\n", "file_path": "splinterd/tests/admin/circuit_purge.rs", "rank": 89, "score": 266480.6517551848 }, { "content": "#[test]\n\npub fn 
test_2_party_circuit_lifecycle() {\n\n // Start a 2-node network\n\n let mut network = Network::new()\n\n .with_default_rest_api_variant(RestApiVariant::ActixWeb1)\n\n .add_nodes_with_defaults(2)\n\n .expect(\"Unable to start 2-node ActixWeb1 network\");\n\n // Get the first node in the network\n\n let node_a = network.node(0).expect(\"Unable to get first node\");\n\n // Get the second node in the network\n\n let node_b = network.node(1).expect(\"Unable to get second node\");\n\n // Commit the circuit to state\n\n let node_b_admin_pubkey = admin_pubkey(node_b);\n\n\n\n let CircuitData {\n\n circuit_id,\n\n management_type,\n\n ..\n\n } = network\n\n .circuit_builder(&[0, 1])\n\n .expect(\"Could not create builder\")\n", "file_path": "splinterd/tests/admin/circuit_disband.rs", "rank": 90, "score": 266480.6517551848 }, { "content": "#[test]\n\npub fn test_2_party_circuit_creation() {\n\n // Start a 2-node network\n\n let mut network = Network::new()\n\n .with_default_rest_api_variant(RestApiVariant::ActixWeb1)\n\n .add_nodes_with_defaults(2)\n\n .expect(\"Unable to start 2-node ActixWeb1 network\");\n\n // Get the first node in the network\n\n let node_a = network.node(0).expect(\"Unable to get first node\");\n\n // Get the second node in the network\n\n let node_b = network.node(1).expect(\"Unable to get second node\");\n\n\n\n let circuit_id = \"ABCDE-01234\";\n\n\n\n commit_2_party_circuit(circuit_id, node_a, node_b, AuthorizationType::Trust);\n\n\n\n shutdown!(network).expect(\"Unable to shutdown network\");\n\n}\n\n\n\n/// Test that a 2-party circuit may be created on a 2-node network using challenge authorization.\n\n///\n\n/// 1. Start a two node network and get both nodes.\n\n/// 2. Use commit_2_party_circuit to verify that a circuit can be created between the two nodes\n\n/// using challenge authorization.\n\n/// 3. 
Shutdown the network\n", "file_path": "splinterd/tests/admin/circuit_create.rs", "rank": 91, "score": 266480.6517551848 }, { "content": "#[test]\n\npub fn test_2_party_circuit_abandon() {\n\n // Start a 2-node network\n\n let mut network = Network::new()\n\n .with_default_rest_api_variant(RestApiVariant::ActixWeb1)\n\n .add_nodes_with_defaults(2)\n\n .expect(\"Unable to start 2-node ActixWeb1 network\");\n\n // Get the first node in the network\n\n let node_a = network.node(0).expect(\"Unable to get first node\");\n\n // Get the second node in the network\n\n let node_b = network.node(1).expect(\"Unable to get second node\");\n\n let circuit_id = \"ABCDE-01234\";\n\n // Commit the circuit to state\n\n commit_2_party_circuit(&circuit_id, node_a, node_b, AuthorizationType::Trust);\n\n\n\n // Create the `ServiceId` struct based on the first node's associated `service_id` and the\n\n // committed `circuit_id`\n\n let service_id_a = get_node_service_id(&circuit_id, node_a);\n\n // Submit a `CreateContractRegistryAction` to validate the service transaction is\n\n // valid on the active circuit\n\n let scabbard_batch =\n", "file_path": "splinterd/tests/admin/circuit_abandon.rs", "rank": 92, "score": 266480.6517551848 }, { "content": "#[test]\n\npub fn test_2_party_circuit_purge() {\n\n let mut network = Network::new()\n\n .with_default_rest_api_variant(RestApiVariant::ActixWeb1)\n\n .add_nodes_with_defaults(2)\n\n .expect(\"Unable to start 2-node ActixWeb1 network\");\n\n let node_a = network.node(0).expect(\"Unable to get first node\");\n\n let node_b = network.node(1).expect(\"Unable to get first node\");\n\n let test_circuit_id = \"QAZED-12345\";\n\n commit_2_party_circuit(&test_circuit_id, node_a, node_b, AuthorizationType::Trust);\n\n\n\n let test_service_id_a = get_node_service_id(&test_circuit_id, node_a);\n\n let test_service_id_b = get_node_service_id(&test_circuit_id, node_b);\n\n let scabbard_batch =\n\n 
make_create_contract_registry_batch(\"contract_registry_0\", &*node_a.admin_signer())\n\n .expect(\"Unable to build `CreateContractRegistryAction`\");\n\n node_a\n\n .scabbard_client()\n\n .expect(\"Unable to get first node's scabbard client\")\n\n .submit(\n\n &test_service_id_a,\n", "file_path": "splinterd/tests/admin/circuit_purge.rs", "rank": 93, "score": 266480.6517551848 }, { "content": "#[test]\n\npub fn test_3_party_circuit_creation() {\n\n // Start a 3-node network\n\n let mut network = Network::new()\n\n .with_default_rest_api_variant(RestApiVariant::ActixWeb1)\n\n .add_nodes_with_defaults(3)\n\n .expect(\"Unable to start 3-node ActixWeb1 network\");\n\n // Get the first node in the network\n\n let node_a = network.node(0).expect(\"Unable to get first node\");\n\n // Get the second node in the network\n\n let node_b = network.node(1).expect(\"Unable to get second node\");\n\n // Get the third node in the network\n\n let node_c = network.node(2).expect(\"Unable to get third node\");\n\n\n\n let circuit_id = \"ABCDE-01234\";\n\n commit_3_party_circuit(circuit_id, node_a, node_b, node_c, AuthorizationType::Trust);\n\n\n\n shutdown!(network).expect(\"Unable to shutdown network\");\n\n}\n\n\n\n/// Test that a 3-party circuit may be created on a 3-node network using challenge authorization.\n\n///\n\n/// 1. Create a 3 node network and get each node.\n\n/// 2. Use commit_3_party_circuit to verify that a circuit can be created between the three nodes\n\n/// using challenge authorization.\n\n/// 3. 
Shutdown the network.\n", "file_path": "splinterd/tests/admin/circuit_create.rs", "rank": 94, "score": 266480.6517551848 }, { "content": "#[test]\n\n#[ignore]\n\npub fn test_3_party_circuit_lifecycle() {\n\n // Start a 3-node network\n\n let mut network = Network::new()\n\n .with_default_rest_api_variant(RestApiVariant::ActixWeb1)\n\n .add_nodes_with_defaults(3)\n\n .expect(\"Unable to start 3-node ActixWeb1 network\");\n\n // Get the first node from the network\n\n let node_a = network.node(0).expect(\"Unable to get first node\");\n\n // Get the second node from the network\n\n let node_b = network.node(1).expect(\"Unable to get second node\");\n\n let node_b_admin_pubkey = admin_pubkey(node_b);\n\n // Get the third node from the network\n\n let node_c = network.node(2).expect(\"Unable to get third node\");\n\n let node_c_admin_pubkey = admin_pubkey(node_b);\n\n\n\n let circuit_id = \"ABCDE-01234\";\n\n commit_3_party_circuit(circuit_id, node_a, node_b, node_c, AuthorizationType::Trust);\n\n\n\n // As we've started a new event client, we'll skip just past the circuit ready event\n\n let mut node_a_events = BlockingAdminServiceEventIterator::new(\n", "file_path": "splinterd/tests/admin/circuit_disband.rs", "rank": 95, "score": 266480.58401933126 }, { "content": "fn parse_service_type_argument(service_type: &str) -> Result<(String, String), CliError> {\n\n let mut iter = service_type.split(\"::\");\n\n\n\n let service_id = iter\n\n .next()\n\n .expect(\"str::split cannot return an empty iterator\")\n\n .to_string();\n\n if service_id.is_empty() {\n\n return Err(CliError::ActionError(\n\n \"Empty '--service-type' argument detected\".into(),\n\n ));\n\n }\n\n\n\n let service_type = iter\n\n .next()\n\n .ok_or_else(|| CliError::ActionError(format!(\"Missing service type for '{}'\", service_id)))?\n\n .to_string();\n\n if service_type.is_empty() {\n\n return Err(CliError::ActionError(format!(\n\n \"Empty service type detected for '{}'\",\n", "file_path": 
"cli/src/action/circuit/mod.rs", "rank": 96, "score": 265725.487515829 }, { "content": "pub trait Authorizer {\n\n fn authorize_connection(\n\n &self,\n\n connection_id: String,\n\n connection: Box<dyn Connection>,\n\n on_complete: AuthorizerCallback,\n\n expected_authorization: Option<ConnectionAuthorizationType>,\n\n local_authorization: Option<ConnectionAuthorizationType>,\n\n ) -> Result<(), AuthorizerError>;\n\n}\n\n\n\npub enum AuthorizationResult {\n\n Authorized {\n\n connection_id: String,\n\n identity: ConnectionAuthorizationType,\n\n connection: Box<dyn Connection>,\n\n expected_authorization: ConnectionAuthorizationType,\n\n local_authorization: ConnectionAuthorizationType,\n\n },\n\n Unauthorized {\n\n connection_id: String,\n\n connection: Box<dyn Connection>,\n\n },\n\n}\n\n\n\npub type SubscriberId = usize;\n", "file_path": "libsplinter/src/network/connection_manager/mod.rs", "rank": 97, "score": 265694.96154127 }, { "content": "/// Creates and returns the path to the default sqlite database\n\n///\n\n/// Gets the splinter default state path, creating it if it does not exist. Creates a db file with\n\n/// the name splinter_state.db.\n\npub fn get_default_database() -> Result<String, CliError> {\n\n let state_path = SplinterEnvironment::load().get_state_path();\n\n if !state_path.is_dir() {\n\n fs::create_dir_all(&state_path).map_err(|_| {\n\n CliError::ActionError(format!(\n\n \"Unable to create directory: {}\",\n\n state_path.display()\n\n ))\n\n })?;\n\n }\n\n\n\n get_database_at_state_path(state_path)\n\n}\n\n\n", "file_path": "cli/src/action/database/sqlite.rs", "rank": 98, "score": 265561.1298699264 }, { "content": "#[cfg(not(feature = \"sqlite\"))]\n\npub fn get_default_database() -> Result<String, CliError> {\n\n Ok(\"postgres://admin:admin@localhost:5432/splinterd\".to_string())\n\n}\n", "file_path": "cli/src/action/database/postgres.rs", "rank": 99, "score": 265555.20080849505 } ]
Rust
crates/platform/src/input/mouse.rs
gents83/NRG
62743a54ac873a8dea359f3816e24c189a323ebb
use std::collections::HashMap; use sabi_commands::CommandParser; use sabi_messenger::{implement_message, Message, MessageFromString}; #[derive(Debug, Hash, Ord, PartialOrd, PartialEq, Eq, Clone, Copy)] pub enum MouseButton { None, Left, Right, Middle, Other(u16), } #[derive(Debug, Hash, Ord, PartialOrd, PartialEq, Eq, Clone, Copy)] pub enum MouseState { Invalid, Move, DoubleClick, Down, Up, } #[derive(Debug, PartialOrd, PartialEq, Clone, Copy)] pub struct MouseEvent { pub x: f64, pub y: f64, pub normalized_x: f32, pub normalized_y: f32, pub button: MouseButton, pub state: MouseState, } implement_message!(MouseEvent); impl MessageFromString for MouseEvent { fn from_command_parser(command_parser: CommandParser) -> Option<Box<dyn Message>> where Self: Sized, { if command_parser.has("mouse_move") { let values = command_parser.get_values_of("mouse_move"); return Some( MouseEvent { x: values[0], y: values[1], normalized_x: values[0] as _, normalized_y: values[1] as _, button: MouseButton::None, state: MouseState::Move, } .as_boxed(), ); } else if command_parser.has("mouse_left_down") { let values = command_parser.get_values_of("mouse_left_down"); return Some( MouseEvent { x: values[0], y: values[1], normalized_x: values[0] as _, normalized_y: values[1] as _, button: MouseButton::Left, state: MouseState::Down, } .as_boxed(), ); } else if command_parser.has("mouse_right_down") { let values = command_parser.get_values_of("mouse_right_down"); return Some( MouseEvent { x: values[0], y: values[1], normalized_x: values[0] as _, normalized_y: values[1] as _, button: MouseButton::Right, state: MouseState::Down, } .as_boxed(), ); } else if command_parser.has("mouse_left_up") { let values = command_parser.get_values_of("mouse_left_up"); return Some( MouseEvent { x: values[0], y: values[1], normalized_x: values[0] as _, normalized_y: values[1] as _, button: MouseButton::Left, state: MouseState::Up, } .as_boxed(), ); } else if command_parser.has("mouse_right_up") { let values = 
command_parser.get_values_of("mouse_right_up"); return Some( MouseEvent { x: values[0], y: values[1], normalized_x: values[0] as _, normalized_y: values[1] as _, button: MouseButton::Right, state: MouseState::Up, } .as_boxed(), ); } None } } impl Default for MouseEvent { #[inline] fn default() -> Self { Self { x: 0., y: 0., normalized_x: 0., normalized_y: 0., button: MouseButton::None, state: MouseState::Move, } } } pub struct MouseData { pub(super) pos_x: f64, pub(super) pos_y: f64, pub(super) move_x: f64, pub(super) move_y: f64, pub(super) is_pressed: bool, pub(super) buttons: HashMap<MouseButton, MouseState>, } impl Default for MouseData { fn default() -> Self { Self { pos_x: 0.0, pos_y: 0.0, move_x: 0.0, move_y: 0.0, is_pressed: false, buttons: HashMap::new(), } } } impl MouseData { pub fn get_x(&self) -> f64 { self.pos_x } pub fn get_y(&self) -> f64 { self.pos_y } pub fn movement_x(&self) -> f64 { self.move_x } pub fn movement_y(&self) -> f64 { self.move_y } pub fn is_pressed(&self) -> bool { self.is_pressed } pub fn get_button_state(&self, button: MouseButton) -> MouseState { if let Some(button) = self.buttons.get(&button) { *button } else { MouseState::Invalid } } pub fn is_button_down(&self, button: MouseButton) -> bool { if let Some(button) = self.buttons.get(&button) { *button == MouseState::Down } else { false } } pub fn is_button_up(&self, button: MouseButton) -> bool { if let Some(button) = self.buttons.get(&button) { *button == MouseState::Up } else { false } } }
use std::collections::HashMap; use sabi_commands::CommandParser; use sabi_messenger::{implement_message, Message, MessageFromString}; #[derive(Debug, Hash, Ord, PartialOrd, PartialEq, Eq, Clone, Copy)] pub enum MouseButton { None, Left, Right, Middle, Other(u16), } #
ues_of("mouse_move"); return Some( MouseEvent { x: values[0], y: values[1], normalized_x: values[0] as _, normalized_y: values[1] as _, button: MouseButton::None, state: MouseState::Move, } .as_boxed(), ); } else if command_parser.has("mouse_left_down") { let values = command_parser.get_values_of("mouse_left_down"); return Some( MouseEvent { x: values[0], y: values[1], normalized_x: values[0] as _, normalized_y: values[1] as _, button: MouseButton::Left, state: MouseState::Down, } .as_boxed(), ); } else if command_parser.has("mouse_right_down") { let values = command_parser.get_values_of("mouse_right_down"); return Some( MouseEvent { x: values[0], y: values[1], normalized_x: values[0] as _, normalized_y: values[1] as _, button: MouseButton::Right, state: MouseState::Down, } .as_boxed(), ); } else if command_parser.has("mouse_left_up") { let values = command_parser.get_values_of("mouse_left_up"); return Some( MouseEvent { x: values[0], y: values[1], normalized_x: values[0] as _, normalized_y: values[1] as _, button: MouseButton::Left, state: MouseState::Up, } .as_boxed(), ); } else if command_parser.has("mouse_right_up") { let values = command_parser.get_values_of("mouse_right_up"); return Some( MouseEvent { x: values[0], y: values[1], normalized_x: values[0] as _, normalized_y: values[1] as _, button: MouseButton::Right, state: MouseState::Up, } .as_boxed(), ); } None } } impl Default for MouseEvent { #[inline] fn default() -> Self { Self { x: 0., y: 0., normalized_x: 0., normalized_y: 0., button: MouseButton::None, state: MouseState::Move, } } } pub struct MouseData { pub(super) pos_x: f64, pub(super) pos_y: f64, pub(super) move_x: f64, pub(super) move_y: f64, pub(super) is_pressed: bool, pub(super) buttons: HashMap<MouseButton, MouseState>, } impl Default for MouseData { fn default() -> Self { Self { pos_x: 0.0, pos_y: 0.0, move_x: 0.0, move_y: 0.0, is_pressed: false, buttons: HashMap::new(), } } } impl MouseData { pub fn get_x(&self) -> f64 { self.pos_x } pub fn 
get_y(&self) -> f64 { self.pos_y } pub fn movement_x(&self) -> f64 { self.move_x } pub fn movement_y(&self) -> f64 { self.move_y } pub fn is_pressed(&self) -> bool { self.is_pressed } pub fn get_button_state(&self, button: MouseButton) -> MouseState { if let Some(button) = self.buttons.get(&button) { *button } else { MouseState::Invalid } } pub fn is_button_down(&self, button: MouseButton) -> bool { if let Some(button) = self.buttons.get(&button) { *button == MouseState::Down } else { false } } pub fn is_button_up(&self, button: MouseButton) -> bool { if let Some(button) = self.buttons.get(&button) { *button == MouseState::Up } else { false } } }
[derive(Debug, Hash, Ord, PartialOrd, PartialEq, Eq, Clone, Copy)] pub enum MouseState { Invalid, Move, DoubleClick, Down, Up, } #[derive(Debug, PartialOrd, PartialEq, Clone, Copy)] pub struct MouseEvent { pub x: f64, pub y: f64, pub normalized_x: f32, pub normalized_y: f32, pub button: MouseButton, pub state: MouseState, } implement_message!(MouseEvent); impl MessageFromString for MouseEvent { fn from_command_parser(command_parser: CommandParser) -> Option<Box<dyn Message>> where Self: Sized, { if command_parser.has("mouse_move") { let values = command_parser.get_val
random
[]
Rust
tools/ldr2img/src/main.rs
segfault87/ldraw.rs
f181317cf1505ca64456cf67efdf3ac406d05831
use std::{ collections::HashMap, env, fs::File, io::BufReader, path::Path, rc::Rc, }; use bincode::deserialize_from; use clap::{App, Arg}; use glutin::event_loop::EventLoop; use ldraw::{ parser::{parse_color_definition, parse_multipart_document}, }; use ldraw_ir::{ part::PartBuilder, }; use ldraw_olr::{ context::{create_headless_context, create_osmesa_context}, ops::render_display_list, }; use ldraw_renderer::{ display_list::DisplayList, part::Part, }; fn main() { let matches = App::new("ldr2img") .about("Render LDraw model into still image") .arg(Arg::with_name("ldraw_dir") .long("ldraw-dir") .value_name("PATH") .takes_value(true) .help("Path to LDraw directory")) .arg(Arg::with_name("parts_path") .short("p") .value_name("PATH") .takes_value(true) .help("Path to baked LDraw parts")) .arg(Arg::with_name("use_window_system") .short("w") .help("Use window system to utilize GPU rendering")) .arg(Arg::with_name("output") .short("o") .takes_value(true) .help("Output file name")) .arg(Arg::with_name("input") .takes_value(true) .required(true) .index(1) .help("Input file name")) .arg(Arg::with_name("size") .short("s") .takes_value(true) .help("Maximum width/height pixel size")) .get_matches(); let ldrawdir = match matches.value_of("ldraw_dir") { Some(v) => v.to_string(), None => { match env::var("LDRAWDIR") { Ok(v) => v, Err(_) => { panic!("--ldraw-dir option or LDRAWDIR environment variable is required."); } } } }; let ldrawdir = Path::new(&ldrawdir); let bakeddir = match matches.value_of("parts_path") { Some(v) => Path::new(v).to_path_buf(), None => { let baked = Path::new(&ldrawdir).join("baked"); if baked.exists() { baked } else { panic!("Parts path is not provided.") } } }; let use_window_system = matches.is_present("use_window_system"); let size = matches.value_of("size").unwrap_or("1024").parse::<usize>().unwrap(); let mut context = if use_window_system { let evloop = EventLoop::new(); create_headless_context(evloop, size, size) } else { create_osmesa_context(size, 
size) }.unwrap(); let gl = Rc::clone(&context.gl); let colors = parse_color_definition(&mut BufReader::new( File::open(ldrawdir.join("LDConfig.ldr")).unwrap(), )).unwrap(); let input = matches.value_of("input").unwrap(); let output = matches.value_of("output").unwrap_or("image.png"); let document = parse_multipart_document( &colors, &mut BufReader::new(File::open(&input).unwrap()) ).unwrap(); let mut parts = HashMap::new(); for dep in document.list_dependencies() { let path = bakeddir.join(format!("{}.part", dep.normalized)); let file = match File::open(&path) { Ok(f) => f, Err(_) => { println!("Could not open part file {}.", path.to_str().unwrap_or("")); continue }, }; let mut part = deserialize_from::<_, PartBuilder>(&mut BufReader::new(file)).unwrap(); part.part_builder.resolve_colors(&colors); let part = Part::create(&part, Rc::clone(&gl)); parts.insert(dep.clone(), part); } let mut display_list = DisplayList::from_multipart_document(Rc::clone(&gl), &document); { let mut rc = context.rendering_context.borrow_mut(); rc.set_initial_state(); rc.resize(size as _, size as _); rc.upload_shading_data(); } let image = render_display_list(&context, &parts, &mut display_list); image.save(&Path::new(output)).unwrap(); }
use std::{ collections::HashMap, env, fs::File, io::BufReader, path::Path, rc::Rc, }; use bincode::deserialize_from; use clap::{App, Arg}; use glutin::event_loop::EventLoop; use ldraw::{ parser::{parse_color_definition, parse_multipart_document}, }; use ldraw_ir::{ part::PartBuilder, }; use ldraw_olr::{ context::{create_headless_context, create_osmesa_context}, ops::render_display_list, }; use ldraw_renderer::{ display_list::DisplayList, part::Part, };
fn main() { let matches = App::new("ldr2img") .about("Render LDraw model into still image") .arg(Arg::with_name("ldraw_dir") .long("ldraw-dir") .value_name("PATH") .takes_value(true) .help("Path to LDraw directory")) .arg(Arg::with_name("parts_path") .short("p") .value_name("PATH") .takes_value(true) .help("Path to baked LDraw parts")) .arg(Arg::with_name("use_window_system") .short("w") .help("Use window system to utilize GPU rendering")) .arg(Arg::with_name("output") .short("o") .takes_value(true) .help("Output file name")) .arg(Arg::with_name("input") .takes_value(true) .required(true) .index(1) .help("Input file name")) .arg(Arg::with_name("size") .short("s") .takes_value(true) .help("Maximum width/height pixel size")) .get_matches(); let ldrawdir = match matches.value_of("ldraw_dir") { Some(v) => v.to_string(), None => { match env::var("LDRAWDIR") { Ok(v) => v, Err(_) => { panic!("--ldraw-dir option or LDRAWDIR environment variable is required."); } } } }; let ldrawdir = Path::new(&ldrawdir); let bakeddir = match matches.value_of("parts_path") { Some(v) => Path::new(v).to_path_buf(), None => { let baked = Path::new(&ldrawdir).join("baked"); if baked.exists() { baked } else { panic!("Parts path is not provided.") } } }; let use_window_system = matches.is_present("use_window_system"); let size = matches.value_of("size").unwrap_or("1024").parse::<usize>().unwrap(); let mut context = if use_window_system { let evloop = EventLoop::new(); create_headless_context(evloop, size, size) } else { create_osmesa_context(size, size) }.unwrap(); let gl = Rc::clone(&context.gl); let colors = parse_color_definition(&mut BufReader::new( File::open(ldrawdir.join("LDConfig.ldr")).unwrap(), )).unwrap(); let input = matches.value_of("input").unwrap(); let output = matches.value_of("output").unwrap_or("image.png"); let document = parse_multipart_document( &colors, &mut BufReader::new(File::open(&input).unwrap()) ).unwrap(); let mut parts = HashMap::new(); for dep in 
document.list_dependencies() { let path = bakeddir.join(format!("{}.part", dep.normalized)); let file = match File::open(&path) { Ok(f) => f, Err(_) => { println!("Could not open part file {}.", path.to_str().unwrap_or("")); continue }, }; let mut part = deserialize_from::<_, PartBuilder>(&mut BufReader::new(file)).unwrap(); part.part_builder.resolve_colors(&colors); let part = Part::create(&part, Rc::clone(&gl)); parts.insert(dep.clone(), part); } let mut display_list = DisplayList::from_multipart_document(Rc::clone(&gl), &document); { let mut rc = context.rendering_context.borrow_mut(); rc.set_initial_state(); rc.resize(size as _, size as _); rc.upload_shading_data(); } let image = render_display_list(&context, &parts, &mut display_list); image.save(&Path::new(output)).unwrap(); }
function_block-full_function
[ { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{parse_color_definition, parse_multipart_document, parse_single_document};\n\n use crate::color::MaterialRegistry;\n\n use crate::error::{ColorDefinitionParseError, ParseError};\n\n use std::fs::File;\n\n use std::io::BufReader;\n\n\n\n const PATH_LDCONFIG: &str = \"/home/segfault/.ldraw/LDConfig.ldr\";\n\n const PATH_PART: &str = \"/home/segfault/.ldraw/parts/u9318.dat\";\n\n const PATH_MPD: &str = \"/home/segfault/Downloads/6973.ldr\";\n\n\n\n fn set_up_materials() -> Result<MaterialRegistry, ColorDefinitionParseError> {\n\n let mut reader = BufReader::new(File::open(PATH_LDCONFIG).unwrap());\n\n match parse_color_definition::<BufReader<File>>(&mut reader) {\n\n Ok(m) => Ok(m),\n\n Err(e) => Err(e),\n\n }\n", "file_path": "ldraw/src/parser.rs", "rank": 0, "score": 32577.83005204076 }, { "content": "use std::collections::HashMap;\n\nuse std::fmt;\n\nuse std::hash::{Hash, Hasher};\n\n\n\nuse serde::de::{Deserializer, Error as DeError, Visitor};\n\nuse serde::ser::Serializer;\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::Vector4;\n\n\n\n#[derive(Clone, Copy, Debug)]\n\npub struct Rgba {\n\n value: [u8; 4],\n\n}\n\n\n\nimpl Rgba {\n\n pub fn new(r: u8, g: u8, b: u8, a: u8) -> Rgba {\n\n Rgba {\n\n value: [r, g, b, a],\n\n }\n", "file_path": "ldraw/src/color.rs", "rank": 1, "score": 32577.507347578376 }, { "content": "#![feature(trait_alias)]\n\n\n\nuse std::cmp;\n\nuse std::fmt::{Debug, Display, Formatter, Result as FmtResult};\n\nuse std::hash::{Hash, Hasher};\n\nuse std::ops::BitXor;\n\n\n\nuse cgmath::{\n\n Matrix3 as Matrix3_, Matrix4 as Matrix4_, Point2 as Point2_, Point3 as Point3_,\n\n Vector2 as Vector2_, Vector3 as Vector3_, Vector4 as Vector4_,\n\n};\n\nuse serde::de::{Error as DeserializeError, Visitor};\n\nuse serde::{Deserialize, Deserializer, Serialize, Serializer};\n\n\n\npub mod color;\n\npub mod document;\n\npub mod elements;\n\npub mod error;\n\npub mod 
library;\n\n#[cfg(not(target_arch = \"wasm32\"))]\n", "file_path": "ldraw/src/lib.rs", "rank": 2, "score": 32577.221030469187 }, { "content": "use std::fmt;\n\nuse std::io::Write;\n\n\n\nuse cgmath::{Matrix, Vector4};\n\n\n\nuse crate::color::ColorReference;\n\nuse crate::document::{BfcCertification, Document, MultipartDocument};\n\nuse crate::elements::{\n\n BfcStatement, Command, Header, Line, Meta, OptionalLine, PartReference, Quad, Triangle,\n\n};\n\nuse crate::error::SerializeError;\n\nuse crate::Winding;\n\n\n", "file_path": "ldraw/src/writer.rs", "rank": 3, "score": 32577.19921852091 }, { "content": "use std::collections::HashMap;\n\nuse std::io::{BufRead, Lines};\n\nuse std::iter::Enumerate;\n\nuse std::str::Chars;\n\n\n\nuse cgmath::Matrix;\n\n\n\nuse crate::{\n\n color::{\n\n ColorReference, CustomizedMaterial, Finish, Material, MaterialGlitter, MaterialRegistry,\n\n MaterialSpeckle, Rgba,\n\n },\n\n document::{BfcCertification, Document, MultipartDocument},\n\n elements::{\n\n BfcStatement, Command, Header, Line, Meta, OptionalLine, PartReference, Quad, Triangle,\n\n },\n\n error::{ColorDefinitionParseError, DocumentParseError, ParseError},\n\n {Matrix4, PartAlias, Vector4, Winding},\n\n};\n\n\n\n#[derive(Debug)]\n", "file_path": "ldraw/src/parser.rs", "rank": 4, "score": 32577.031282434204 }, { "content": "use std::{\n\n collections::{HashMap, HashSet},\n\n hash,\n\n ops::Deref,\n\n sync::{Arc, RwLock},\n\n};\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::{\n\n document::{Document, MultipartDocument},\n\n elements::PartReference,\n\n AliasType, PartAlias,\n\n};\n\n\n\n#[derive(Serialize, Deserialize, Clone, Copy, Debug)]\n\npub enum PartKind {\n\n Primitive,\n\n Part,\n\n}\n", "file_path": "ldraw/src/library.rs", "rank": 5, "score": 32575.44578533158 }, { "content": "use std::{\n\n collections::{HashMap, HashSet},\n\n iter::Iterator,\n\n vec::Vec,\n\n};\n\n\n\nuse crate::{\n\n elements::{Command, Header, Line, Meta, OptionalLine, 
PartReference, Quad, Triangle},\n\n PartAlias, Winding,\n\n};\n\n\n\n#[derive(Clone, Debug)]\n\npub enum BfcCertification {\n\n NotApplicable,\n\n NoCertify,\n\n Certify(Winding),\n\n}\n\n\n\nimpl BfcCertification {\n\n pub fn is_certified(&self) -> Option<bool> {\n", "file_path": "ldraw/src/document.rs", "rank": 6, "score": 32574.815162862844 }, { "content": "use std::{error::Error, fmt, io::Error as IoError};\n\n\n\n#[derive(Debug)]\n\npub enum ParseError {\n\n TypeMismatch(&'static str, String),\n\n IoError(Box<IoError>),\n\n EndOfLine,\n\n InvalidBfcStatement(String),\n\n InvalidDocumentStructure,\n\n UnexpectedCommand(String),\n\n InvalidToken(String),\n\n MultipartDocument,\n\n}\n\n\n\nimpl From<IoError> for ParseError {\n\n fn from(e: IoError) -> ParseError {\n\n ParseError::IoError(Box::new(e))\n\n }\n\n}\n\n\n", "file_path": "ldraw/src/error.rs", "rank": 7, "score": 32573.285485542903 }, { "content": "use crate::color::ColorReference;\n\nuse crate::{Matrix4, PartAlias, Vector4, Winding};\n\n\n\n#[derive(Clone, Debug)]\n\npub struct Header(pub String, pub String);\n\n\n\n#[derive(Clone, Debug)]\n\npub enum BfcStatement {\n\n Winding(Winding),\n\n Clip(Option<Winding>),\n\n NoClip,\n\n InvertNext,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub enum Meta {\n\n Comment(String),\n\n Step,\n\n Write(String),\n\n Print(String),\n", "file_path": "ldraw/src/elements.rs", "rank": 8, "score": 32572.24535555033 }, { "content": " }\n\n }\n\n\n\n pub fn list_all_dependencies(&self, document: &Document) -> HashSet<PartAlias> {\n\n let mut result = HashSet::new();\n\n\n\n self.traverse_dependencies(document, &mut result);\n\n\n\n result\n\n }\n\n}\n\n\n\n#[cfg(not(target_arch = \"wasm32\"))]\n\npub use crate::library_native::*;\n", "file_path": "ldraw/src/library.rs", "rank": 9, "score": 32571.766895641882 }, { "content": " writer.write_all(format!(\"0 {}\\n\", line).as_bytes())?;\n\n }\n\n }\n\n Meta::Step => {\n\n writer.write_all(b\"0 STEP\\n\")?;\n\n }\n\n 
Meta::Write(message) => {\n\n for line in message.lines() {\n\n writer.write_all(format!(\"0 WRITE {}\\n\", line).as_bytes())?;\n\n }\n\n }\n\n Meta::Print(message) => {\n\n for line in message.lines() {\n\n writer.write_all(format!(\"0 PRINT {}\\n\", line).as_bytes())?;\n\n }\n\n }\n\n Meta::Clear => {\n\n writer.write_all(b\"0 CLEAR\\n\")?;\n\n }\n\n Meta::Pause => {\n", "file_path": "ldraw/src/writer.rs", "rank": 10, "score": 32568.85291265064 }, { "content": " _ => None,\n\n })\n\n }\n\n\n\n pub fn resolve<D: Deref<Target = Document>>(\n\n &mut self,\n\n document: &D,\n\n parent: Option<&'a MultipartDocument>,\n\n ) {\n\n for i in document.iter_refs() {\n\n let name = &i.name;\n\n\n\n if self.map.contains_key(name) {\n\n continue;\n\n }\n\n\n\n if let Some(e) = parent {\n\n if let Some(doc) = e.subparts.get(name) {\n\n self.map\n\n .insert(name.clone(), ResolutionResult::Subpart(doc));\n", "file_path": "ldraw/src/library.rs", "rank": 11, "score": 32568.85291265064 }, { "content": " }\n\n \"SIZE\" => {\n\n size = next_token_u32(iterator)?;\n\n }\n\n \"MINSIZE\" => {\n\n minsize = next_token_f32(iterator)?;\n\n }\n\n \"MAXSIZE\" => {\n\n maxsize = next_token_f32(iterator)?;\n\n }\n\n _ => {\n\n return Err(ColorDefinitionParseError::ParseError(\n\n ParseError::InvalidToken(token.clone()),\n\n ));\n\n }\n\n }\n\n }\n\n Ok(CustomizedMaterial::Glitter(MaterialGlitter {\n\n value: Rgba::new(vr, vg, vb, alpha),\n\n luminance,\n", "file_path": "ldraw/src/parser.rs", "rank": 12, "score": 32568.85291265064 }, { "content": " let f = File::open(PATH_MPD).unwrap();\n\n let mut reader = BufReader::new(f);\n\n\n\n match parse_multipart_document::<BufReader<File>>(&materials, &mut reader) {\n\n Ok(model) => {\n\n println!(\"{:#?}\\n\", model);\n\n }\n\n Err(e) => {\n\n assert!(false, \"{}\", e);\n\n }\n\n };\n\n }\n\n}\n", "file_path": "ldraw/src/parser.rs", "rank": 13, "score": 32568.85291265064 }, { "content": " }\n\n\n\n pub fn update(&mut self, key: &PartAlias, document: 
Arc<Document>) {\n\n self.resolve(&Arc::clone(&document), None);\n\n self.map.insert(\n\n key.clone(),\n\n ResolutionResult::Associated(Arc::clone(&document)),\n\n );\n\n }\n\n\n\n pub fn query(&'a self, elem: &PartReference) -> Option<&'a Document> {\n\n match self.map.get(&elem.name) {\n\n Some(e) => match e {\n\n ResolutionResult::Missing => None,\n\n ResolutionResult::Pending(_) => None,\n\n ResolutionResult::Subpart(e) => Some(e),\n\n ResolutionResult::Associated(e) => Some(e),\n\n },\n\n None => None,\n\n }\n", "file_path": "ldraw/src/library.rs", "rank": 14, "score": 32568.85291265064 }, { "content": " writer.write_all(format!(\"0 {}\\n\", self.description).as_bytes())?;\n\n writer.write_all(format!(\"0 Name: {}\\n\", self.name).as_bytes())?;\n\n writer.write_all(format!(\"0 Author: {}\\n\", self.author).as_bytes())?;\n\n for header in &self.headers {\n\n header.write(writer)?;\n\n }\n\n writer.write_all(b\"\\n\")?;\n\n match self.bfc.write(writer) {\n\n Ok(()) => {\n\n writer.write_all(b\"\\n\")?;\n\n }\n\n Err(SerializeError::NoSerializable) => {}\n\n Err(e) => return Err(e),\n\n };\n\n for command in &self.commands {\n\n command.write(writer)?;\n\n }\n\n writer.write_all(b\"0\\n\\n\")?;\n\n\n\n Ok(())\n", "file_path": "ldraw/src/writer.rs", "rank": 15, "score": 32568.85291265064 }, { "content": " fn from(alias: String) -> PartAlias {\n\n PartAlias {\n\n normalized: Self::normalize(&alias),\n\n original: alias,\n\n }\n\n }\n\n}\n\n\n\nimpl From<&String> for PartAlias {\n\n fn from(alias: &String) -> PartAlias {\n\n PartAlias {\n\n normalized: Self::normalize(alias),\n\n original: alias.clone(),\n\n }\n\n }\n\n}\n\n\n\nimpl From<&str> for PartAlias {\n\n fn from(alias: &str) -> PartAlias {\n\n let string = alias.to_string();\n", "file_path": "ldraw/src/lib.rs", "rank": 16, "score": 32568.85291265064 }, { "content": " luminance: 0,\n\n finish: Finish::Plastic,\n\n }\n\n }\n\n\n\n pub fn resolve(code: u32, materials: &MaterialRegistry) -> ColorReference {\n\n 
match code {\n\n 16 => return ColorReference::Current,\n\n 24 => return ColorReference::Complement,\n\n _ => (),\n\n }\n\n\n\n if let Some(c) = materials.get(&code) {\n\n return ColorReference::Material(c.clone());\n\n }\n\n\n\n if (256..=512).contains(&code) {\n\n if let Some(c) = ColorReference::resolve_blended(code, materials) {\n\n return ColorReference::Material(c);\n\n }\n", "file_path": "ldraw/src/color.rs", "rank": 17, "score": 32568.85291265064 }, { "content": " }\n\n}\n\n\n\nimpl Eq for ColorReference {}\n\n\n\nimpl PartialEq for ColorReference {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.code() == other.code()\n\n }\n\n}\n\n\n\nimpl Hash for ColorReference {\n\n fn hash<H: Hasher>(&self, state: &mut H) {\n\n self.code().hash(state)\n\n }\n\n}\n\n\n\nimpl ColorReference {\n\n pub fn code(&self) -> u32 {\n\n match self {\n", "file_path": "ldraw/src/color.rs", "rank": 18, "score": 32568.85291265064 }, { "content": " }\n\n}\n\n\n\nimpl<T> PartDirectory<T> {\n\n pub fn add(&mut self, key: PartAlias, entry: PartEntry<T>) {\n\n match entry.kind {\n\n PartKind::Primitive => self.primitives.insert(key, entry),\n\n PartKind::Part => self.parts.insert(key, entry),\n\n };\n\n }\n\n\n\n pub fn query(&self, key: &PartAlias) -> Option<&PartEntry<T>> {\n\n match self.parts.get(key) {\n\n Some(v) => Some(v),\n\n None => match self.primitives.get(key) {\n\n Some(v) => Some(v),\n\n None => None,\n\n },\n\n }\n\n }\n", "file_path": "ldraw/src/library.rs", "rank": 19, "score": 32568.85291265064 }, { "content": "\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct PartEntry<T> {\n\n pub kind: PartKind,\n\n pub locator: T,\n\n}\n\n\n\nimpl<T> Clone for PartEntry<T>\n\nwhere\n\n T: Clone,\n\n{\n\n fn clone(&self) -> PartEntry<T> {\n\n PartEntry {\n\n kind: self.kind,\n\n locator: self.locator.clone(),\n\n }\n\n }\n\n}\n\n\n\nimpl<T> hash::Hash for PartEntry<T>\n", "file_path": "ldraw/src/library.rs", "rank": 20, "score": 32568.85291265064 }, { "content": " 
}\n\n}\n\n\n\nimpl fmt::Display for ColorDefinitionParseError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n ColorDefinitionParseError::ParseError(e) => write!(f, \"{}\", e),\n\n ColorDefinitionParseError::DocumentParseError(e) => write!(f, \"{}\", e),\n\n ColorDefinitionParseError::UnknownMaterial(e) => write!(f, \"Unknown material: {}\", e),\n\n }\n\n }\n\n}\n\n\n\nimpl Error for ColorDefinitionParseError {\n\n fn source(&self) -> Option<&(dyn Error + 'static)> {\n\n match self {\n\n ColorDefinitionParseError::ParseError(e) => Some(e),\n\n ColorDefinitionParseError::DocumentParseError(e) => Some(e),\n\n _ => None,\n\n }\n", "file_path": "ldraw/src/error.rs", "rank": 21, "score": 32568.85291265064 }, { "content": " fn bitxor(self, rhs: bool) -> Self::Output {\n\n match (self, rhs) {\n\n (Winding::Ccw, false) => Winding::Ccw,\n\n (Winding::Ccw, true) => Winding::Cw,\n\n (Winding::Cw, false) => Winding::Cw,\n\n (Winding::Cw, true) => Winding::Ccw,\n\n }\n\n }\n\n}\n\n\n\nimpl BitXor<bool> for &Winding {\n\n type Output = Winding;\n\n\n\n fn bitxor(self, rhs: bool) -> Self::Output {\n\n match (self, rhs) {\n\n (Winding::Ccw, false) => Winding::Ccw,\n\n (Winding::Ccw, true) => Winding::Cw,\n\n (Winding::Cw, false) => Winding::Cw,\n\n (Winding::Cw, true) => Winding::Ccw,\n\n }\n\n }\n\n}\n", "file_path": "ldraw/src/lib.rs", "rank": 22, "score": 32568.85291265064 }, { "content": " let d = Vector4::new(\n\n next_token_f32(iterator)?,\n\n next_token_f32(iterator)?,\n\n next_token_f32(iterator)?,\n\n 1.0,\n\n );\n\n Ok(Quad {\n\n color: ColorReference::resolve(color, materials),\n\n a,\n\n b,\n\n c,\n\n d,\n\n })\n\n}\n\n\n", "file_path": "ldraw/src/parser.rs", "rank": 23, "score": 32568.85291265064 }, { "content": " }\n\n\n\n pub fn get(&self, elem: &PartReference) -> Option<&ResolutionResult<T>> {\n\n self.map.get(&elem.name)\n\n }\n\n\n\n fn traverse_dependencies(&self, document: &Document, list: &mut HashSet<PartAlias>) {\n\n for 
part_ref in document.iter_refs() {\n\n match self.get(part_ref) {\n\n Some(&ResolutionResult::Subpart(doc)) => {\n\n self.traverse_dependencies(doc, list);\n\n }\n\n Some(ResolutionResult::Associated(part)) => {\n\n if !list.contains(&part_ref.name) {\n\n list.insert(part_ref.name.clone());\n\n }\n\n self.traverse_dependencies(part, list);\n\n }\n\n _ => {}\n\n }\n", "file_path": "ldraw/src/library.rs", "rank": 24, "score": 32568.85291265064 }, { "content": "impl<'a> Deserialize<'a> for PartAlias {\n\n fn deserialize<D: Deserializer<'a>>(deserializer: D) -> Result<Self, D::Error> {\n\n Ok(PartAlias::from(\n\n &deserializer.deserialize_str(StringVisitor)?,\n\n ))\n\n }\n\n}\n\n\n\nimpl cmp::Eq for PartAlias {}\n\n\n\nimpl cmp::PartialEq for PartAlias {\n\n fn eq(&self, other: &PartAlias) -> bool {\n\n self.normalized.eq(&other.normalized)\n\n }\n\n}\n\n\n\nimpl Hash for PartAlias {\n\n fn hash<H: Hasher>(&self, state: &mut H) {\n\n self.normalized.hash(state)\n\n }\n", "file_path": "ldraw/src/lib.rs", "rank": 25, "score": 32568.85291265064 }, { "content": "impl LDrawWriter for Line {\n\n fn write(&self, writer: &mut dyn Write) -> Result<(), SerializeError> {\n\n writer.write_all(\n\n format!(\n\n \"2 {} {} {}\\n\",\n\n self.color,\n\n serialize_vec3(&self.a),\n\n serialize_vec3(&self.b)\n\n )\n\n .as_bytes(),\n\n )?;\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl LDrawWriter for Triangle {\n\n fn write(&self, writer: &mut dyn Write) -> Result<(), SerializeError> {\n\n writer.write_all(\n\n format!(\n\n \"2 {} {} {} {}\\n\",\n", "file_path": "ldraw/src/writer.rs", "rank": 26, "score": 32568.85291265064 }, { "content": " ParseError::InvalidToken(e.to_string()),\n\n ));\n\n }\n\n };\n\n let (er, eg, eb) = next_token_rgb(&mut it)?;\n\n\n\n loop {\n\n let token = match next_token(&mut it, false) {\n\n Ok(v) => v,\n\n Err(ParseError::EndOfLine) => break,\n\n Err(e) => return Err(ColorDefinitionParseError::ParseError(e)),\n\n };\n\n\n\n match token.as_str() {\n\n \"ALPHA\" => {\n\n 
alpha = next_token_u32(&mut it)? as u8;\n\n }\n\n \"LUMINANCE\" => {\n\n luminance = next_token_u32(&mut it)? as u8;\n\n }\n", "file_path": "ldraw/src/parser.rs", "rank": 27, "score": 32568.85291265064 }, { "content": " }\n\n}\n\n\n\nimpl LDrawWriter for MultipartDocument {\n\n fn write(&self, writer: &mut dyn Write) -> Result<(), SerializeError> {\n\n self.body.write(writer)?;\n\n for subpart in self.subparts.values() {\n\n writer.write_all(format!(\"0 FILE {}\\n\", subpart.name).as_bytes())?;\n\n subpart.write(writer)?;\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl LDrawWriter for Meta {\n\n fn write(&self, writer: &mut dyn Write) -> Result<(), SerializeError> {\n\n match self {\n\n Meta::Comment(message) => {\n\n for line in message.lines() {\n", "file_path": "ldraw/src/writer.rs", "rank": 28, "score": 32568.85291265064 }, { "content": " match self {\n\n BfcCertification::Certify(_) => Some(true),\n\n BfcCertification::NoCertify => Some(false),\n\n BfcCertification::NotApplicable => None,\n\n }\n\n }\n\n\n\n pub fn get_winding(&self) -> Option<Winding> {\n\n match self {\n\n BfcCertification::Certify(w) => Some(*w),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct Document {\n\n pub name: String,\n\n pub description: String,\n\n pub author: String,\n\n pub bfc: BfcCertification,\n\n pub headers: Vec<Header>,\n\n pub commands: Vec<Command>,\n\n}\n\n\n", "file_path": "ldraw/src/document.rs", "rank": 29, "score": 32568.85291265064 }, { "content": " ColorReference::Unknown(c) => *c,\n\n ColorReference::Current => 16,\n\n ColorReference::Complement => 24,\n\n ColorReference::Material(m) => m.code,\n\n }\n\n }\n\n\n\n pub fn is_current(&self) -> bool {\n\n matches!(self, ColorReference::Current)\n\n }\n\n\n\n pub fn is_complement(&self) -> bool {\n\n matches!(self, ColorReference::Complement)\n\n }\n\n\n\n pub fn is_material(&self) -> bool {\n\n matches!(self, ColorReference::Material(_))\n\n }\n\n\n\n pub fn get_material(&self) -> 
Option<&Material> {\n", "file_path": "ldraw/src/color.rs", "rank": 30, "score": 32568.85291265064 }, { "content": " })\n\n }\n\n }\n\n )\n\n);\n\n\n\ndefine_iterator!(iter_meta, iter_meta_mut, Command::Meta, Meta);\n\ndefine_iterator!(\n\n iter_refs,\n\n iter_refs_mut,\n\n Command::PartReference,\n\n PartReference\n\n);\n\ndefine_iterator!(iter_lines, iter_lines_mut, Command::Line, Line);\n\ndefine_iterator!(\n\n iter_triangles,\n\n iter_triangles_mut,\n\n Command::Triangle,\n\n Triangle\n\n);\n", "file_path": "ldraw/src/document.rs", "rank": 31, "score": 32568.85291265064 }, { "content": " fn default() -> Self {\n\n Material {\n\n code: 0,\n\n name: String::from(\"Black\"),\n\n color: Rgba::new(0x05, 0x13, 0x1d, 0xff),\n\n edge: Rgba::new(0x59, 0x59, 0x59, 0xff),\n\n luminance: 0x00,\n\n finish: Finish::Plastic,\n\n }\n\n }\n\n}\n\n\n\nimpl Material {\n\n pub fn is_translucent(&self) -> bool {\n\n self.color.alpha() < 255u8\n\n }\n\n}\n\n\n\npub type MaterialRegistry = HashMap<u32, Material>;\n\n\n", "file_path": "ldraw/src/color.rs", "rank": 32, "score": 32568.85291265064 }, { "content": " }\n\n\n\n if (code & 0xff00_0000) == 0x0200_0000 {\n\n return ColorReference::Material(ColorReference::resolve_rgb_2(code));\n\n } else if (code & 0xff00_0000) == 0x0400_0000 {\n\n return ColorReference::Material(ColorReference::resolve_rgb_4(code));\n\n }\n\n\n\n ColorReference::Unknown(code)\n\n }\n\n\n\n pub fn get_color(&self) -> Option<Vector4> {\n\n match self {\n\n ColorReference::Material(m) => Some(m.color.into()),\n\n _ => None,\n\n }\n\n }\n\n\n\n pub fn get_edge_color(&self) -> Option<Vector4> {\n\n match self {\n\n ColorReference::Material(m) => Some(m.edge.into()),\n\n _ => None,\n\n }\n\n }\n\n}\n", "file_path": "ldraw/src/color.rs", "rank": 33, "score": 32568.85291265064 }, { "content": "where\n\n T: hash::Hash,\n\n{\n\n fn hash<H: hash::Hasher>(&self, state: &mut H) {\n\n self.locator.hash(state)\n\n }\n\n}\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub 
struct PartDirectory<T> {\n\n pub primitives: HashMap<PartAlias, PartEntry<T>>,\n\n pub parts: HashMap<PartAlias, PartEntry<T>>,\n\n}\n\n\n\nimpl<T> Default for PartDirectory<T> {\n\n fn default() -> PartDirectory<T> {\n\n PartDirectory {\n\n primitives: HashMap::new(),\n\n parts: HashMap::new(),\n\n }\n", "file_path": "ldraw/src/library.rs", "rank": 34, "score": 32568.85291265064 }, { "content": " fraction,\n\n vfraction,\n\n size,\n\n minsize,\n\n maxsize,\n\n }))\n\n }\n\n \"SPECKLE\" => {\n\n let mut alpha = 255u8;\n\n let mut luminance = 0u8;\n\n let mut fraction = 0.0;\n\n let mut size = 0u32;\n\n let mut minsize = 0.0;\n\n let mut maxsize = 0.0;\n\n match next_token(iterator, false)?.as_str() {\n\n \"VALUE\" => (),\n\n e => {\n\n return Err(ColorDefinitionParseError::ParseError(\n\n ParseError::InvalidToken(e.to_string()),\n\n ));\n", "file_path": "ldraw/src/parser.rs", "rank": 35, "score": 32568.85291265064 }, { "content": " write!(f, \"{} (at line {})\", self.error, self.line)\n\n }\n\n}\n\n\n\nimpl Error for DocumentParseError {\n\n fn source(&self) -> Option<&(dyn Error + 'static)> {\n\n Some(&self.error)\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum ColorDefinitionParseError {\n\n ParseError(ParseError),\n\n DocumentParseError(DocumentParseError),\n\n UnknownMaterial(String),\n\n}\n\n\n\nimpl From<ParseError> for ColorDefinitionParseError {\n\n fn from(e: ParseError) -> ColorDefinitionParseError {\n\n ColorDefinitionParseError::ParseError(e)\n", "file_path": "ldraw/src/error.rs", "rank": 36, "score": 32568.85291265064 }, { "content": " }\n\n}\n\n\n\nimpl LDrawWriter for BfcStatement {\n\n fn write(&self, writer: &mut dyn Write) -> Result<(), SerializeError> {\n\n match self {\n\n BfcStatement::Winding(Winding::Cw) => writer.write_all(b\"0 BFC CW\\n\")?,\n\n BfcStatement::Winding(Winding::Ccw) => writer.write_all(b\"0 BFC CCW\\n\")?,\n\n BfcStatement::Clip(None) => writer.write_all(b\"0 BFC CLIP\\n\")?,\n\n BfcStatement::Clip(Some(Winding::Cw)) => 
writer.write_all(b\"0 BFC CLIP CW\\n\")?,\n\n BfcStatement::Clip(Some(Winding::Ccw)) => writer.write_all(b\"0 BFC CLIP CW\\n\")?,\n\n BfcStatement::NoClip => writer.write_all(b\"0 BFC NOCLIP\\n\")?,\n\n BfcStatement::InvertNext => writer.write_all(b\"0 BFC INVERTNEXT\\n\")?,\n\n };\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl LDrawWriter for Document {\n\n fn write(&self, writer: &mut dyn Write) -> Result<(), SerializeError> {\n", "file_path": "ldraw/src/writer.rs", "rank": 37, "score": 32568.85291265064 }, { "content": "impl Document {\n\n pub fn has_geometry(&self) -> bool {\n\n for item in self.commands.iter() {\n\n match item {\n\n Command::Line(_)\n\n | Command::Triangle(_)\n\n | Command::Quad(_)\n\n | Command::OptionalLine(_) => {\n\n return true;\n\n }\n\n _ => (),\n\n }\n\n }\n\n\n\n false\n\n }\n\n\n\n pub fn list_dependencies(&self) -> HashSet<PartAlias> {\n\n let mut result = HashSet::new();\n\n\n", "file_path": "ldraw/src/document.rs", "rank": 38, "score": 32568.85291265064 }, { "content": " Plastic,\n\n Chrome,\n\n Pearlescent,\n\n Rubber,\n\n MatteMetallic,\n\n Metal,\n\n Custom(CustomizedMaterial),\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct Material {\n\n pub code: u32,\n\n pub name: String,\n\n pub color: Rgba,\n\n pub edge: Rgba,\n\n pub luminance: u8,\n\n pub finish: Finish,\n\n}\n\n\n\nimpl Default for Material {\n", "file_path": "ldraw/src/color.rs", "rank": 39, "score": 32568.85291265064 }, { "content": "#[derive(Clone, Debug)]\n\npub enum ColorReference {\n\n Unknown(u32),\n\n Current,\n\n Complement,\n\n Material(Material),\n\n}\n\n\n\nimpl Serialize for ColorReference {\n\n fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {\n\n serializer.serialize_u32(self.code())\n\n }\n\n}\n\n\n", "file_path": "ldraw/src/color.rs", "rank": 40, "score": 32568.85291265064 }, { "content": " match self {\n\n ColorReference::Material(m) => Some(m),\n\n _ => None,\n\n }\n\n }\n\n\n\n fn resolve_blended(code: u32, materials: 
&MaterialRegistry) -> Option<Material> {\n\n let code1 = code / 16;\n\n let code2 = code % 16;\n\n\n\n let color1 = match materials.get(&code1) {\n\n Some(c) => c,\n\n None => return None,\n\n };\n\n let color2 = match materials.get(&code2) {\n\n Some(c) => c,\n\n None => return None,\n\n };\n\n\n\n let new_color = Rgba::new(\n", "file_path": "ldraw/src/color.rs", "rank": 41, "score": 32568.85291265064 }, { "content": " }\n\n\n\n #[test]\n\n fn test_parse_color_definition() {\n\n let materials = set_up_materials().unwrap();\n\n\n\n println!(\"{:#?}\\n\", materials);\n\n }\n\n\n\n #[test]\n\n fn test_parse_single_document() {\n\n let materials = set_up_materials().unwrap();\n\n let mut reader_part = BufReader::new(File::open(PATH_PART).unwrap());\n\n match parse_single_document::<BufReader<File>>(&materials, &mut reader_part) {\n\n Ok(model) => {\n\n println!(\"{:#?}\\n\", model);\n\n }\n\n Err(e) => {\n\n assert!(false, \"{}\", e);\n\n }\n", "file_path": "ldraw/src/parser.rs", "rank": 42, "score": 32568.85291265064 }, { "content": " z,\n\n 0.0,\n\n 0.0,\n\n 0.0,\n\n 1.0,\n\n )\n\n .transpose();\n\n let name = next_token(iterator, true)?;\n\n Ok(PartReference {\n\n color: ColorReference::resolve(color, materials),\n\n matrix,\n\n name: PartAlias::from(name),\n\n })\n\n}\n\n\n", "file_path": "ldraw/src/parser.rs", "rank": 43, "score": 32568.85291265064 }, { "content": " )?;\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl LDrawWriter for Command {\n\n fn write(&self, writer: &mut dyn Write) -> Result<(), SerializeError> {\n\n match self {\n\n Command::Meta(meta) => meta.write(writer),\n\n Command::PartReference(ref_) => ref_.write(writer),\n\n Command::Line(line) => line.write(writer),\n\n Command::Triangle(triangle) => triangle.write(writer),\n\n Command::Quad(quad) => quad.write(writer),\n\n Command::OptionalLine(optional_line) => optional_line.write(writer),\n\n }\n\n }\n\n}\n", "file_path": "ldraw/src/writer.rs", "rank": 44, "score": 32568.85291265064 }, { "content": " 
self.resolve(&doc, parent);\n\n continue;\n\n }\n\n }\n\n\n\n let cached = self.cache.read().unwrap().query(name);\n\n if let Some(e) = cached {\n\n self.map\n\n .insert(name.clone(), ResolutionResult::Associated(Arc::clone(&e)));\n\n self.resolve(&e, None);\n\n continue;\n\n }\n\n\n\n if let Some(e) = self.directory.read().unwrap().query(name) {\n\n self.map\n\n .insert(name.clone(), ResolutionResult::Pending(e.clone()));\n\n } else {\n\n self.map.insert(name.clone(), ResolutionResult::Missing);\n\n }\n\n }\n", "file_path": "ldraw/src/library.rs", "rank": 45, "score": 32568.85291265064 }, { "content": " });\n\n }\n\n };\n\n let mut it = line.chars();\n\n match next_token(&mut it, false) {\n\n Ok(token) => match token.as_str() {\n\n \"0\" => match parse_line_0(&mut it) {\n\n Ok(val) => match val {\n\n Line0::BfcCertification(bfc_) => {\n\n bfc = bfc_;\n\n }\n\n Line0::File(file_) => {\n\n if multipart {\n\n if !description.is_empty() {\n\n next = Some(file_);\n\n break 'read_loop;\n\n }\n\n } else {\n\n return Err(DocumentParseError {\n\n line: index + 1,\n", "file_path": "ldraw/src/parser.rs", "rank": 46, "score": 32568.85291265064 }, { "content": " error: ParseError::MultipartDocument,\n\n });\n\n }\n\n }\n\n Line0::Name(name_) => {\n\n name = name_;\n\n }\n\n Line0::Author(author_) => {\n\n author = author_;\n\n }\n\n Line0::Meta(meta) => {\n\n if let Meta::Comment(comment) = meta {\n\n if description.is_empty() {\n\n description = comment;\n\n } else {\n\n commands.push(Command::Meta(Meta::Comment(comment)));\n\n }\n\n } else {\n\n commands.push(Command::Meta(meta));\n\n }\n", "file_path": "ldraw/src/parser.rs", "rank": 47, "score": 32568.85291265064 }, { "content": " };\n\n\n\n let mut reader_mpd = BufReader::new(File::open(PATH_MPD).unwrap());\n\n match parse_single_document::<BufReader<File>>(&materials, &mut reader_mpd) {\n\n Ok(_) => {\n\n assert!(false, \"Should not read properly\");\n\n }\n\n Err(e) => {\n\n assert!(if let ParseError::MultipartDocument = 
e.error {\n\n true\n\n } else {\n\n false\n\n });\n\n }\n\n };\n\n }\n\n\n\n #[test]\n\n fn test_parse_multipart_document() {\n\n let materials = set_up_materials().unwrap();\n", "file_path": "ldraw/src/parser.rs", "rank": 48, "score": 32568.85291265064 }, { "content": " },\n\n \"Author:\" => match next_token(&mut inner_iterator, true) {\n\n Ok(msg) => Ok(Line0::Author(msg)),\n\n Err(_) => Ok(Line0::Author(String::from(\"\"))),\n\n },\n\n \"FILE\" => match next_token(&mut inner_iterator, true) {\n\n Ok(msg) => Ok(Line0::File(msg)),\n\n Err(e) => Err(e),\n\n },\n\n \"STEP\" => Ok(Line0::Meta(Meta::Step)),\n\n \"WRITE\" => match next_token(&mut inner_iterator, true) {\n\n Ok(msg) => Ok(Line0::Meta(Meta::Write(msg))),\n\n Err(e) => Err(e),\n\n },\n\n \"PRINT\" => match next_token(&mut inner_iterator, true) {\n\n Ok(msg) => Ok(Line0::Meta(Meta::Print(msg))),\n\n Err(e) => Err(e),\n\n },\n\n \"CLEAR\" => Ok(Line0::Meta(Meta::Clear)),\n\n \"PAUSE\" => Ok(Line0::Meta(Meta::Pause)),\n\n \"SAVE\" => Ok(Line0::Meta(Meta::Save)),\n\n _ => Ok(Line0::Meta(Meta::Comment(text))),\n\n }\n\n}\n\n\n", "file_path": "ldraw/src/parser.rs", "rank": 49, "score": 32568.85291265064 }, { "content": " directory: Arc<RwLock<PartDirectory<T>>>,\n\n cache: Arc<RwLock<PartCache>>,\n\n pub map: HashMap<PartAlias, ResolutionResult<'a, T>>,\n\n}\n\n\n\nimpl<'a, 'b, T: Clone> ResolutionMap<'a, T> {\n\n pub fn new(\n\n directory: Arc<RwLock<PartDirectory<T>>>,\n\n cache: Arc<RwLock<PartCache>>,\n\n ) -> ResolutionMap<'a, T> {\n\n ResolutionMap {\n\n directory,\n\n cache,\n\n map: HashMap::new(),\n\n }\n\n }\n\n\n\n pub fn get_pending(&'b self) -> impl Iterator<Item = (&'b PartAlias, &'b PartEntry<T>)> {\n\n self.map.iter().filter_map(|(key, value)| match value {\n\n ResolutionResult::Pending(a) => Some((key, a)),\n", "file_path": "ldraw/src/library.rs", "rank": 50, "score": 32568.85291265064 }, { "content": " self.color,\n\n serialize_vec3(&self.a),\n\n serialize_vec3(&self.b),\n\n 
serialize_vec3(&self.c)\n\n )\n\n .as_bytes(),\n\n )?;\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl LDrawWriter for Quad {\n\n fn write(&self, writer: &mut dyn Write) -> Result<(), SerializeError> {\n\n writer.write_all(\n\n format!(\n\n \"2 {} {} {} {} {}\\n\",\n\n self.color,\n\n serialize_vec3(&self.a),\n\n serialize_vec3(&self.b),\n\n serialize_vec3(&self.c),\n", "file_path": "ldraw/src/writer.rs", "rank": 51, "score": 32568.85291265064 }, { "content": "\n\nimpl PartCache {\n\n pub fn register(&mut self, kind: PartKind, alias: PartAlias, document: Document) {\n\n match kind {\n\n PartKind::Part => self.parts.insert(alias, Arc::new(document)),\n\n PartKind::Primitive => self.primitives.insert(alias, Arc::new(document)),\n\n };\n\n }\n\n\n\n pub fn query(&self, alias: &PartAlias) -> Option<Arc<Document>> {\n\n match self.parts.get(alias) {\n\n Some(part) => Some(Arc::clone(part)),\n\n None => self.primitives.get(alias).map(Arc::clone),\n\n }\n\n }\n\n\n\n fn collect_round(&mut self, collection_strategy: CacheCollectionStrategy) -> usize {\n\n let prev_size = self.parts.len() + self.primitives.len();\n\n match collection_strategy {\n\n CacheCollectionStrategy::Parts => {\n", "file_path": "ldraw/src/library.rs", "rank": 52, "score": 32568.85291265064 }, { "content": " writer.write_all(b\"0 PAUSE\\n\")?;\n\n }\n\n Meta::Save => {\n\n writer.write_all(b\"0 SAVE\\n\")?;\n\n }\n\n Meta::Bfc(bfc) => {\n\n bfc.write(writer)?;\n\n }\n\n };\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl LDrawWriter for PartReference {\n\n fn write(&self, writer: &mut dyn Write) -> Result<(), SerializeError> {\n\n let m = self.matrix.transpose();\n\n writer.write_all(\n\n format!(\n\n \"1 {} {} {} {} {} {} {} {} {} {} {} {} {}\\n\",\n", "file_path": "ldraw/src/writer.rs", "rank": 53, "score": 32568.85291265064 }, { "content": " serialize_vec3(&self.d)\n\n )\n\n .as_bytes(),\n\n )?;\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl LDrawWriter for OptionalLine {\n\n fn write(&self, writer: &mut dyn Write) -> Result<(), 
SerializeError> {\n\n writer.write_all(\n\n format!(\n\n \"2 {} {} {} {} {}\\n\",\n\n self.color,\n\n serialize_vec3(&self.a),\n\n serialize_vec3(&self.b),\n\n serialize_vec3(&self.c),\n\n serialize_vec3(&self.d)\n\n )\n\n .as_bytes(),\n", "file_path": "ldraw/src/writer.rs", "rank": 54, "score": 32568.85291265064 }, { "content": " fn from(e: IoError) -> LibraryError {\n\n LibraryError::IoError(Box::new(e))\n\n }\n\n}\n\n\n\nimpl fmt::Display for LibraryError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n LibraryError::NoLDrawDir => write!(f, \"No LDraw library found.\"),\n\n LibraryError::IoError(err) => write!(f, \"{}\", err),\n\n }\n\n }\n\n}\n\n\n\nimpl Error for LibraryError {\n\n fn source(&self) -> Option<&(dyn Error + 'static)> {\n\n match self {\n\n LibraryError::IoError(e) => Some(e),\n\n _ => None,\n\n }\n\n }\n\n}\n", "file_path": "ldraw/src/error.rs", "rank": 55, "score": 32568.85291265064 }, { "content": " self.parts\n\n .retain(|_, v| Arc::strong_count(v) > 1 || Arc::weak_count(v) > 0);\n\n }\n\n CacheCollectionStrategy::Primitives => {\n\n self.primitives\n\n .retain(|_, v| Arc::strong_count(v) > 1 || Arc::weak_count(v) > 0);\n\n }\n\n CacheCollectionStrategy::PartsAndPrimitives => {\n\n self.parts\n\n .retain(|_, v| Arc::strong_count(v) > 1 || Arc::weak_count(v) > 0);\n\n self.primitives\n\n .retain(|_, v| Arc::strong_count(v) > 1 || Arc::weak_count(v) > 0);\n\n }\n\n };\n\n prev_size - self.parts.len() - self.primitives.len()\n\n }\n\n\n\n pub fn collect(&mut self, collection_strategy: CacheCollectionStrategy) -> usize {\n\n let mut total_collected = 0;\n\n loop {\n", "file_path": "ldraw/src/library.rs", "rank": 56, "score": 32568.85291265064 }, { "content": " ParseError::InvalidToken(e.to_string()),\n\n ));\n\n }\n\n };\n\n let code = next_token_u32(&mut it)?;\n\n\n\n match next_token(&mut it, false)?.as_str() {\n\n \"VALUE\" => (),\n\n e => {\n\n return Err(ColorDefinitionParseError::ParseError(\n\n 
ParseError::InvalidToken(e.to_string()),\n\n ));\n\n }\n\n };\n\n let (cr, cg, cb) = next_token_rgb(&mut it)?;\n\n\n\n match next_token(&mut it, false)?.as_str() {\n\n \"EDGE\" => (),\n\n e => {\n\n return Err(ColorDefinitionParseError::ParseError(\n", "file_path": "ldraw/src/parser.rs", "rank": 57, "score": 32568.85291265064 }, { "content": " let (vr, vg, vb) = next_token_rgb(iterator)?;\n\n loop {\n\n let token = match next_token(iterator, false) {\n\n Ok(v) => v,\n\n Err(ParseError::EndOfLine) => break,\n\n Err(e) => return Err(ColorDefinitionParseError::ParseError(e)),\n\n };\n\n\n\n match token.as_str() {\n\n \"ALPHA\" => {\n\n alpha = next_token_u32(iterator)? as u8;\n\n }\n\n \"LUMINANCE\" => {\n\n luminance = next_token_u32(iterator)? as u8;\n\n }\n\n \"FRACTION\" => {\n\n fraction = next_token_f32(iterator)?;\n\n }\n\n \"VFRACTION\" => {\n\n vfraction = next_token_f32(iterator)?;\n", "file_path": "ldraw/src/parser.rs", "rank": 58, "score": 32568.85291265064 }, { "content": " ParseError::InvalidToken(token.clone()),\n\n ));\n\n }\n\n }\n\n }\n\n\n\n materials.insert(\n\n code,\n\n Material {\n\n code,\n\n name,\n\n color: Rgba::new(cr, cg, cb, alpha),\n\n edge: Rgba::new(er, eg, eb, 255),\n\n luminance,\n\n finish,\n\n },\n\n );\n\n }\n\n\n\n Ok(materials)\n", "file_path": "ldraw/src/parser.rs", "rank": 59, "score": 32568.85291265064 }, { "content": " let edge_red = (((code & 0xf0_0000) >> 20) * 16) as u8;\n\n let edge_green = (((code & 0x0f_0000) >> 16) * 16) as u8;\n\n let edge_blue = (((code & 0x00_f000) >> 12) * 16) as u8;\n\n\n\n Material {\n\n code,\n\n name: format!(\"RGB Color ({:03x})\", code & 0xfff),\n\n color: Rgba::new(red, green, blue, 255),\n\n edge: Rgba::new(edge_red, edge_green, edge_blue, 255),\n\n luminance: 0,\n\n finish: Finish::Plastic,\n\n }\n\n }\n\n\n\n fn resolve_rgb_2(code: u32) -> Material {\n\n Material {\n\n code,\n\n name: format!(\"RGB Color ({:06x})\", code & 0xff_ffff),\n\n color: Rgba::from_value(0xff00_0000 | (code & 
0xff_ffff)),\n\n edge: Rgba::from_value(0xff59_5959),\n", "file_path": "ldraw/src/color.rs", "rank": 60, "score": 32568.85291265064 }, { "content": " next_token_f32(iterator)?,\n\n 1.0,\n\n );\n\n Ok(Triangle {\n\n color: ColorReference::resolve(color, materials),\n\n a,\n\n b,\n\n c,\n\n })\n\n}\n\n\n", "file_path": "ldraw/src/parser.rs", "rank": 61, "score": 32568.85291265064 }, { "content": " Clear,\n\n Pause,\n\n Save,\n\n Bfc(BfcStatement),\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct PartReference {\n\n pub color: ColorReference,\n\n pub matrix: Matrix4,\n\n pub name: PartAlias,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct Line {\n\n pub color: ColorReference,\n\n pub a: Vector4,\n\n pub b: Vector4,\n\n}\n\n\n", "file_path": "ldraw/src/elements.rs", "rank": 62, "score": 32568.85291265064 }, { "content": " \"CHROME\" => {\n\n finish = Finish::Chrome;\n\n }\n\n \"PEARLESCENT\" => {\n\n finish = Finish::Pearlescent;\n\n }\n\n \"METAL\" => {\n\n finish = Finish::Metal;\n\n }\n\n \"RUBBER\" => {\n\n finish = Finish::Rubber;\n\n }\n\n \"MATTE_METALLIC\" => {\n\n finish = Finish::MatteMetallic;\n\n }\n\n \"MATERIAL\" => {\n\n finish = Finish::Custom(parse_customized_material(&mut it)?);\n\n }\n\n _ => {\n\n return Err(ColorDefinitionParseError::ParseError(\n", "file_path": "ldraw/src/parser.rs", "rank": 63, "score": 32568.85291265064 }, { "content": " color1.color.red() / 2 + color2.color.red() / 2,\n\n color1.color.green() / 2 + color2.color.green() / 2,\n\n color1.color.blue() / 2 + color2.color.blue() / 2,\n\n 255,\n\n );\n\n Some(Material {\n\n code,\n\n name: format!(\"Blended Color ({} and {})\", code1, code2),\n\n color: new_color,\n\n edge: Rgba::from_value(0xff59_5959),\n\n luminance: 0,\n\n finish: Finish::Plastic,\n\n })\n\n }\n\n\n\n fn resolve_rgb_4(code: u32) -> Material {\n\n let red = (((code & 0xf00) >> 8) * 16) as u8;\n\n let green = (((code & 0x0f0) >> 4) * 16) as u8;\n\n let blue = ((code & 0x00f) * 16) as u8;\n\n\n", "file_path": 
"ldraw/src/color.rs", "rank": 64, "score": 32568.85291265064 }, { "content": "impl From<Rgba> for Vector4 {\n\n fn from(src: Rgba) -> Vector4 {\n\n Vector4::new(\n\n f32::from(src.red()) / 255.0,\n\n f32::from(src.green()) / 255.0,\n\n f32::from(src.blue()) / 255.0,\n\n f32::from(src.alpha()) / 255.0,\n\n )\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct MaterialGlitter {\n\n pub value: Rgba,\n\n pub luminance: u8,\n\n pub fraction: f32,\n\n pub vfraction: f32,\n\n pub size: u32,\n\n pub minsize: f32,\n\n pub maxsize: f32,\n", "file_path": "ldraw/src/color.rs", "rank": 65, "score": 32568.85291265064 }, { "content": " \"SIZE\" => {\n\n size = next_token_u32(iterator)?;\n\n }\n\n \"MINSIZE\" => {\n\n minsize = next_token_f32(iterator)?;\n\n }\n\n \"MAXSIZE\" => {\n\n maxsize = next_token_f32(iterator)?;\n\n }\n\n _ => {\n\n return Err(ColorDefinitionParseError::ParseError(\n\n ParseError::InvalidToken(token.clone()),\n\n ));\n\n }\n\n }\n\n }\n\n Ok(CustomizedMaterial::Speckle(MaterialSpeckle {\n\n value: Rgba::new(vr, vg, vb, alpha),\n\n luminance,\n\n fraction,\n\n size,\n\n minsize,\n\n maxsize,\n\n }))\n\n }\n\n e => Err(ColorDefinitionParseError::UnknownMaterial(e.to_string())),\n\n }\n\n}\n\n\n", "file_path": "ldraw/src/parser.rs", "rank": 66, "score": 32568.85291265064 }, { "content": " ParseError::IoError(e) => Some(e),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct DocumentParseError {\n\n pub line: usize,\n\n pub error: ParseError,\n\n}\n\n\n\nimpl From<DocumentParseError> for ColorDefinitionParseError {\n\n fn from(e: DocumentParseError) -> ColorDefinitionParseError {\n\n ColorDefinitionParseError::DocumentParseError(e)\n\n }\n\n}\n\n\n\nimpl fmt::Display for DocumentParseError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n", "file_path": "ldraw/src/error.rs", "rank": 67, "score": 32568.85291265064 }, { "content": "}\n\n\n\n#[derive(Debug, Default)]\n\npub struct PartCache {\n\n primitives: HashMap<PartAlias, 
Arc<Document>>,\n\n parts: HashMap<PartAlias, Arc<Document>>,\n\n}\n\n\n\n#[derive(Copy, Clone, Debug)]\n\npub enum CacheCollectionStrategy {\n\n Parts,\n\n Primitives,\n\n PartsAndPrimitives,\n\n}\n\n\n\nimpl Drop for PartCache {\n\n fn drop(&mut self) {\n\n self.collect(CacheCollectionStrategy::PartsAndPrimitives);\n\n }\n\n}\n", "file_path": "ldraw/src/library.rs", "rank": 68, "score": 32568.85291265064 }, { "content": "}\n\n\n\n#[derive(Copy, Clone, Debug, PartialEq)]\n\npub enum Winding {\n\n Ccw,\n\n Cw,\n\n}\n\n\n\nimpl Winding {\n\n pub fn invert(self) -> Self {\n\n match self {\n\n Winding::Ccw => Winding::Cw,\n\n Winding::Cw => Winding::Ccw,\n\n }\n\n }\n\n}\n\n\n\nimpl BitXor<bool> for Winding {\n\n type Output = Self;\n\n\n", "file_path": "ldraw/src/lib.rs", "rank": 69, "score": 32568.85291265064 }, { "content": " next_token_f32(iterator)?,\n\n 1.0,\n\n );\n\n let d = Vector4::new(\n\n next_token_f32(iterator)?,\n\n next_token_f32(iterator)?,\n\n next_token_f32(iterator)?,\n\n 1.0,\n\n );\n\n Ok(OptionalLine {\n\n color: ColorReference::resolve(color, materials),\n\n a,\n\n b,\n\n c,\n\n d,\n\n })\n\n}\n\n\n", "file_path": "ldraw/src/parser.rs", "rank": 70, "score": 32568.85291265064 }, { "content": " traverse_dependencies(self, None, &mut result);\n\n\n\n result\n\n }\n\n}\n\n\n\nmacro_rules! 
define_iterator(\n\n ($fn:ident, $fn_mut:ident, $cmdval:path, $type:ty) => (\n\n impl<'a> Document {\n\n pub fn $fn(&'a self) -> impl Iterator<Item = &'a $type> {\n\n self.commands.iter().filter_map(|value| match value {\n\n $cmdval(m) => Some(m),\n\n _ => None,\n\n })\n\n }\n\n\n\n pub fn $fn_mut(&'a mut self) -> impl Iterator<Item = &'a mut $type> + 'a {\n\n self.commands.iter_mut().filter_map(|value| match value {\n\n $cmdval(m) => Some(m),\n\n _ => None,\n", "file_path": "ldraw/src/document.rs", "rank": 71, "score": 32568.85291265064 }, { "content": "#[derive(Clone, Debug)]\n\npub struct Triangle {\n\n pub color: ColorReference,\n\n pub a: Vector4,\n\n pub b: Vector4,\n\n pub c: Vector4,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct Quad {\n\n pub color: ColorReference,\n\n pub a: Vector4,\n\n pub b: Vector4,\n\n pub c: Vector4,\n\n pub d: Vector4,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct OptionalLine {\n\n pub color: ColorReference,\n", "file_path": "ldraw/src/elements.rs", "rank": 72, "score": 32568.85291265064 }, { "content": " }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum SerializeError {\n\n NoSerializable,\n\n IoError(Box<IoError>),\n\n}\n\n\n\nimpl From<IoError> for SerializeError {\n\n fn from(e: IoError) -> SerializeError {\n\n SerializeError::IoError(Box::new(e))\n\n }\n\n}\n\n\n\nimpl fmt::Display for SerializeError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n SerializeError::NoSerializable => write!(f, \"Statement is not serializable.\"),\n\n SerializeError::IoError(err) => write!(f, \"{}\", err),\n", "file_path": "ldraw/src/error.rs", "rank": 73, "score": 32568.85291265064 }, { "content": " }\n\n\n\n pub fn from_value(value: u32) -> Rgba {\n\n let r = ((value & 0x00ff_0000) >> 16) as u8;\n\n let g = ((value & 0x0000_ff00) >> 8) as u8;\n\n let b = (value & 0x0000_00ff) as u8;\n\n let a = ((value & 0xff00_0000) >> 24) as u8;\n\n Rgba {\n\n value: [r, g, b, a],\n\n }\n\n }\n\n\n\n pub fn red(self) -> u8 
{\n\n self.value[0]\n\n }\n\n\n\n pub fn green(self) -> u8 {\n\n self.value[1]\n\n }\n\n\n", "file_path": "ldraw/src/color.rs", "rank": 74, "score": 32568.85291265064 }, { "content": " let g = match u8::from_str_radix(gs.as_str(), 16) {\n\n Ok(v) => v,\n\n Err(_) => return Err(ParseError::TypeMismatch(\"u8\", gs)),\n\n };\n\n let b = match u8::from_str_radix(bs.as_str(), 16) {\n\n Ok(v) => v,\n\n Err(_) => return Err(ParseError::TypeMismatch(\"u8\", bs)),\n\n };\n\n\n\n Ok((r, g, b))\n\n}\n\n\n", "file_path": "ldraw/src/parser.rs", "rank": 75, "score": 32568.85291265064 }, { "content": " pub fn blue(self) -> u8 {\n\n self.value[2]\n\n }\n\n\n\n pub fn alpha(self) -> u8 {\n\n self.value[3]\n\n }\n\n}\n\n\n\nimpl From<&Rgba> for Vector4 {\n\n fn from(src: &Rgba) -> Vector4 {\n\n Vector4::new(\n\n f32::from(src.red()) / 255.0,\n\n f32::from(src.green()) / 255.0,\n\n f32::from(src.blue()) / 255.0,\n\n f32::from(src.alpha()) / 255.0,\n\n )\n\n }\n\n}\n\n\n", "file_path": "ldraw/src/color.rs", "rank": 76, "score": 32568.85291265064 }, { "content": "\n\n PartAlias {\n\n normalized: Self::normalize(&string),\n\n original: string,\n\n }\n\n }\n\n}\n\n\n\nimpl Display for PartAlias {\n\n fn fmt(&self, f: &mut Formatter) -> FmtResult {\n\n Display::fmt(&self.original, f)\n\n }\n\n}\n\n\n", "file_path": "ldraw/src/lib.rs", "rank": 77, "score": 32568.85291265064 }, { "content": " error: ParseError::UnexpectedCommand(token),\n\n });\n\n }\n\n },\n\n Err(ParseError::EndOfLine) => {}\n\n Err(e) => {\n\n return Err(DocumentParseError {\n\n line: index + 1,\n\n error: e,\n\n });\n\n }\n\n }\n\n }\n\n\n\n Ok((\n\n Document {\n\n name,\n\n description,\n\n author,\n\n bfc,\n\n headers,\n\n commands,\n\n },\n\n next,\n\n ))\n\n}\n\n\n", "file_path": "ldraw/src/parser.rs", "rank": 78, "score": 32568.85291265064 }, { "content": " pub a: Vector4,\n\n pub b: Vector4,\n\n pub c: Vector4,\n\n pub d: Vector4,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub enum Command {\n\n Meta(Meta),\n\n 
PartReference(PartReference),\n\n Line(Line),\n\n Triangle(Triangle),\n\n Quad(Quad),\n\n OptionalLine(OptionalLine),\n\n}\n", "file_path": "ldraw/src/elements.rs", "rank": 79, "score": 32568.85291265064 }, { "content": " }\n\n Line0::Header(header) => {\n\n headers.push(header);\n\n }\n\n },\n\n Err(e) => {\n\n return Err(DocumentParseError {\n\n line: index + 1,\n\n error: e,\n\n });\n\n }\n\n },\n\n \"1\" => match parse_line_1(materials, &mut it) {\n\n Ok(val) => commands.push(Command::PartReference(val)),\n\n Err(e) => {\n\n return Err(DocumentParseError {\n\n line: index + 1,\n\n error: e,\n\n });\n\n }\n", "file_path": "ldraw/src/parser.rs", "rank": 80, "score": 32568.85291265064 }, { "content": " },\n\n \"2\" => match parse_line_2(materials, &mut it) {\n\n Ok(val) => commands.push(Command::Line(val)),\n\n Err(e) => {\n\n return Err(DocumentParseError {\n\n line: index + 1,\n\n error: e,\n\n });\n\n }\n\n },\n\n \"3\" => match parse_line_3(materials, &mut it) {\n\n Ok(val) => commands.push(Command::Triangle(val)),\n\n Err(e) => {\n\n return Err(DocumentParseError {\n\n line: index + 1,\n\n error: e,\n\n });\n\n }\n\n },\n\n \"4\" => match parse_line_4(materials, &mut it) {\n", "file_path": "ldraw/src/parser.rs", "rank": 81, "score": 32568.85291265064 }, { "content": " self.color,\n\n m.x.w,\n\n m.y.w,\n\n m.z.w,\n\n m.x.x,\n\n m.x.y,\n\n m.x.z,\n\n m.y.x,\n\n m.y.y,\n\n m.y.z,\n\n m.z.x,\n\n m.z.y,\n\n m.z.z\n\n )\n\n .as_bytes(),\n\n )?;\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "ldraw/src/writer.rs", "rank": 82, "score": 32568.85291265064 }, { "content": " }\n\n };\n\n let (vr, vg, vb) = next_token_rgb(iterator)?;\n\n loop {\n\n let token = match next_token(iterator, false) {\n\n Ok(v) => v,\n\n Err(ParseError::EndOfLine) => break,\n\n Err(e) => return Err(ColorDefinitionParseError::ParseError(e)),\n\n };\n\n\n\n match token.as_str() {\n\n \"ALPHA\" => {\n\n alpha = next_token_u32(iterator)? 
as u8;\n\n }\n\n \"LUMINANCE\" => {\n\n luminance = next_token_u32(iterator)? as u8;\n\n }\n\n \"FRACTION\" => {\n\n fraction = next_token_f32(iterator)?;\n\n }\n", "file_path": "ldraw/src/parser.rs", "rank": 83, "score": 32568.85291265064 }, { "content": "define_iterator!(iter_quads, iter_quads_mut, Command::Quad, Quad);\n\ndefine_iterator!(\n\n iter_optional_lines,\n\n iter_optioanl_lines_mut,\n\n Command::OptionalLine,\n\n OptionalLine\n\n);\n\n\n\n#[derive(Clone, Debug)]\n\npub struct MultipartDocument {\n\n pub body: Document,\n\n pub subparts: HashMap<PartAlias, Document>,\n\n}\n\n\n\nimpl MultipartDocument {\n\n pub fn list_dependencies(&self) -> HashSet<PartAlias> {\n\n let mut result = HashSet::new();\n\n\n\n traverse_dependencies(&self.body, Some(self), &mut result);\n\n\n\n result\n\n }\n\n}\n", "file_path": "ldraw/src/document.rs", "rank": 84, "score": 32568.85291265064 }, { "content": "pub mod library_native;\n\npub mod parser;\n\npub mod writer;\n\n\n\npub type Matrix3 = Matrix3_<f32>;\n\npub type Matrix4 = Matrix4_<f32>;\n\npub type Vector2 = Vector2_<f32>;\n\npub type Vector3 = Vector3_<f32>;\n\npub type Vector4 = Vector4_<f32>;\n\npub type Point2 = Point2_<f32>;\n\npub type Point3 = Point3_<f32>;\n\n\n", "file_path": "ldraw/src/lib.rs", "rank": 85, "score": 32568.85291265064 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl Error for SerializeError {\n\n fn source(&self) -> Option<&(dyn Error + 'static)> {\n\n match self {\n\n SerializeError::IoError(e) => Some(e),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum LibraryError {\n\n NoLDrawDir,\n\n IoError(Box<IoError>),\n\n}\n\n\n\nimpl From<IoError> for LibraryError {\n", "file_path": "ldraw/src/error.rs", "rank": 86, "score": 32568.85291265064 }, { "content": " Ok(val) => commands.push(Command::Quad(val)),\n\n Err(e) => {\n\n return Err(DocumentParseError {\n\n line: index + 1,\n\n error: e,\n\n });\n\n }\n\n },\n\n \"5\" => match parse_line_5(materials, &mut it) {\n\n Ok(val) => 
commands.push(Command::OptionalLine(val)),\n\n Err(e) => {\n\n return Err(DocumentParseError {\n\n line: index + 1,\n\n error: e,\n\n });\n\n }\n\n },\n\n _ => {\n\n return Err(DocumentParseError {\n\n line: index + 1,\n", "file_path": "ldraw/src/parser.rs", "rank": 87, "score": 32568.85291265064 }, { "content": " let collected = self.collect_round(collection_strategy);\n\n if collected == 0 {\n\n break;\n\n }\n\n total_collected += collected;\n\n }\n\n total_collected\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub enum ResolutionResult<'a, T> {\n\n Missing,\n\n Pending(PartEntry<T>),\n\n Subpart(&'a Document),\n\n Associated(Arc<Document>),\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct ResolutionMap<'a, T> {\n", "file_path": "ldraw/src/library.rs", "rank": 88, "score": 32568.85291265064 }, { "content": "}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct MaterialSpeckle {\n\n pub value: Rgba,\n\n pub luminance: u8,\n\n pub fraction: f32,\n\n pub size: u32,\n\n pub minsize: f32,\n\n pub maxsize: f32,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub enum CustomizedMaterial {\n\n Glitter(MaterialGlitter),\n\n Speckle(MaterialSpeckle),\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub enum Finish {\n", "file_path": "ldraw/src/color.rs", "rank": 89, "score": 32568.85291265064 }, { "content": "impl fmt::Display for ParseError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n ParseError::TypeMismatch(type_, val) => {\n\n write!(f, \"Error reading value '{}' into {}\", val, type_)\n\n }\n\n ParseError::IoError(err) => write!(f, \"{}\", err),\n\n ParseError::EndOfLine => write!(f, \"End of line\"),\n\n ParseError::InvalidBfcStatement(stmt) => write!(f, \"Invalid BFC statement: {}\", stmt),\n\n ParseError::InvalidDocumentStructure => write!(f, \"Invalid document structure.\"),\n\n ParseError::UnexpectedCommand(cmd) => write!(f, \"Unexpected command: {}\", cmd),\n\n ParseError::InvalidToken(token) => write!(f, \"Invalid token: {}\", token),\n\n 
ParseError::MultipartDocument => write!(f, \"Unexpected multipart document.\"),\n\n }\n\n }\n\n}\n\n\n\nimpl Error for ParseError {\n\n fn source(&self) -> Option<&(dyn Error + 'static)> {\n\n match self {\n", "file_path": "ldraw/src/error.rs", "rank": 90, "score": 32568.85291265064 }, { "content": " if loaded.is_empty() {\n\n None\n\n } else {\n\n Some(loaded)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n const LDRAW_DIR: &'static str = \"/home/segfault/.ldraw\";\n\n\n\n use super::scan_ldraw_directory;\n\n\n\n #[test]\n\n fn test_scan_ldraw_directory() {\n\n match scan_ldraw_directory(LDRAW_DIR) {\n\n Ok(v) => {\n\n println!(\"{:#?}\", v.primitives);\n\n }\n\n Err(e) => {\n\n assert!(false, \"{}\", e);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "ldraw/src/library_native.rs", "rank": 91, "score": 30993.03152858135 }, { "content": "use std::{\n\n collections::HashMap,\n\n ffi::{OsStr, OsString},\n\n fs::File,\n\n io::BufReader,\n\n path::{Path, PathBuf},\n\n sync::{Arc, RwLock},\n\n};\n\n\n\nuse encoding_rs::WINDOWS_1252;\n\nuse encoding_rs_io::DecodeReaderBytesBuilder;\n\n\n\nuse crate::{\n\n color::MaterialRegistry,\n\n error::LibraryError,\n\n library::{PartCache, PartDirectory, PartEntry, PartKind},\n\n parser::parse_single_document,\n\n PartAlias,\n\n};\n\n\n\npub type PartEntryNative = PartEntry<OsString>;\n\npub type PartDirectoryNative = PartDirectory<OsString>;\n\n\n\nimpl From<&OsString> for PartAlias {\n\n fn from(e: &OsString) -> PartAlias {\n\n PartAlias::from(&e.to_string_lossy().to_owned().to_string())\n\n }\n\n}\n\n\n", "file_path": "ldraw/src/library_native.rs", "rank": 92, "score": 30990.81305517507 }, { "content": " } else {\n\n let key = relpath.join(path.file_name().unwrap());\n\n let alias = PartAlias::from(&key.into_os_string());\n\n dir.insert(\n\n alias,\n\n PartEntryNative {\n\n kind,\n\n locator: path.into_os_string(),\n\n },\n\n );\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "ldraw/src/library_native.rs", "rank": 93, "score": 
30985.4820770199 }, { "content": " &mut BufReader::new(\n\n DecodeReaderBytesBuilder::new()\n\n .encoding(Some(WINDOWS_1252))\n\n .build(file),\n\n ),\n\n ) {\n\n Ok(v) => v,\n\n Err(e) => {\n\n println!(\"Could not read part file {}: {:?}\", alias.original, e);\n\n continue;\n\n }\n\n };\n\n\n\n cache\n\n .write()\n\n .unwrap()\n\n .register(entry.kind, alias.clone(), result);\n\n loaded.push(alias.clone());\n\n }\n\n\n", "file_path": "ldraw/src/library_native.rs", "rank": 94, "score": 30985.4820770199 }, { "content": "#[derive(Debug)]\n\nenum Line0 {\n\n Header(Header),\n\n Meta(Meta),\n\n File(String),\n\n Name(String),\n\n Author(String),\n\n BfcCertification(BfcCertification),\n\n}\n\n\n", "file_path": "ldraw/src/parser.rs", "rank": 95, "score": 30985.4820770199 }, { "content": "fn traverse_dependencies(\n\n document: &Document,\n\n parent: Option<&MultipartDocument>,\n\n list: &mut HashSet<PartAlias>,\n\n) {\n\n for part_ref in document.iter_refs() {\n\n if let Some(parent) = parent {\n\n if parent.subparts.contains_key(&part_ref.name) {\n\n traverse_dependencies(\n\n parent.subparts.get(&part_ref.name).unwrap(),\n\n Some(parent),\n\n list,\n\n );\n\n continue;\n\n }\n\n }\n\n list.insert(part_ref.name.clone());\n\n }\n\n}\n\n\n", "file_path": "ldraw/src/document.rs", "rank": 96, "score": 29548.928223006777 }, { "content": "struct U32Visitor;\n\n\n\nimpl<'de> Visitor<'de> for U32Visitor {\n\n type Value = u32;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"an unsigned 32-bit integer\")\n\n }\n\n\n\n fn visit_u32<E: DeError>(self, value: u32) -> Result<Self::Value, E> {\n\n Ok(value)\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for ColorReference {\n\n fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {\n\n // Needs to be resolved later\n\n Ok(ColorReference::Unknown(\n\n deserializer.deserialize_u32(U32Visitor)?,\n\n ))\n", "file_path": "ldraw/src/color.rs", "rank": 97, 
"score": 29548.928223006777 }, { "content": "struct StringVisitor;\n\n\n\nimpl<'a> Visitor<'a> for StringVisitor {\n\n type Value = String;\n\n\n\n fn expecting(&self, formatter: &mut Formatter) -> FmtResult {\n\n write!(formatter, \"a string\")\n\n }\n\n\n\n fn visit_str<E: DeserializeError>(self, v: &str) -> Result<Self::Value, E> {\n\n Ok(String::from(v))\n\n }\n\n}\n\n\n\nimpl Serialize for PartAlias {\n\n fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {\n\n serializer.serialize_str(self.original.as_str())\n\n }\n\n}\n\n\n", "file_path": "ldraw/src/lib.rs", "rank": 98, "score": 29548.928223006777 }, { "content": "fn parse_line_1(\n\n materials: &MaterialRegistry,\n\n iterator: &mut Chars,\n\n) -> Result<PartReference, ParseError> {\n\n let color = next_token_u32(iterator)?;\n\n let x = next_token_f32(iterator)?;\n\n let y = next_token_f32(iterator)?;\n\n let z = next_token_f32(iterator)?;\n\n let matrix = Matrix4::new(\n\n next_token_f32(iterator)?,\n\n next_token_f32(iterator)?,\n\n next_token_f32(iterator)?,\n\n x,\n\n next_token_f32(iterator)?,\n\n next_token_f32(iterator)?,\n\n next_token_f32(iterator)?,\n\n y,\n\n next_token_f32(iterator)?,\n\n next_token_f32(iterator)?,\n\n next_token_f32(iterator)?,\n", "file_path": "ldraw/src/parser.rs", "rank": 99, "score": 29548.928223006777 } ]
Rust
src/components/message.rs
jtakalai/sanuli
e21e1f887f88ab7c9716b8cba98b1fe59727cf24
use yew::prelude::*; use crate::manager::GameMode; use crate::Msg as GameMsg; const FORMS_LINK_TEMPLATE_ADD: &str = "https://docs.google.com/forms/d/e/1FAIpQLSfH8gs4sq-Ynn8iGOvlc99J_zOG2rJEC4m8V0kCgF_en3RHFQ/viewform?usp=pp_url&entry.461337706=Lis%C3%A4yst%C3%A4&entry.560255602="; const FORMS_LINK_TEMPLATE_DEL: &str = "https://docs.google.com/forms/d/e/1FAIpQLSfH8gs4sq-Ynn8iGOvlc99J_zOG2rJEC4m8V0kCgF_en3RHFQ/viewform?usp=pp_url&entry.461337706=Poistoa&entry.560255602="; const DICTIONARY_LINK_TEMPLATE: &str = "https://www.kielitoimistonsanakirja.fi/#/"; #[derive(Properties, Clone, PartialEq)] pub struct MessageProps { pub message: String, pub is_unknown: bool, pub is_winner: bool, pub is_guessing: bool, pub is_hidden: bool, pub is_emojis_copied: bool, pub is_link_copied: bool, pub word: String, pub last_guess: String, pub game_mode: GameMode, pub callback: Callback<GameMsg>, } #[function_component(Message)] pub fn message(props: &MessageProps) -> Html { html! { <div class="message"> { &props.message } <div class="message-small">{ if props.is_hidden { let callback = props.callback.clone(); let reveal_hidden_tiles = Callback::from(move |e: MouseEvent| { e.prevent_default(); callback.emit(GameMsg::RevealHiddenTiles); }); let callback = props.callback.clone(); let reset_game = Callback::from(move |e: MouseEvent| { e.prevent_default(); callback.emit(GameMsg::ResetGame); }); html! { <> <a class="link" href={"javascript:void(0)"} onclick={reset_game}> {"Kokeile ratkaista"} </a> {" | "} <a class="link" href={"javascript:void(0)"} onclick={reveal_hidden_tiles}> {"Paljasta"} </a> </> } } else if !props.is_guessing { html! { <SubMessage is_winner={props.is_winner} is_emojis_copied={props.is_emojis_copied} is_link_copied={props.is_link_copied} word={props.word.clone()} game_mode={props.game_mode} callback={props.callback.clone()} /> } } else if props.is_guessing && props.is_unknown { let last_guess = props.last_guess.to_lowercase(); html! 
{ <a class="link" href={format!("{}{}", FORMS_LINK_TEMPLATE_ADD, last_guess)} target="_blank">{ "Ehdota lisäystä?" } </a> } } else { html! {} } } </div> </div> } } #[derive(Properties, Clone, PartialEq)] pub struct SubMessageProps { pub is_winner: bool, pub is_emojis_copied: bool, pub is_link_copied: bool, pub word: String, pub game_mode: GameMode, pub callback: Callback<GameMsg>, } #[function_component(SubMessage)] fn sub_message(props: &SubMessageProps) -> Html { let word = props.word.to_lowercase(); let callback = props.callback.clone(); let share_emojis = Callback::from(move |e: MouseEvent| { e.prevent_default(); callback.emit(GameMsg::ShareEmojis); }); let callback = props.callback.clone(); let share_link = Callback::from(move |e: MouseEvent| { e.prevent_default(); callback.emit(GameMsg::ShareLink); }); if props.game_mode == GameMode::Quadruple { return html!{} } html! { <> <a class="link" href={format!("{}{}?searchMode=all", DICTIONARY_LINK_TEMPLATE, word)} target="_blank">{ "Sanakirja" } </a> {" | "} <a class="link" href={"javascript:void(0)"} onclick={share_link}> { if !props.is_link_copied { {"Kopioi linkki"} } else { {"Kopioitu!"} } } </a> { if matches!(props.game_mode, GameMode::DailyWord(_)) { html! { <> {" | "} <a class="link" href={"javascript:void(0)"} onclick={share_emojis}> { if !props.is_emojis_copied { {"Kopioi tulos"} } else { {"Kopioitu!"} } } </a> </> } } else if !props.is_winner { html! { <> {" | "} <a class="link" href={format!("{}{}", FORMS_LINK_TEMPLATE_DEL, word)} target="_blank">{ "Ehdota poistoa?" } </a> </> } } else { html! {} } } </> } }
use yew::prelude::*; use crate::manager::GameMode; use crate::Msg as GameMsg; const FORMS_LINK_TEMPLATE_ADD: &str = "https://docs.google.com/forms/d/e/1FAIpQLSfH8gs4sq-Ynn8iGOvlc99J_zOG2rJEC4m8V0kCgF_en3RHFQ/viewform?usp=pp_url&entry.461337706=Lis%C3%A4yst%C3%A4&entry.560255602="; const FORMS_LINK_TEMPLATE_DEL: &str = "https://docs.google.com/forms/d/e/1FAIpQLSfH8gs4sq-Ynn8iGOvlc99J_zOG2rJEC4m8V0kCgF_en3RHFQ/viewform?usp=pp_url&entry.461337706=Poistoa&entry.560255602="; const DICTIONARY_LINK_TEMPLATE: &str = "https://www.kielitoimistonsanakirja.fi/#/"; #[derive(Properties, Clone, PartialEq)] pub struct MessageProps { pub message: String, pub is_unknown: bool, pub is_winner: bool, pub is_guessing: bool, pub is_hidden: bool, pub is_emojis_copied: bool, pub is_link_copied: bool, pub word: String, pub last_guess: String, pub game_mode: GameMode, pub callback: Callback<Ga
|e: MouseEvent| { e.prevent_default(); callback.emit(GameMsg::ShareEmojis); }); let callback = props.callback.clone(); let share_link = Callback::from(move |e: MouseEvent| { e.prevent_default(); callback.emit(GameMsg::ShareLink); }); if props.game_mode == GameMode::Quadruple { return html!{} } html! { <> <a class="link" href={format!("{}{}?searchMode=all", DICTIONARY_LINK_TEMPLATE, word)} target="_blank">{ "Sanakirja" } </a> {" | "} <a class="link" href={"javascript:void(0)"} onclick={share_link}> { if !props.is_link_copied { {"Kopioi linkki"} } else { {"Kopioitu!"} } } </a> { if matches!(props.game_mode, GameMode::DailyWord(_)) { html! { <> {" | "} <a class="link" href={"javascript:void(0)"} onclick={share_emojis}> { if !props.is_emojis_copied { {"Kopioi tulos"} } else { {"Kopioitu!"} } } </a> </> } } else if !props.is_winner { html! { <> {" | "} <a class="link" href={format!("{}{}", FORMS_LINK_TEMPLATE_DEL, word)} target="_blank">{ "Ehdota poistoa?" } </a> </> } } else { html! {} } } </> } }
meMsg>, } #[function_component(Message)] pub fn message(props: &MessageProps) -> Html { html! { <div class="message"> { &props.message } <div class="message-small">{ if props.is_hidden { let callback = props.callback.clone(); let reveal_hidden_tiles = Callback::from(move |e: MouseEvent| { e.prevent_default(); callback.emit(GameMsg::RevealHiddenTiles); }); let callback = props.callback.clone(); let reset_game = Callback::from(move |e: MouseEvent| { e.prevent_default(); callback.emit(GameMsg::ResetGame); }); html! { <> <a class="link" href={"javascript:void(0)"} onclick={reset_game}> {"Kokeile ratkaista"} </a> {" | "} <a class="link" href={"javascript:void(0)"} onclick={reveal_hidden_tiles}> {"Paljasta"} </a> </> } } else if !props.is_guessing { html! { <SubMessage is_winner={props.is_winner} is_emojis_copied={props.is_emojis_copied} is_link_copied={props.is_link_copied} word={props.word.clone()} game_mode={props.game_mode} callback={props.callback.clone()} /> } } else if props.is_guessing && props.is_unknown { let last_guess = props.last_guess.to_lowercase(); html! { <a class="link" href={format!("{}{}", FORMS_LINK_TEMPLATE_ADD, last_guess)} target="_blank">{ "Ehdota lisäystä?" } </a> } } else { html! {} } } </div> </div> } } #[derive(Properties, Clone, PartialEq)] pub struct SubMessageProps { pub is_winner: bool, pub is_emojis_copied: bool, pub is_link_copied: bool, pub word: String, pub game_mode: GameMode, pub callback: Callback<GameMsg>, } #[function_component(SubMessage)] fn sub_message(props: &SubMessageProps) -> Html { let word = props.word.to_lowercase(); let callback = props.callback.clone(); let share_emojis = Callback::from(move
random
[ { "content": "pub trait Game {\n\n fn title(&self) -> String;\n\n fn next_word(&mut self);\n\n fn keyboard_tilestate(&self, key: &char) -> KeyState;\n\n fn submit_guess(&mut self);\n\n fn push_character(&mut self, character: char);\n\n fn pop_character(&mut self);\n\n fn share_emojis(&self, theme: Theme) -> Option<String>;\n\n fn share_link(&self) -> Option<String>;\n\n fn reveal_hidden_tiles(&mut self);\n\n fn reset(&mut self);\n\n fn refresh(&mut self);\n\n fn persist(&self) -> Result<(), StorageError>;\n\n fn set_allow_profanities(&mut self, is_allowed: bool);\n\n\n\n fn game_mode(&self) -> &GameMode;\n\n fn word_list(&self) -> &WordList;\n\n fn word_length(&self) -> usize;\n\n fn max_guesses(&self) -> usize;\n\n fn word(&self) -> Vec<char>;\n", "file_path": "src/game.rs", "rank": 1, "score": 34857.40054811493 }, { "content": "pub fn known_count(\n\n character: &char,\n\n current_guess: usize,\n\n guess: &[(char, TileState)],\n\n counts: &[KnownCounts],\n\n word: &[char],\n\n) -> Option<CharacterCount> {\n\n let known_count = counts[current_guess]\n\n .get(character)\n\n .unwrap_or(&CharacterCount::AtLeast(0));\n\n\n\n // At most the same amount of characters are highlighted as there are in the word\n\n let count_in_word = word.iter().filter(|c| *c == character).count();\n\n if count_in_word == 0 {\n\n return Some(CharacterCount::Exactly(0));\n\n }\n\n\n\n let count_in_guess = guess.iter().filter(|(c, _)| c == character).count();\n\n\n\n // Exact count should never change\n", "file_path": "src/game.rs", "rank": 2, "score": 33457.94531746529 }, { "content": "pub fn board_tile_state(\n\n revealed_counts: &mut HashMap<char, usize>,\n\n current_guess: usize,\n\n states: &[KnownStates],\n\n counts: &[KnownCounts],\n\n index: usize,\n\n character: &char,\n\n) -> TileState {\n\n match states[current_guess].get(&(*character, index)) {\n\n Some(CharacterState::Correct) => {\n\n return TileState::Correct;\n\n }\n\n Some(CharacterState::Absent) => {\n\n let revealed = 
revealed_counts\n\n .entry(*character)\n\n .and_modify(|count| *count += 1)\n\n .or_insert(1);\n\n\n\n let discovered_count = counts[current_guess]\n\n .get(character)\n", "file_path": "src/game.rs", "rank": 3, "score": 32210.193114792382 }, { "content": "pub fn hint_tile_state(\n\n character: char,\n\n character_index: usize,\n\n guess_index: usize,\n\n states: &[KnownStates],\n\n counts: &[KnownCounts],\n\n) -> TileState {\n\n match states[guess_index].get(&(character, character_index)) {\n\n Some(CharacterState::Correct) => TileState::Correct,\n\n Some(CharacterState::Absent) => TileState::Absent,\n\n _ => {\n\n match counts[guess_index].get(&character) {\n\n Some(CharacterCount::Exactly(count)) => {\n\n // We may know the exact count, but not the exact index of any characters..\n\n if *count == 0 {\n\n return TileState::Absent;\n\n }\n\n\n\n let is_every_correct_found = states[guess_index]\n\n .iter()\n", "file_path": "src/game.rs", "rank": 4, "score": 32210.193114792382 }, { "content": "pub fn keyboard_tile_state(\n\n key: &char,\n\n current_guess: usize,\n\n states: &[KnownStates],\n\n counts: &[KnownCounts],\n\n) -> TileState {\n\n let is_correct = states[current_guess]\n\n .iter()\n\n .any(|((c, _index), state)| c == key && state == &CharacterState::Correct);\n\n if is_correct {\n\n return TileState::Correct;\n\n }\n\n\n\n match counts[current_guess].get(key) {\n\n Some(CharacterCount::AtLeast(count)) => {\n\n if *count == 0 {\n\n return TileState::Unknown;\n\n }\n\n TileState::Present\n\n }\n\n Some(CharacterCount::Exactly(count)) => {\n\n if *count == 0 {\n\n return TileState::Absent;\n\n }\n\n TileState::Present\n\n }\n\n None => TileState::Unknown,\n\n }\n\n}\n\n\n", "file_path": "src/game.rs", "rank": 5, "score": 32210.193114792382 }, { "content": "pub fn update_known_information(\n\n states: &mut [KnownStates],\n\n counts: &mut [KnownCounts],\n\n guess: &mut [(char, TileState)],\n\n guess_index: usize,\n\n word: &[char],\n\n max_guesses: usize,\n\n) 
{\n\n for (index, (character, _)) in guess.iter().enumerate() {\n\n let known = states[guess_index]\n\n .entry((*character, index))\n\n .or_insert(CharacterState::Unknown);\n\n\n\n if word[index] == *character {\n\n *known = CharacterState::Correct;\n\n } else {\n\n *known = CharacterState::Absent;\n\n\n\n if let Some(updated_count) = known_count(character, guess_index, guess, counts, word) {\n\n counts[guess_index].insert(*character, updated_count);\n", "file_path": "src/game.rs", "rank": 6, "score": 32210.193114792382 }, { "content": "pub fn update_guess_tile_states(\n\n guess: &mut [(char, TileState)],\n\n guess_index: usize,\n\n states: &[KnownStates],\n\n counts: &[KnownCounts],\n\n) {\n\n let mut revealed_counts = revealed_by_char(guess, guess_index, states);\n\n\n\n for (index, (character, tile_state)) in guess.iter_mut().enumerate() {\n\n *tile_state = board_tile_state(\n\n &mut revealed_counts,\n\n guess_index,\n\n states,\n\n counts,\n\n index,\n\n character,\n\n );\n\n }\n\n}\n\n\n", "file_path": "src/game.rs", "rank": 8, "score": 31090.744932959962 }, { "content": "fn parse_all_words() -> Rc<WordLists> {\n\n let mut word_lists: HashMap<(WordList, usize), HashSet<Vec<char>>> = HashMap::with_capacity(3);\n\n for word in FULL_WORDS.lines() {\n\n let chars = word.chars();\n\n let word_length = chars.clone().count();\n\n word_lists\n\n .entry((WordList::Full, word_length))\n\n .or_insert_with(HashSet::new)\n\n .insert(chars.collect());\n\n }\n\n\n\n // TODO: Only 5-letter easy words exist for now on this list; fake them from common list\n\n for word in EASY_WORDS.lines() {\n\n let chars = word.chars();\n\n let word_length = chars.clone().count();\n\n word_lists\n\n .entry((WordList::Easy, word_length))\n\n .or_insert_with(HashSet::new)\n\n .insert(chars.collect());\n\n }\n", "file_path": "src/manager.rs", "rank": 9, "score": 29767.31348836654 }, { "content": "#[function_component(Header)]\n\npub fn header(props: &Props) -> Html {\n\n let on_toggle_help_cb = 
props.on_toggle_help_cb.clone();\n\n let onclick_help = {\n\n Callback::from(move |e: MouseEvent| {\n\n e.prevent_default();\n\n on_toggle_help_cb.emit(e);\n\n })\n\n };\n\n\n\n let on_toggle_menu_cb = props.on_toggle_menu_cb.clone();\n\n let onclick_menu = {\n\n Callback::from(move |e: MouseEvent| {\n\n e.prevent_default();\n\n on_toggle_menu_cb.emit(e);\n\n })\n\n };\n\n\n\n html! {\n\n <header>\n\n <nav onclick={onclick_help} class=\"title-icon\">{\"?\"}</nav>\n\n <h1 class=\"title\">{&props.title}</h1>\n\n <nav onclick={onclick_menu} class=\"title-icon\">{\"≡\"}</nav>\n\n </header>\n\n }\n\n}\n", "file_path": "src/components/header.rs", "rank": 10, "score": 28026.84948893711 }, { "content": "#[function_component(Keyboard)]\n\npub fn keyboard(props: &Props) -> Html {\n\n let callback = props.callback.clone();\n\n let onbackspace = Callback::from(move |e: MouseEvent| {\n\n e.prevent_default();\n\n callback.emit(Msg::Backspace);\n\n });\n\n\n\n html! {\n\n <div class=\"keyboard\">\n\n {\n\n if props.message.is_empty() && !props.is_hidden {\n\n html! {}\n\n } else {\n\n html! {\n\n <Message\n\n message={props.message.clone()}\n\n is_unknown={props.is_unknown}\n\n is_winner={props.is_winner}\n\n is_guessing={props.is_guessing}\n\n is_hidden={props.is_hidden}\n", "file_path": "src/components/keyboard.rs", "rank": 11, "score": 28026.84948893711 }, { "content": "#[function_component(Board)]\n\npub fn board(props: &Props) -> Html {\n\n html! {\n\n <>\n\n {\n\n if !props.previous_guesses.is_empty() && props.is_reset {\n\n html! {\n\n <PreviousBoard\n\n guesses={props.previous_guesses.clone()}\n\n max_guesses={props.max_guesses}\n\n word_length={props.word_length}\n\n />\n\n }\n\n } else {\n\n html! 
{}\n\n }\n\n }\n\n <div class={classes!(\n\n props.is_reset.then(|| \"slide-in\"),\n\n props.is_reset.then(|| format!(\"slide-in-{}\", props.previous_guesses.len())),\n\n format!(\"board-{}\", props.max_guesses))}>{\n", "file_path": "src/components/board.rs", "rank": 12, "score": 28026.84948893711 }, { "content": "#[function_component(PreviousBoard)]\n\npub fn previous_board(props: &PreviousBoardProps) -> Html {\n\n html! {\n\n <div class={classes!(\"slide-out\", format!(\"slide-out-{}\", props.guesses.len()), format!(\"board-{}\", props.max_guesses))}>\n\n { props.guesses.iter().map(|guess| {\n\n html! {\n\n <div class={format!(\"row-{}\", props.word_length)}>\n\n {\n\n (0..props.word_length).map(|tile_index| {\n\n let (character, tile_state) = guess\n\n .get(tile_index)\n\n .unwrap_or(&(' ', TileState::Unknown));\n\n\n\n html! {\n\n <div class={classes!(\"tile\", tile_state.to_string())}>\n\n { character }\n\n </div>\n\n }\n\n }).collect::<Html>()\n\n }\n\n </div>\n\n }\n\n }).collect::<Html>() }\n\n </div>\n\n }\n\n}\n", "file_path": "src/components/board.rs", "rank": 18, "score": 25513.779130731735 }, { "content": "#[function_component(KeyboardButton)]\n\npub fn keyboard_button(props: &KeyboardButtonProps) -> Html {\n\n if !props.is_hidden {\n\n match props.key_state {\n\n KeyState::Single(state) => {\n\n html! {\n\n <button data-nosnippet=\"\" class={classes!(\"keyboard-button\", state.to_string())} onmousedown={props.onkeypress.clone()}>\n\n { props.character }\n\n </button>\n\n }\n\n }\n\n KeyState::Quadruple(states) => {\n\n let background = format!(\n\n \"background: conic-gradient(var(--{top_right}) 0deg, var(--{top_right}) 90deg, var(--{bottom_right}) 90deg, var(--{bottom_right}) 180deg, var(--{bottom_left}) 180deg, var(--{bottom_left}) 270deg, var(--{top_left}) 270deg, var(--{top_left}) 360deg);\",\n\n top_left=states[0],\n\n top_right=states[1],\n\n bottom_left=states[2],\n\n bottom_right=states[3],\n\n );\n\n\n\n html! 
{\n", "file_path": "src/components/keyboard.rs", "rank": 19, "score": 25513.779130731735 }, { "content": "#[function_component(MenuModal)]\n\npub fn menu_modal(props: &MenuModalProps) -> Html {\n\n let callback = props.callback.clone();\n\n let today = Local::now().naive_local().date();\n\n let toggle_menu = onmousedown!(callback, Msg::ToggleMenu);\n\n\n\n let change_word_length_5 = onmousedown!(callback, Msg::ChangeWordLength(5));\n\n let change_word_length_6 = onmousedown!(callback, Msg::ChangeWordLength(6));\n\n\n\n let change_game_mode_classic = onmousedown!(callback, Msg::ChangeGameMode(GameMode::Classic));\n\n let change_game_mode_relay = onmousedown!(callback, Msg::ChangeGameMode(GameMode::Relay));\n\n let change_game_mode_daily =\n\n onmousedown!(callback, Msg::ChangeGameMode(GameMode::DailyWord(today)));\n\n let change_game_mode_quadruple =\n\n onmousedown!(callback, Msg::ChangeGameMode(GameMode::Quadruple));\n\n\n\n let change_word_list_easy = onmousedown!(callback, Msg::ChangeWordList(WordList::Easy));\n\n let change_word_list_common = onmousedown!(callback, Msg::ChangeWordList(WordList::Common));\n\n let change_word_list_full = onmousedown!(callback, Msg::ChangeWordList(WordList::Full));\n\n\n\n let change_allow_profanities_yes = onmousedown!(callback, Msg::ChangeAllowProfanities(true));\n", "file_path": "src/components/modal.rs", "rank": 20, "score": 25513.779130731735 }, { "content": "#[function_component(HelpModal)]\n\npub fn help_modal(props: &HelpModalProps) -> Html {\n\n let callback = props.callback.clone();\n\n let toggle_help = onmousedown!(callback, Msg::ToggleHelp);\n\n\n\n html! 
{\n\n <div class=\"modal\">\n\n <span onmousedown={toggle_help} class=\"modal-close\">{\"✖\"}</span>\n\n <p>{\"Arvaa kätketty \"}<i>{\"sanuli\"}</i>{\" kuudella yrityksellä.\"}</p>\n\n <p>{\"Jokaisen yrityksen jälkeen arvatut kirjaimet vaihtavat väriään.\"}</p>\n\n\n\n <div class=\"row-5 example\">\n\n <div class={classes!(\"tile\", \"correct\")}>{\"K\"}</div>\n\n <div class={classes!(\"tile\", \"absent\")}>{\"O\"}</div>\n\n <div class={classes!(\"tile\", \"present\")}>{\"I\"}</div>\n\n <div class={classes!(\"tile\", \"absent\")}>{\"R\"}</div>\n\n <div class={classes!(\"tile\", \"absent\")}>{\"A\"}</div>\n\n </div>\n\n\n\n <p>\n\n {\n", "file_path": "src/components/modal.rs", "rank": 21, "score": 25513.779130731735 }, { "content": "\n\n pub is_emojis_copied: bool,\n\n pub is_link_copied: bool,\n\n\n\n pub game_mode: GameMode,\n\n\n\n pub message: String,\n\n pub word: String,\n\n pub last_guess: String,\n\n\n\n pub keyboard: HashMap<char, KeyState>,\n\n}\n\n\n\n#[function_component(Keyboard)]\n", "file_path": "src/components/keyboard.rs", "rank": 22, "score": 14.660260546931188 }, { "content": "use std::collections::HashMap;\n\nuse yew::prelude::*;\n\n\n\nuse crate::manager::{GameMode, KeyState, TileState};\n\nuse crate::Msg;\n\n\n\nuse crate::components::message::Message;\n\n\n\nconst KEYBOARD_0: [char; 10] = ['Q', 'W', 'E', 'R', 'T', 'Y', 'U', 'I', 'O', 'P'];\n\nconst KEYBOARD_1: [char; 11] = ['A', 'S', 'D', 'F', 'G', 'H', 'J', 'K', 'L', 'Ö', 'Ä'];\n\nconst KEYBOARD_2: [char; 7] = ['Z', 'X', 'C', 'V', 'B', 'N', 'M'];\n\n\n\n#[derive(Properties, PartialEq)]\n\npub struct Props {\n\n pub callback: Callback<Msg>,\n\n\n\n pub is_unknown: bool,\n\n pub is_winner: bool,\n\n pub is_guessing: bool,\n\n pub is_hidden: bool,\n", "file_path": "src/components/keyboard.rs", "rank": 23, "score": 14.481907896758486 }, { "content": "use yew::prelude::*;\n\n\n\n#[derive(Properties, Clone, PartialEq)]\n\npub struct Props {\n\n pub on_toggle_menu_cb: Callback<MouseEvent>,\n\n pub 
on_toggle_help_cb: Callback<MouseEvent>,\n\n pub title: String,\n\n}\n\n\n\n#[function_component(Header)]\n", "file_path": "src/components/header.rs", "rank": 24, "score": 13.973907662039148 }, { "content": "use chrono::Local;\n\nuse yew::prelude::*;\n\n\n\nuse crate::manager::{GameMode, Theme, WordList};\n\nuse crate::Msg;\n\n\n\nconst FORMS_LINK_TEMPLATE_ADD: &str = \"https://docs.google.com/forms/d/e/1FAIpQLSfH8gs4sq-Ynn8iGOvlc99J_zOG2rJEC4m8V0kCgF_en3RHFQ/viewform?usp=pp_url&entry.461337706=Lis%C3%A4yst%C3%A4&entry.560255602=\";\n\nconst CHANGELOG_URL: &str = \"https://github.com/Cadiac/sanuli/blob/master/CHANGELOG.md\";\n\nconst VERSION: &str = \"v1.14\";\n\n\n\nmacro_rules! onmousedown {\n\n ( $cb:ident, $msg:expr ) => {{\n\n let $cb = $cb.clone();\n\n Callback::from(move |e: MouseEvent| {\n\n e.prevent_default();\n\n $cb.emit($msg);\n\n })\n\n }};\n\n}\n\n\n\n#[derive(Properties, Clone, PartialEq)]\n\npub struct HelpModalProps {\n\n pub theme: Theme,\n\n pub callback: Callback<Msg>,\n\n}\n\n\n\n#[function_component(HelpModal)]\n", "file_path": "src/components/modal.rs", "rank": 25, "score": 12.880182904898327 }, { "content": "};\n\n\n\nconst DAILY_WORDS: &str = include_str!(\"../daily-words.txt\");\n\n\n\n#[derive(Clone, PartialEq, Serialize, Deserialize)]\n\npub struct Sanuli {\n\n game_mode: GameMode,\n\n word_list: WordList,\n\n word_length: usize,\n\n max_guesses: usize,\n\n\n\n word: Vec<char>,\n\n guesses: Vec<Vec<(char, TileState)>>,\n\n current_guess: usize,\n\n streak: usize,\n\n\n\n is_guessing: bool,\n\n is_winner: bool,\n\n is_unknown: bool,\n\n is_reset: bool,\n", "file_path": "src/sanuli.rs", "rank": 26, "score": 12.83251805605044 }, { "content": "const PROFANITIES: &str = include_str!(\"../profanities.txt\");\n\n\n\npub const DEFAULT_WORD_LENGTH: usize = 5;\n\npub const DEFAULT_MAX_GUESSES: usize = 6;\n\npub const DEFAULT_ALLOW_PROFANITIES: bool = false;\n\npub const DAILY_WORD_LEN: usize = 5;\n\n\n\npub type WordLists = HashMap<(WordList, 
usize), HashSet<Vec<char>>>;\n\n\n\n#[derive(PartialEq, Copy, Clone)]\n\npub enum KeyState {\n\n Quadruple([TileState; 4]),\n\n Single(TileState),\n\n}\n\n\n", "file_path": "src/manager.rs", "rank": 27, "score": 12.797237646815226 }, { "content": "use rand::seq::SliceRandom;\n\nuse std::collections::HashMap;\n\nuse std::rc::Rc;\n\n\n\nuse gloo_storage::{errors::StorageError, LocalStorage, Storage};\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse crate::game::{Board, Game, DEFAULT_ALLOW_PROFANITIES, DEFAULT_WORD_LENGTH, SUCCESS_EMOJIS};\n\nuse crate::manager::{GameMode, KeyState, Theme, TileState, WordList, WordLists};\n\nuse crate::sanuli::Sanuli;\n\n\n\nconst MAX_GUESSES: usize = 9;\n\n\n\n#[derive(Clone, PartialEq, Serialize, Deserialize)]\n\npub struct Neluli {\n\n word_list: WordList,\n\n word_length: usize,\n\n boards: Vec<Sanuli>,\n\n streak: usize,\n\n message: String,\n", "file_path": "src/neluli.rs", "rank": 28, "score": 12.199551126357083 }, { "content": "\n\n fn is_guessing(&self) -> bool {\n\n self.is_guessing\n\n }\n\n fn is_winner(&self) -> bool {\n\n self.is_winner\n\n }\n\n fn is_reset(&self) -> bool {\n\n self.is_reset\n\n }\n\n fn is_hidden(&self) -> bool {\n\n self.is_hidden\n\n }\n\n fn is_unknown(&self) -> bool {\n\n self.is_unknown\n\n }\n\n fn message(&self) -> String {\n\n self.message.clone()\n\n }\n\n fn previous_guesses(&self) -> Vec<Vec<(char, TileState)>> {\n", "file_path": "src/sanuli.rs", "rank": 29, "score": 12.119015447026012 }, { "content": " Self {\n\n word_list,\n\n word_length,\n\n\n\n boards,\n\n streak: 0,\n\n\n\n message: String::new(),\n\n\n\n allow_profanities: DEFAULT_ALLOW_PROFANITIES,\n\n word_lists,\n\n }\n\n }\n\n\n\n pub fn new_or_rehydrate(\n\n word_list: WordList,\n\n word_length: usize,\n\n allow_profanities: bool,\n\n word_lists: Rc<WordLists>,\n\n ) -> Self {\n", "file_path": "src/neluli.rs", "rank": 30, "score": 12.021082073411874 }, { "content": " false\n\n }\n\n fn is_hidden(&self) -> bool {\n\n false\n\n 
}\n\n fn is_unknown(&self) -> bool {\n\n false\n\n }\n\n fn message(&self) -> String {\n\n self.message.clone()\n\n }\n\n fn previous_guesses(&self) -> Vec<Vec<(char, TileState)>> {\n\n Vec::new()\n\n }\n\n\n\n fn set_allow_profanities(&mut self, is_allowed: bool) {\n\n self.allow_profanities = is_allowed;\n\n }\n\n\n\n fn title(&self) -> String {\n", "file_path": "src/neluli.rs", "rank": 31, "score": 12.003697672690006 }, { "content": "use std::collections::HashMap;\n\n\n\nuse gloo_storage::errors::StorageError;\n\n\n\npub type KnownStates = HashMap<(char, usize), CharacterState>;\n\npub type KnownCounts = HashMap<char, CharacterCount>;\n\n\n\nuse crate::manager::{\n\n CharacterCount, CharacterState, GameMode, KeyState, Theme, TileState, WordList,\n\n};\n\n\n\npub const SUCCESS_EMOJIS: [&str; 9] = [\"🥳\", \"🤩\", \"🤗\", \"🎉\", \"😊\", \"😺\", \"😎\", \"👏\", \":3\"];\n\npub const DEFAULT_WORD_LENGTH: usize = 5;\n\npub const DEFAULT_MAX_GUESSES: usize = 6;\n\npub const DEFAULT_ALLOW_PROFANITIES: bool = false;\n\n\n", "file_path": "src/game.rs", "rank": 32, "score": 11.926581522897784 }, { "content": "use yew::prelude::*;\n\n\n\nuse crate::manager::TileState;\n\n\n\n#[derive(Properties, PartialEq)]\n\npub struct Props {\n\n pub is_guessing: bool,\n\n pub is_reset: bool,\n\n pub is_hidden: bool,\n\n\n\n pub guesses: Vec<Vec<(char, TileState)>>,\n\n pub previous_guesses: Vec<Vec<(char, TileState)>>,\n\n pub current_guess: usize,\n\n pub max_guesses: usize,\n\n pub word_length: usize,\n\n}\n\n\n\n#[function_component(Board)]\n", "file_path": "src/components/board.rs", "rank": 33, "score": 11.27787953843083 }, { "content": "\n\n fn last_guess(&self) -> String;\n\n fn boards(&self) -> Vec<Board>;\n\n fn streak(&self) -> usize;\n\n\n\n fn is_guessing(&self) -> bool;\n\n fn is_reset(&self) -> bool;\n\n fn is_hidden(&self) -> bool;\n\n fn is_winner(&self) -> bool;\n\n fn is_unknown(&self) -> bool;\n\n\n\n fn message(&self) -> String;\n\n fn previous_guesses(&self) -> 
Vec<Vec<(char, TileState)>>;\n\n}\n\n\n\nimpl PartialEq for dyn Game {\n\n fn eq(&self, other: &Self) -> bool {\n\n self.title() == other.title()\n\n && self.game_mode() == other.game_mode()\n\n && self.word_list() == other.word_list()\n", "file_path": "src/game.rs", "rank": 34, "score": 10.879415192037005 }, { "content": " is_unknown={game.is_unknown()}\n\n is_winner={game.is_winner()}\n\n is_guessing={game.is_guessing()}\n\n is_hidden={game.is_hidden()}\n\n is_emojis_copied={self.is_emojis_copied}\n\n is_link_copied={self.is_link_copied}\n\n game_mode={game.game_mode().clone()}\n\n message={game.message()}\n\n word={game.word().iter().collect::<String>()}\n\n last_guess={last_guess}\n\n keyboard={keyboard_state}\n\n />\n\n\n\n {\n\n if self.is_help_visible {\n\n html! { <HelpModal theme={self.manager.theme} callback={link.callback(move |msg| msg)} /> }\n\n } else {\n\n html! {}\n\n }\n\n }\n", "file_path": "src/main.rs", "rank": 35, "score": 10.751151899446416 }, { "content": "use std::collections::HashMap;\n\nuse std::collections::HashSet;\n\nuse std::fmt;\n\nuse std::mem;\n\nuse std::rc::Rc;\n\nuse std::str::FromStr;\n\n\n\nuse chrono::{Local, NaiveDate};\n\nuse gloo_storage::{errors::StorageError, LocalStorage, Storage};\n\nuse serde::{Deserialize, Serialize};\n\nuse wasm_bindgen::JsValue;\n\nuse web_sys::{window, Window};\n\n\n\nuse crate::game::Game;\n\nuse crate::neluli::Neluli;\n\nuse crate::sanuli::Sanuli;\n\n\n\nconst EASY_WORDS: &str = include_str!(\"../easy-words.txt\");\n\nconst COMMON_WORDS: &str = include_str!(\"../common-words.txt\");\n\nconst FULL_WORDS: &str = include_str!(\"../full-words.txt\");\n", "file_path": "src/manager.rs", "rank": 36, "score": 10.673743793227324 }, { "content": " && self.word_length() == other.word_length()\n\n && self.max_guesses() == other.max_guesses()\n\n && self.boards() == other.boards()\n\n && self.streak() == other.streak()\n\n && self.is_reset() == other.is_reset()\n\n && self.is_hidden() == other.is_hidden()\n\n 
&& self.message() == other.message()\n\n && self.previous_guesses() == other.previous_guesses()\n\n }\n\n}\n\n\n\n#[derive(PartialEq)]\n\npub struct Board {\n\n pub guesses: Vec<Vec<(char, TileState)>>,\n\n pub current_guess: usize,\n\n pub is_guessing: bool,\n\n}\n\n\n\n// Common game logic\n\n\n", "file_path": "src/game.rs", "rank": 37, "score": 10.560641671823934 }, { "content": " fn is_game_ended(&self) -> bool {\n\n self.boards.iter().all(|board| !board.is_guessing())\n\n }\n\n\n\n fn clear_message(&mut self) {\n\n self.message = String::new();\n\n }\n\n\n\n fn set_game_end_message(&mut self) {\n\n if self.is_winner() {\n\n self.message = format!(\n\n \"Löysit sanulit! {}\",\n\n SUCCESS_EMOJIS.choose(&mut rand::thread_rng()).unwrap()\n\n );\n\n } else {\n\n let words: Vec<_> = self\n\n .boards\n\n .iter()\n\n .filter(|game| !game.is_winner())\n\n .map(|game| game.word().iter().collect::<String>())\n", "file_path": "src/neluli.rs", "rank": 38, "score": 10.397276961322198 }, { "content": " match self.word_lists.get(&(WordList::Full, self.word_length)) {\n\n Some(list) => list.contains(word),\n\n None => false,\n\n }\n\n }\n\n\n\n fn is_correct_word(&self) -> bool {\n\n self.guesses[self.current_guess]\n\n .iter()\n\n .map(|(c, _)| *c)\n\n .collect::<Vec<char>>()\n\n == self.word\n\n }\n\n\n\n pub fn is_game_ended(&self) -> bool {\n\n self.is_winner || self.current_guess == self.max_guesses - 1\n\n }\n\n\n\n fn clear_message(&mut self) {\n\n self.is_unknown = false;\n", "file_path": "src/sanuli.rs", "rank": 39, "score": 10.118866912872505 }, { "content": "#[derive(Clone, PartialEq, Serialize, Deserialize)]\n\npub enum CharacterCount {\n\n AtLeast(usize),\n\n Exactly(usize),\n\n}\n\n\n\n#[derive(PartialEq, Serialize, Deserialize)]\n\npub struct Manager {\n\n pub current_game_mode: GameMode,\n\n pub current_word_list: WordList,\n\n pub current_word_length: usize,\n\n pub allow_profanities: bool,\n\n\n\n pub previous_game: (GameMode, WordList, usize),\n\n\n\n pub 
theme: Theme,\n\n\n\n pub max_streak: usize,\n\n pub total_played: usize,\n\n pub total_solved: usize,\n", "file_path": "src/manager.rs", "rank": 40, "score": 9.913340290454524 }, { "content": " TileState::Present => match theme {\n\n Theme::Colorblind => \"🟦\",\n\n _ => \"🟨\",\n\n },\n\n TileState::Absent => \"⬛\",\n\n TileState::Unknown => \"⬜\",\n\n })\n\n .collect::<String>();\n\n\n\n message += &guess_string;\n\n message += \"\\n\";\n\n }\n\n }\n\n\n\n Some(message)\n\n }\n\n\n\n fn share_link(&self) -> Option<String> {\n\n let game_str = format!(\n\n \"{}|{}\",\n", "file_path": "src/sanuli.rs", "rank": 41, "score": 9.712875235427868 }, { "content": "\n\n return Some(game);\n\n }\n\n\n\n pub fn new_or_rehydrate(\n\n game_mode: GameMode,\n\n word_list: WordList,\n\n word_length: usize,\n\n allow_profanities: bool,\n\n word_lists: Rc<WordLists>,\n\n ) -> Self {\n\n if let Ok(game) = Self::rehydrate(\n\n game_mode,\n\n word_list,\n\n word_length,\n\n allow_profanities,\n\n word_lists.clone(),\n\n ) {\n\n game\n\n } else {\n", "file_path": "src/sanuli.rs", "rank": 42, "score": 9.504204735918847 }, { "content": " is_emojis_copied={props.is_emojis_copied}\n\n is_link_copied={props.is_link_copied}\n\n last_guess={props.last_guess.clone()}\n\n word={props.word.clone()}\n\n game_mode={props.game_mode}\n\n callback={props.callback.clone()}\n\n />\n\n }\n\n }\n\n }\n\n\n\n <div class=\"keyboard-row\">\n\n {\n\n KEYBOARD_0.iter().map(|key| {\n\n let callback = props.callback.clone();\n\n let onkeypress = Callback::from(move |e: MouseEvent| {\n\n e.prevent_default();\n\n callback.emit(Msg::KeyPress(*key));\n\n });\n\n\n", "file_path": "src/components/keyboard.rs", "rank": 43, "score": 9.33409547327147 }, { "content": " pub word_length: usize,\n\n pub game_mode: GameMode,\n\n pub current_word_list: WordList,\n\n pub allow_profanities: bool,\n\n pub theme: Theme,\n\n\n\n pub max_streak: usize,\n\n pub total_played: usize,\n\n pub total_solved: usize,\n\n}\n\n\n", 
"file_path": "src/components/modal.rs", "rank": 44, "score": 9.135558155296229 }, { "content": " .unwrap()\n\n .chars()\n\n .collect()\n\n }\n\n\n\n pub fn is_guess_correct_length(&self) -> bool {\n\n self.guesses[self.current_guess].len() == self.word_length\n\n }\n\n\n\n pub fn is_guess_accepted_word(&self) -> bool {\n\n // Always allow correct words, even if they aren't on the list\n\n if self.is_correct_word() {\n\n return true;\n\n }\n\n\n\n let word: &Vec<char> = &self.guesses[self.current_guess]\n\n .iter()\n\n .map(|(c, _)| *c)\n\n .collect();\n\n\n", "file_path": "src/sanuli.rs", "rank": 45, "score": 8.931554443100348 }, { "content": " #[serde(skip)]\n\n is_hidden: bool,\n\n\n\n message: String,\n\n\n\n #[serde(skip)]\n\n previous_guesses: Vec<Vec<(char, TileState)>>,\n\n\n\n #[serde(skip)]\n\n allow_profanities: bool,\n\n #[serde(skip)]\n\n word_lists: Rc<WordLists>,\n\n #[serde(skip)]\n\n known_states: Vec<KnownStates>,\n\n #[serde(skip)]\n\n known_counts: Vec<KnownCounts>,\n\n}\n\n\n\nimpl Default for Sanuli {\n\n fn default() -> Self {\n", "file_path": "src/sanuli.rs", "rank": 46, "score": 8.911950931997458 }, { "content": " guesses: self.guesses.clone(),\n\n current_guess: self.current_guess,\n\n is_guessing: self.is_guessing,\n\n };\n\n\n\n vec![board]\n\n }\n\n fn word(&self) -> Vec<char> {\n\n self.word.clone()\n\n }\n\n\n\n fn last_guess(&self) -> String {\n\n match self.guesses.get(self.current_guess) {\n\n Some(guess) => guess.iter().map(|(c, _)| c).collect::<String>(),\n\n None => String::new(),\n\n }\n\n }\n\n fn streak(&self) -> usize {\n\n self.streak\n\n }\n", "file_path": "src/sanuli.rs", "rank": 47, "score": 8.788826960663062 }, { "content": " }\n\n\n\n // Skip the leading \"?\"\n\n for param in qs.chars().skip(1).collect::<String>().split(\"&\") {\n\n let mut parts = param.split(\"=\");\n\n\n\n let key = parts.next()?;\n\n let value = parts.next()?;\n\n\n\n if key == \"peli\" && !value.is_empty() {\n\n // Replace URL safe characters back 
to +/=\n\n let base64 = value.replace(\"-\", \"+\").replace(\".\", \"/\").replace(\"_\", \"=\");\n\n\n\n let game_str = window.atob(&base64).ok()?;\n\n\n\n let game = Sanuli::from_shared_link(&game_str, self.word_lists.clone());\n\n\n\n // Remove the query string\n\n window\n\n .history()\n", "file_path": "src/manager.rs", "rank": 48, "score": 8.63705390807382 }, { "content": " self.message = String::new();\n\n }\n\n\n\n fn set_game_end_message(&mut self) {\n\n if self.is_winner {\n\n if let GameMode::DailyWord(_) = self.game_mode {\n\n self.message = format!(\n\n \"Löysit päivän sanulin! {}\",\n\n SUCCESS_EMOJIS.choose(&mut rand::thread_rng()).unwrap()\n\n );\n\n } else {\n\n self.message = format!(\n\n \"Löysit sanan! {}\",\n\n SUCCESS_EMOJIS.choose(&mut rand::thread_rng()).unwrap()\n\n );\n\n }\n\n } else {\n\n self.message = format!(\"Sana oli \\\"{}\\\"\", self.word.iter().collect::<String>());\n\n }\n\n }\n", "file_path": "src/sanuli.rs", "rank": 49, "score": 8.618234911322487 }, { "content": " </button>\n\n }\n\n } else if matches!(props.game_mode, GameMode::DailyWord(_) | GameMode::Shared) {\n\n let callback = props.callback.clone();\n\n let onmousedown = Callback::from(move |e: MouseEvent| {\n\n e.prevent_default();\n\n callback.emit(Msg::ChangePreviousGameMode);\n\n });\n\n\n\n html! 
{\n\n <button data-nosnippet=\"\" class={classes!(\"keyboard-button\", \"keyboard-button-submit\", \"correct\")}\n\n onmousedown={onmousedown}>\n\n { \"TAKAISIN\" }\n\n </button>\n\n }\n\n } else {\n\n let callback = props.callback.clone();\n\n let onmousedown = Callback::from(move |e: MouseEvent| {\n\n e.prevent_default();\n\n callback.emit(Msg::NextWord);\n", "file_path": "src/components/keyboard.rs", "rank": 50, "score": 8.467855990428514 }, { "content": " known_counts,\n\n guesses,\n\n previous_guesses: Vec::new(),\n\n current_guess: 0,\n\n streak: 0,\n\n }\n\n }\n\n\n\n pub fn from_shared_link(game_str: &str, word_lists: Rc<WordLists>) -> Option<Self> {\n\n let max_guesses = DEFAULT_MAX_GUESSES;\n\n\n\n let mut parts = game_str.split(\"|\");\n\n let word = parts.next()?.chars().collect::<Vec<_>>();\n\n let word_length = word.len();\n\n\n\n let guesses_str = parts.next()?;\n\n\n\n let mut guesses = guesses_str\n\n .chars()\n\n .map(|c| (c, TileState::Unknown))\n", "file_path": "src/sanuli.rs", "rank": 51, "score": 8.316383883675785 }, { "content": " self.previous_guesses.clone()\n\n }\n\n\n\n fn set_allow_profanities(&mut self, is_allowed: bool) {\n\n self.allow_profanities = is_allowed;\n\n }\n\n\n\n fn title(&self) -> String {\n\n if let GameMode::DailyWord(date) = self.game_mode {\n\n format!(\"Päivän sanuli #{}\", Self::get_daily_word_index(date) + 1)\n\n } else if self.game_mode == GameMode::Shared {\n\n \"Jaettu sanuli\".to_owned()\n\n } else if self.streak > 0 {\n\n format!(\"Sanuli — Putki: {}\", self.streak)\n\n } else {\n\n \"Sanuli\".to_owned()\n\n }\n\n }\n\n\n\n fn next_word(&mut self) {\n", "file_path": "src/sanuli.rs", "rank": 52, "score": 8.292303763629263 }, { "content": " self.boards.iter().flat_map(|game| game.boards()).collect()\n\n }\n\n fn word(&self) -> Vec<char> {\n\n Vec::new()\n\n }\n\n\n\n fn streak(&self) -> usize {\n\n self.streak\n\n }\n\n fn last_guess(&self) -> String {\n\n String::new()\n\n }\n\n\n\n fn is_guessing(&self) -> 
bool {\n\n self.boards.iter().any(|board| board.is_guessing())\n\n }\n\n fn is_winner(&self) -> bool {\n\n self.boards.iter().all(|board| board.is_winner())\n\n }\n\n fn is_reset(&self) -> bool {\n", "file_path": "src/neluli.rs", "rank": 53, "score": 8.096950670594172 }, { "content": "use rand::seq::SliceRandom;\n\nuse std::collections::HashMap;\n\nuse std::mem;\n\nuse std::rc::Rc;\n\n\n\nuse chrono::NaiveDate;\n\nuse gloo_storage::{errors::StorageError, LocalStorage, Storage};\n\nuse serde::{Deserialize, Serialize};\n\nuse web_sys::{window, Window};\n\n\n\npub type KnownStates = HashMap<(char, usize), CharacterState>;\n\npub type KnownCounts = HashMap<char, CharacterCount>;\n\n\n\nuse crate::game;\n\nuse crate::game::{\n\n Board, Game, DEFAULT_ALLOW_PROFANITIES, DEFAULT_MAX_GUESSES, DEFAULT_WORD_LENGTH,\n\n SUCCESS_EMOJIS,\n\n};\n\nuse crate::manager::{\n\n CharacterCount, CharacterState, GameMode, KeyState, Theme, TileState, WordList, WordLists,\n", "file_path": "src/sanuli.rs", "rank": 54, "score": 8.079914663738748 }, { "content": " let chars = word.chars();\n\n let word_length = chars.clone().count();\n\n word_lists\n\n .entry((WordList::Profanities, word_length))\n\n .or_insert_with(HashSet::new)\n\n .insert(chars.collect());\n\n }\n\n\n\n Rc::new(word_lists)\n\n}\n\n\n\n#[derive(PartialEq, Eq, Hash, Clone, Copy, Serialize, Deserialize)]\n\npub enum WordList {\n\n Full,\n\n Common,\n\n Easy,\n\n Profanities,\n\n Daily,\n\n}\n\n\n", "file_path": "src/manager.rs", "rank": 55, "score": 7.964488005374175 }, { "content": " ChangeWordLength(usize),\n\n ChangeWordList(WordList),\n\n ChangeAllowProfanities(bool),\n\n ChangeTheme(Theme),\n\n ShareEmojis,\n\n ShareLink,\n\n RevealHiddenTiles,\n\n ResetGame,\n\n}\n\n\n\npub struct App {\n\n manager: Manager,\n\n is_help_visible: bool,\n\n is_menu_visible: bool,\n\n is_emojis_copied: bool,\n\n is_link_copied: bool,\n\n keyboard_listener: Option<Closure<dyn Fn(KeyboardEvent)>>,\n\n}\n\n\n\nimpl Component for App {\n", 
"file_path": "src/main.rs", "rank": 56, "score": 7.950830627949225 }, { "content": " self.word.iter().collect::<String>(),\n\n self.guesses\n\n .iter()\n\n .flat_map(|guess| guess.iter().map(|(c, _)| c))\n\n .collect::<String>(),\n\n );\n\n let window: Window = window().expect(\"window not available\");\n\n let share_str = window.btoa(&game_str).ok()?;\n\n\n\n let base_url = window.location().origin().ok()?;\n\n\n\n // Replace +/= at the base64 with URL safe characters\n\n let safe_str = share_str\n\n .replace(\"+\", \"-\")\n\n .replace(\"/\", \".\")\n\n .replace(\"=\", \"_\");\n\n\n\n return Some(format!(\"{}/?peli={}\", base_url, safe_str));\n\n }\n\n\n", "file_path": "src/sanuli.rs", "rank": 57, "score": 7.9348878408896315 }, { "content": " guesses={board.guesses.clone()}\n\n is_guessing={board.is_guessing}\n\n current_guess={board.current_guess}\n\n is_reset={game.is_reset()}\n\n is_hidden={game.is_hidden()}\n\n previous_guesses={game.previous_guesses().clone()}\n\n max_guesses={game.max_guesses()}\n\n word_length={game.word_length()}\n\n />\n\n }\n\n }).collect::<Html>()}\n\n </div>\n\n </div>\n\n },\n\n _ => html! {}\n\n }\n\n }\n\n\n\n <Keyboard\n\n callback={link.callback(move |msg| msg)}\n", "file_path": "src/main.rs", "rank": 58, "score": 7.810843411596387 }, { "content": " allow_profanities,\n\n &word_lists,\n\n )\n\n };\n\n\n\n Self {\n\n game_mode,\n\n word_list,\n\n word_lists,\n\n word_length,\n\n max_guesses,\n\n word,\n\n allow_profanities,\n\n is_guessing: true,\n\n is_winner: false,\n\n is_unknown: false,\n\n is_reset: false,\n\n is_hidden: false,\n\n message: String::new(),\n\n known_states,\n", "file_path": "src/sanuli.rs", "rank": 59, "score": 7.658721608234688 }, { "content": " });\n\n\n\n html! 
{\n\n <button data-nosnippet=\"\" class={classes!(\"keyboard-button\", \"keyboard-button-submit\", \"correct\")}\n\n onmousedown={onmousedown}>\n\n { \"UUSI?\" }\n\n </button>\n\n }\n\n }\n\n }\n\n <div class=\"spacer\" />\n\n <div class=\"spacer\" />\n\n </div>\n\n </div>\n\n }\n\n}\n\n\n\n#[derive(Properties, PartialEq)]\n\npub struct KeyboardButtonProps {\n\n pub onkeypress: Callback<MouseEvent>,\n\n pub character: char,\n\n pub is_hidden: bool,\n\n pub key_state: KeyState,\n\n}\n\n\n", "file_path": "src/components/keyboard.rs", "rank": 60, "score": 7.646482314998046 }, { "content": " &self.known_states,\n\n &self.known_counts,\n\n );\n\n self.guesses[self.current_guess].push((character, tile_state));\n\n }\n\n\n\n fn pop_character(&mut self) {\n\n if !self.is_guessing || self.guesses[self.current_guess].is_empty() {\n\n return;\n\n }\n\n\n\n self.clear_message();\n\n self.guesses[self.current_guess].pop();\n\n }\n\n\n\n fn share_emojis(&self, theme: Theme) -> Option<String> {\n\n let mut message = String::new();\n\n\n\n if let GameMode::DailyWord(date) = self.game_mode {\n\n let index = Self::get_daily_word_index(date) + 1;\n", "file_path": "src/sanuli.rs", "rank": 61, "score": 7.620183052877232 }, { "content": " serde_json::to_string(&GameMode::Quadruple).unwrap(),\n\n serde_json::to_string(&word_list).unwrap(),\n\n word_length\n\n );\n\n\n\n let mut game: Self = LocalStorage::get(game_key)?;\n\n\n\n for board in game.boards.iter_mut() {\n\n board.set_word_lists(word_lists.clone());\n\n board.set_allow_profanities(allow_profanities);\n\n }\n\n\n\n game.allow_profanities = allow_profanities;\n\n game.word_lists = word_lists;\n\n\n\n game.refresh();\n\n\n\n Ok(game)\n\n }\n\n\n", "file_path": "src/neluli.rs", "rank": 62, "score": 7.396336530115268 }, { "content": "\n\n fn rehydrate(\n\n game_mode: GameMode,\n\n word_list: WordList,\n\n word_length: usize,\n\n allow_profanities: bool,\n\n word_lists: Rc<WordLists>,\n\n ) -> Result<Self, StorageError> {\n\n let 
game_key = &format!(\n\n \"game|{}|{}|{}\",\n\n serde_json::to_string(&game_mode).unwrap(),\n\n serde_json::to_string(&word_list).unwrap(),\n\n word_length\n\n );\n\n\n\n let mut game: Self = LocalStorage::get(game_key)?;\n\n game.allow_profanities = allow_profanities;\n\n game.word_lists = word_lists;\n\n\n\n game.refresh();\n", "file_path": "src/sanuli.rs", "rank": 63, "score": 7.372689136439627 }, { "content": " Self::new(\n\n game_mode,\n\n word_list,\n\n word_length,\n\n DEFAULT_MAX_GUESSES,\n\n allow_profanities,\n\n word_lists,\n\n )\n\n }\n\n }\n\n\n\n pub fn set_word_lists(&mut self, word_lists: Rc<WordLists>) {\n\n self.word_lists = word_lists;\n\n }\n\n\n\n fn get_word(\n\n game_mode: GameMode,\n\n word_list: WordList,\n\n word_length: usize,\n\n allow_profanities: bool,\n", "file_path": "src/sanuli.rs", "rank": 64, "score": 7.370223914623653 }, { "content": "\n\n #[serde(skip)]\n\n allow_profanities: bool,\n\n #[serde(skip)]\n\n word_lists: Rc<WordLists>,\n\n}\n\n\n\nimpl Default for Neluli {\n\n fn default() -> Self {\n\n Neluli::new(\n\n WordList::default(),\n\n DEFAULT_WORD_LENGTH,\n\n DEFAULT_ALLOW_PROFANITIES,\n\n Rc::new(HashMap::new()),\n\n )\n\n }\n\n}\n\n\n\nimpl Neluli {\n\n pub fn new(\n", "file_path": "src/neluli.rs", "rank": 65, "score": 7.224965976135963 }, { "content": " word_list: WordList,\n\n word_length: usize,\n\n allow_profanities: bool,\n\n word_lists: Rc<WordLists>,\n\n ) -> Self {\n\n let boards = vec![\n\n Sanuli::new(\n\n GameMode::Quadruple,\n\n word_list,\n\n word_length,\n\n MAX_GUESSES,\n\n allow_profanities,\n\n word_lists.clone(),\n\n ),\n\n Sanuli::new(\n\n GameMode::Quadruple,\n\n word_list,\n\n word_length,\n\n MAX_GUESSES,\n\n allow_profanities,\n", "file_path": "src/neluli.rs", "rank": 66, "score": 7.208182644726804 }, { "content": " if let Ok(game) = Self::rehydrate(\n\n word_list,\n\n word_length,\n\n allow_profanities,\n\n word_lists.clone(),\n\n ) {\n\n game\n\n } else {\n\n Self::new(word_list, word_length, 
allow_profanities, word_lists)\n\n }\n\n }\n\n\n\n fn rehydrate(\n\n word_list: WordList,\n\n word_length: usize,\n\n allow_profanities: bool,\n\n word_lists: Rc<WordLists>,\n\n ) -> Result<Self, StorageError> {\n\n let game_key = &format!(\n\n \"game|{}|{}|{}\",\n", "file_path": "src/neluli.rs", "rank": 67, "score": 7.202900627331216 }, { "content": " word_lists,\n\n word_length,\n\n max_guesses,\n\n word,\n\n allow_profanities: true,\n\n is_guessing: false,\n\n is_winner: false,\n\n is_unknown: false,\n\n is_reset: false,\n\n is_hidden: true,\n\n message: String::new(),\n\n known_states,\n\n known_counts,\n\n guesses,\n\n previous_guesses: Vec::new(),\n\n current_guess,\n\n streak: 0,\n\n };\n\n\n\n game.refresh();\n", "file_path": "src/sanuli.rs", "rank": 68, "score": 7.045207339618643 }, { "content": "pub mod header;\n\npub mod keyboard;\n\npub mod message;\n\npub mod board;\n\npub mod modal;", "file_path": "src/components/mod.rs", "rank": 69, "score": 6.99786698949698 }, { "content": " Sanuli::new(\n\n GameMode::default(),\n\n WordList::default(),\n\n DEFAULT_WORD_LENGTH,\n\n DEFAULT_MAX_GUESSES,\n\n DEFAULT_ALLOW_PROFANITIES,\n\n Rc::new(HashMap::new()),\n\n )\n\n }\n\n}\n\n\n\nimpl Sanuli {\n\n pub fn new(\n\n game_mode: GameMode,\n\n word_list: WordList,\n\n word_length: usize,\n\n max_guesses: usize,\n\n allow_profanities: bool,\n\n word_lists: Rc<WordLists>,\n\n ) -> Self {\n", "file_path": "src/sanuli.rs", "rank": 70, "score": 6.933825930659632 }, { "content": "impl Default for WordList {\n\n fn default() -> Self {\n\n WordList::Common\n\n }\n\n}\n\n\n\n#[derive(PartialEq, Eq, Hash, Clone, Copy, Serialize, Deserialize)]\n\npub enum GameMode {\n\n Classic,\n\n Relay,\n\n DailyWord(NaiveDate),\n\n Shared,\n\n Quadruple,\n\n}\n\n\n\nimpl Default for GameMode {\n\n fn default() -> Self {\n\n GameMode::Classic\n\n }\n\n}\n", "file_path": "src/manager.rs", "rank": 71, "score": 6.923447132640254 }, { "content": " fn update(&mut self, ctx: &Context<Self>, msg: 
Self::Message) -> bool {\n\n match msg {\n\n Msg::KeyPress(c) => self.manager.push_character(c),\n\n Msg::Backspace => self.manager.pop_character(),\n\n Msg::Enter => {\n\n let link = ctx.link();\n\n\n\n if let Some(game) = &self.manager.game {\n\n if game.is_guessing() {\n\n link.send_message(Msg::Guess);\n\n } else {\n\n if matches!(game.game_mode(), GameMode::DailyWord(_) | GameMode::Shared) {\n\n link.send_message(Msg::ChangePreviousGameMode);\n\n } else {\n\n link.send_message(Msg::NextWord);\n\n }\n\n }\n\n }\n\n }\n\n Msg::Guess => self.manager.submit_guess(),\n", "file_path": "src/main.rs", "rank": 72, "score": 6.74577759571157 }, { "content": " word_lists.clone(),\n\n ),\n\n Sanuli::new(\n\n GameMode::Quadruple,\n\n word_list,\n\n word_length,\n\n MAX_GUESSES,\n\n allow_profanities,\n\n word_lists.clone(),\n\n ),\n\n Sanuli::new(\n\n GameMode::Quadruple,\n\n word_list,\n\n word_length,\n\n MAX_GUESSES,\n\n allow_profanities,\n\n word_lists.clone(),\n\n ),\n\n ];\n\n\n", "file_path": "src/neluli.rs", "rank": 73, "score": 6.7056844750085 }, { "content": " fn reveal_hidden_tiles(&mut self) {\n\n self.is_hidden = false;\n\n self.message = format!(\"Sana oli \\\"{}\\\"\", self.word.iter().collect::<String>());\n\n }\n\n\n\n fn reset(&mut self) {\n\n self.guesses = std::iter::repeat(Vec::with_capacity(self.word_length))\n\n .take(self.max_guesses)\n\n .collect::<Vec<_>>();\n\n\n\n self.current_guess = 0;\n\n\n\n self.is_guessing = true;\n\n self.is_winner = false;\n\n self.is_unknown = false;\n\n self.is_reset = false;\n\n self.is_hidden = false;\n\n self.message = \"Peli nollattu, arvaa sanuli!\".to_owned();\n\n\n\n self.known_states = std::iter::repeat(HashMap::new())\n", "file_path": "src/sanuli.rs", "rank": 74, "score": 6.493438384313866 }, { "content": " && matches!(self.current_game_mode, GameMode::DailyWord(_))\n\n {\n\n // Force the user to reset to the base game\n\n self.current_game_mode = GameMode::default();\n\n self.current_word_list = 
WordList::default();\n\n self.current_word_length = DEFAULT_WORD_LENGTH;\n\n } else {\n\n self.current_game_mode = game_mode;\n\n self.current_word_list = word_list;\n\n self.current_word_length = word_length;\n\n }\n\n\n\n self.switch_active_game();\n\n\n\n let _res = self.persist();\n\n let _res = self.game.as_mut().unwrap().persist();\n\n }\n\n\n\n pub fn change_allow_profanities(&mut self, is_allowed: bool) {\n\n self.allow_profanities = is_allowed;\n", "file_path": "src/manager.rs", "rank": 75, "score": 6.396014799661625 }, { "content": " self.word_lists.clone(),\n\n )),\n\n });\n\n\n\n self.game = Some(game);\n\n self.background_games.insert(previous_game, previous);\n\n }\n\n\n\n fn update_game_statistics(&mut self, is_winner: bool, streak: usize) {\n\n self.total_played += 1;\n\n\n\n if is_winner {\n\n self.total_solved += 1;\n\n\n\n if streak > self.max_streak {\n\n self.max_streak = streak;\n\n }\n\n }\n\n let _res = self.persist();\n\n }\n", "file_path": "src/manager.rs", "rank": 76, "score": 6.362685812238665 }, { "content": "\n\n#[derive(PartialEq, Clone, Copy, Serialize, Deserialize)]\n\npub enum Theme {\n\n Dark,\n\n Colorblind,\n\n}\n\n\n\nimpl Default for Theme {\n\n fn default() -> Self {\n\n Theme::Dark\n\n }\n\n}\n\n\n\n#[derive(Clone, PartialEq, Serialize, Deserialize)]\n\npub enum CharacterState {\n\n Correct,\n\n Absent,\n\n Unknown,\n\n}\n\n\n", "file_path": "src/manager.rs", "rank": 77, "score": 6.182731968681575 }, { "content": "\n\n #[cfg(web_sys_unstable_apis)]\n\n pub fn share_emojis(&self) -> Option<String> {\n\n self.game.as_ref()?.share_emojis(self.theme)\n\n }\n\n\n\n #[cfg(web_sys_unstable_apis)]\n\n pub fn share_link(&self) -> Option<String> {\n\n self.game.as_ref()?.share_link()\n\n }\n\n\n\n pub fn reveal_hidden_tiles(&mut self) {\n\n if let Some(game) = self.game.as_mut() {\n\n game.reveal_hidden_tiles();\n\n }\n\n }\n\n\n\n pub fn reset_game(&mut self) {\n\n if let Some(game) = self.game.as_mut() {\n\n game.reset();\n", 
"file_path": "src/manager.rs", "rank": 78, "score": 6.091894131072445 }, { "content": " let game_key = &format!(\n\n \"game|{}|{}|{}\",\n\n serde_json::to_string(&GameMode::Quadruple).unwrap(),\n\n serde_json::to_string(&self.word_list).unwrap(),\n\n self.word_length\n\n );\n\n\n\n LocalStorage::set(game_key, self)\n\n }\n\n}\n", "file_path": "src/neluli.rs", "rank": 79, "score": 5.971891863377913 }, { "content": "\n\n for word in COMMON_WORDS.lines() {\n\n let chars = word.chars();\n\n let word_length = chars.clone().count();\n\n\n\n if word_length == 6 {\n\n // TODO: Fake 6-letter easy words from common words, get rid of this if the list is created\n\n word_lists\n\n .entry((WordList::Easy, 6))\n\n .or_insert_with(HashSet::new)\n\n .insert(chars.clone().collect());\n\n }\n\n\n\n word_lists\n\n .entry((WordList::Common, word_length))\n\n .or_insert_with(HashSet::new)\n\n .insert(chars.collect());\n\n }\n\n\n\n for word in PROFANITIES.lines() {\n", "file_path": "src/manager.rs", "rank": 80, "score": 5.951503349216237 }, { "content": "\n\n #[serde(skip)]\n\n pub game: Option<Box<dyn Game>>,\n\n #[serde(skip)]\n\n pub background_games: HashMap<(GameMode, WordList, usize), Box<dyn Game>>,\n\n #[serde(skip)]\n\n pub word_lists: Rc<WordLists>,\n\n}\n\n\n\nimpl Default for Manager {\n\n fn default() -> Self {\n\n Self {\n\n current_game_mode: GameMode::default(),\n\n current_word_list: WordList::default(),\n\n current_word_length: DEFAULT_WORD_LENGTH,\n\n allow_profanities: DEFAULT_ALLOW_PROFANITIES,\n\n\n\n previous_game: (\n\n GameMode::default(),\n\n WordList::default(),\n", "file_path": "src/manager.rs", "rank": 81, "score": 5.923093263480229 }, { "content": "#[derive(Clone, Copy, PartialEq, Serialize, Deserialize)]\n\npub enum TileState {\n\n Correct,\n\n Absent,\n\n Present,\n\n Unknown,\n\n}\n\n\n\nimpl fmt::Display for TileState {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n TileState::Correct => write!(f, \"correct\"),\n\n 
TileState::Absent => write!(f, \"absent\"),\n\n TileState::Present => write!(f, \"present\"),\n\n TileState::Unknown => write!(f, \"unknown\"),\n\n }\n\n }\n\n}\n\n\n\nimpl FromStr for Theme {\n", "file_path": "src/manager.rs", "rank": 82, "score": 5.688807091370764 }, { "content": " let _res = self.persist();\n\n let _res = self.game.as_ref().unwrap().persist();\n\n }\n\n\n\n pub fn change_word_list(&mut self, new_list: WordList) {\n\n if self.current_word_list == new_list {\n\n return;\n\n }\n\n\n\n self.current_word_list = new_list;\n\n self.switch_active_game();\n\n\n\n let _res = self.persist();\n\n let _res = self.game.as_ref().unwrap().persist();\n\n }\n\n\n\n pub fn change_previous_game_mode(&mut self) {\n\n let (game_mode, word_list, word_length) = self.previous_game;\n\n\n\n if matches!(game_mode, GameMode::DailyWord(_))\n", "file_path": "src/manager.rs", "rank": 83, "score": 5.634847005106565 }, { "content": " } else {\n\n *character\n\n }\n\n }\n\n </div>\n\n }\n\n }).collect::<Html>()\n\n }\n\n </div>\n\n }\n\n }).collect::<Html>()\n\n }\n\n </div>\n\n </>\n\n }\n\n}\n\n\n\n#[derive(Properties, PartialEq)]\n\npub struct PreviousBoardProps {\n\n pub guesses: Vec<Vec<(char, TileState)>>,\n\n pub max_guesses: usize,\n\n pub word_length: usize,\n\n}\n\n\n", "file_path": "src/components/board.rs", "rank": 84, "score": 5.577946048016491 }, { "content": " </div>\n\n }\n\n } else {\n\n html! 
{\n\n <MenuModal\n\n callback={link.callback(move |msg| msg)}\n\n game_mode={self.manager.current_game_mode}\n\n word_length={self.manager.current_word_length}\n\n current_word_list={self.manager.current_word_list}\n\n allow_profanities={self.manager.allow_profanities}\n\n theme={self.manager.theme}\n\n max_streak={self.manager.max_streak}\n\n total_played={self.manager.total_played}\n\n total_solved={self.manager.total_solved}\n\n />\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 85, "score": 5.478276001434164 }, { "content": " );\n\n }\n\n }\n\n\n\n pub fn change_word_length(&mut self, new_length: usize) {\n\n if self.current_word_length == new_length {\n\n return;\n\n }\n\n\n\n self.current_word_length = new_length;\n\n self.switch_active_game();\n\n\n\n let _res = self.persist();\n\n if let Some(game) = self.game.as_mut() {\n\n let _res = game.persist();\n\n }\n\n }\n\n\n\n pub fn change_game_mode(&mut self, new_mode: GameMode) {\n\n if self.current_game_mode == new_mode {\n", "file_path": "src/manager.rs", "rank": 86, "score": 5.445782674196732 }, { "content": " self.current_guess,\n\n &self.known_states,\n\n &self.known_counts,\n\n ))\n\n }\n\n\n\n fn submit_guess(&mut self) {\n\n if !self.is_guess_correct_length() {\n\n self.message = \"Liian vähän kirjaimia!\".to_owned();\n\n return;\n\n }\n\n if !self.is_guess_accepted_word() {\n\n self.is_unknown = true;\n\n self.message = \"Ei sanulistalla.\".to_owned();\n\n return;\n\n }\n\n\n\n self.is_reset = false;\n\n self.clear_message();\n\n\n", "file_path": "src/sanuli.rs", "rank": 87, "score": 5.422613594390883 }, { "content": "\n\n {\n\n if self.is_menu_visible {\n\n html! 
{\n\n <MenuModal\n\n callback={link.callback(move |msg| msg)}\n\n game_mode={self.manager.current_game_mode}\n\n word_length={self.manager.current_word_length}\n\n current_word_list={self.manager.current_word_list}\n\n allow_profanities={self.manager.allow_profanities}\n\n theme={self.manager.theme}\n\n max_streak={self.manager.max_streak}\n\n total_played={self.manager.total_played}\n\n total_solved={self.manager.total_solved}\n\n />\n\n }\n\n } else {\n\n html! {}\n\n }\n\n }\n", "file_path": "src/main.rs", "rank": 88, "score": 5.41664809974871 }, { "content": " .collect();\n\n self.message = format!(\"Löytämättä jäi: \\\"{}\\\"\", words.join(\"\\\", \\\"\"));\n\n }\n\n }\n\n}\n\n\n\nimpl Game for Neluli {\n\n fn game_mode(&self) -> &GameMode {\n\n &GameMode::Quadruple\n\n }\n\n fn word_list(&self) -> &WordList {\n\n &self.word_list\n\n }\n\n fn word_length(&self) -> usize {\n\n self.word_length\n\n }\n\n fn max_guesses(&self) -> usize {\n\n MAX_GUESSES\n\n }\n\n fn boards(&self) -> Vec<Board> {\n", "file_path": "src/neluli.rs", "rank": 89, "score": 5.394866169254128 }, { "content": " {\"Päivän sanulit tulevat omalta listaltaan, joka on jotain tavallisen ja vaikean listan väliltä. Sanulin on aina sama kaikille pelaajille tiettynä päivänä.\"}\n\n </p>\n\n <p>\n\n {\"Sanuliketjussa jos arvaat sanulin, on se suoraan ensimmäinen arvaus seuraavaan peliin. Näin joudut sopeutumaan vaihtuviin alkuarvauksiin, ja peli on hieman vaikeampi.\"}\n\n </p>\n\n <p>\n\n {\"Nelulissa ratkaiset samalla kertaa neljää eri sanulia samoilla arvauksilla. Tavoite on saada kaikki neljä sanulia ratkaistua yhdeksällä arvauksella.\"}\n\n </p>\n\n <p>\n\n {\"Sanulistoja muokkailen aina välillä käyttäjien ehdotusten perusteella, ja voit jättää omat ehdotuksesi sanuleihin \"}\n\n <a class=\"link\" href={FORMS_LINK_TEMPLATE_ADD}>{\"täällä\"}</a>\n\n {\". 
Kiitos kaikille ehdotuksia jättäneille ja sanulistojen kasaamisessa auttaneille henkilöille!\"}\n\n </p>\n\n </div>\n\n }\n\n}\n\n\n\n#[derive(Properties, Clone, PartialEq)]\n\npub struct MenuModalProps {\n\n pub callback: Callback<Msg>,\n", "file_path": "src/components/modal.rs", "rank": 90, "score": 5.297821439675916 }, { "content": "extern crate wee_alloc;\n\n\n\nuse std::collections::HashMap;\n\nuse wasm_bindgen::{prelude::Closure, JsCast};\n\nuse web_sys::{window, Window};\n\nuse yew::prelude::*;\n\n\n\nmod components;\n\nmod game;\n\nmod manager;\n\nmod neluli;\n\nmod sanuli;\n\n\n\nuse components::{\n\n board::Board,\n\n header::Header,\n\n keyboard::Keyboard,\n\n modal::{HelpModal, MenuModal},\n\n};\n\nuse manager::{GameMode, KeyState, Manager, Theme, WordList};\n", "file_path": "src/main.rs", "rank": 91, "score": 5.24809989764139 }, { "content": "\n\n fn submit_guess(&mut self) {\n\n for board in self.boards.iter_mut() {\n\n if board.is_guessing() {\n\n if !board.is_guess_correct_length() {\n\n self.message = \"Liian vähän kirjaimia!\".to_owned();\n\n return;\n\n }\n\n\n\n if !board.is_guess_accepted_word() {\n\n self.message = \"Ei sanulistalla.\".to_owned();\n\n return;\n\n }\n\n\n\n board.submit_guess();\n\n }\n\n }\n\n\n\n if self.is_game_ended() {\n\n self.set_game_end_message();\n", "file_path": "src/neluli.rs", "rank": 92, "score": 5.2082926699263234 }, { "content": " }\n\n }\n\n\n\n pub fn next_word(&mut self) {\n\n if let Some(game) = self.game.as_mut() {\n\n game.next_word();\n\n }\n\n }\n\n\n\n pub fn submit_guess(&mut self) {\n\n if self.game.is_none() || !self.game.as_ref().unwrap().is_guessing() {\n\n return;\n\n }\n\n\n\n self.game.as_mut().unwrap().submit_guess();\n\n\n\n if !self.game.as_ref().unwrap().is_guessing() {\n\n self.update_game_statistics(\n\n self.game.as_ref().unwrap().is_winner(),\n\n self.game.as_ref().unwrap().streak(),\n", "file_path": "src/manager.rs", "rank": 93, "score": 5.200855190156667 }, { "content": " 
DEFAULT_WORD_LENGTH,\n\n ),\n\n\n\n theme: Theme::default(),\n\n\n\n max_streak: 0,\n\n total_played: 0,\n\n total_solved: 0,\n\n\n\n game: None,\n\n background_games: HashMap::new(),\n\n word_lists: Rc::new(HashMap::new()),\n\n }\n\n }\n\n}\n\n\n\nimpl Manager {\n\n pub fn new() -> Self {\n\n let word_lists = parse_all_words();\n\n\n", "file_path": "src/manager.rs", "rank": 94, "score": 5.157910574124117 }, { "content": " word_lists: &Rc<WordLists>,\n\n ) -> Vec<char> {\n\n if let GameMode::DailyWord(date) = game_mode {\n\n Self::get_daily_word(date)\n\n } else {\n\n Self::get_random_word(word_list, word_length, allow_profanities, word_lists)\n\n }\n\n }\n\n\n\n fn get_random_word(\n\n word_list: WordList,\n\n word_length: usize,\n\n allow_profanities: bool,\n\n word_lists: &Rc<WordLists>,\n\n ) -> Vec<char> {\n\n let mut words = word_lists\n\n .get(&(word_list, word_length))\n\n .unwrap()\n\n .iter()\n\n .collect::<Vec<_>>();\n", "file_path": "src/sanuli.rs", "rank": 95, "score": 5.147000278406577 }, { "content": "\n\n let key_state = props.keyboard.get(key).unwrap_or(&KeyState::Single(TileState::Unknown));\n\n\n\n html! {\n\n <KeyboardButton character={*key} is_hidden={props.is_hidden} onkeypress={onkeypress} key_state={*key_state}/>\n\n }\n\n }).collect::<Html>()\n\n }\n\n {\n\n if props.is_guessing {\n\n let callback = props.callback.clone();\n\n let onmousedown = Callback::from(move |e: MouseEvent| {\n\n e.prevent_default();\n\n callback.emit(Msg::Guess);\n\n });\n\n\n\n html! 
{\n\n <button data-nosnippet=\"\" class={classes!(\"keyboard-button\", \"keyboard-button-submit\")}\n\n onmousedown={onmousedown}>\n\n { \"ARVAA\" }\n", "file_path": "src/components/keyboard.rs", "rank": 96, "score": 5.082871041958599 }, { "content": " let guess_count = if self.is_winner {\n\n format!(\"{}\", self.current_guess + 1)\n\n } else {\n\n \"X\".to_owned()\n\n };\n\n\n\n message += &format!(\"Sanuli #{} {}/{}\", index, guess_count, self.max_guesses);\n\n message += \"\\n\\n\";\n\n\n\n for guess in self.guesses.iter() {\n\n if guess.is_empty() {\n\n continue;\n\n }\n\n let guess_string = guess\n\n .iter()\n\n .map(|(_, state)| match state {\n\n TileState::Correct => match theme {\n\n Theme::Colorblind => \"🟧\",\n\n _ => \"🟩\",\n\n },\n", "file_path": "src/sanuli.rs", "rank": 97, "score": 5.064029732353541 }, { "content": "\n\n let key_state = props.keyboard.get(key).unwrap_or(&KeyState::Single(TileState::Unknown));\n\n\n\n html! {\n\n <KeyboardButton character={*key} is_hidden={props.is_hidden} onkeypress={onkeypress} key_state={*key_state}/>\n\n }\n\n }).collect::<Html>()\n\n }\n\n </div>\n\n <div class=\"keyboard-row\">\n\n <div class=\"spacer\" />\n\n <div class=\"spacer\" />\n\n <div class=\"spacer\" />\n\n {\n\n KEYBOARD_2.iter().map(|key| {\n\n let callback = props.callback.clone();\n\n let onkeypress = Callback::from(move |e: MouseEvent| {\n\n e.prevent_default();\n\n callback.emit(Msg::KeyPress(*key));\n\n });\n", "file_path": "src/components/keyboard.rs", "rank": 98, "score": 5.053111788889189 }, { "content": " let change_allow_profanities_no = onmousedown!(callback, Msg::ChangeAllowProfanities(false));\n\n\n\n let change_theme_dark = onmousedown!(callback, Msg::ChangeTheme(Theme::Dark));\n\n let change_theme_colorblind = onmousedown!(callback, Msg::ChangeTheme(Theme::Colorblind));\n\n\n\n let is_hide_settings = matches!(props.game_mode, GameMode::DailyWord(_) | GameMode::Shared);\n\n\n\n html! 
{\n\n <div class=\"modal\">\n\n <span onmousedown={toggle_menu} class=\"modal-close\">{\"✖\"}</span>\n\n {if !is_hide_settings {\n\n html! {\n\n <>\n\n <div>\n\n <label class=\"label\">{\"Sanulien pituus:\"}</label>\n\n <div class=\"select-container\">\n\n <button class={classes!(\"select\", (props.word_length == 5).then(|| Some(\"select-active\")))}\n\n onmousedown={change_word_length_5}>\n\n {\"5 merkkiä\"}\n\n </button>\n", "file_path": "src/components/modal.rs", "rank": 99, "score": 4.9790768247874135 } ]
Rust
src/corroboy/gpu/background.rs
squidboylan/corroboy
4c264c2604eb0cc6830add3c9e98dee0cb054c40
use gfx_device_gl; use image::*; use piston_window; use piston_window::PistonWindow as Window; use piston_window::Texture; use piston_window::TextureSettings; use sdl2_window::Sdl2Window; use crate::corroboy::mmu::Mmu; struct Tile { raw_val: [[u8; 8]; 8], } impl Tile { pub fn new() -> Tile { Tile { raw_val: [[0; 8]; 8], } } #[allow(dead_code)] fn display_ascii(&self) { for i in 0..8 { for j in 0..8 { print!("{} ", self.raw_val[i][j]); } println!(""); } println!(""); } } pub struct Background { background_data_bot: usize, background_data_top: usize, bg_tiles: Vec<Tile>, bg_tile_map_bot: usize, bg_tile_map: [[u8; 32]; 32], window_tile_map_bot: usize, window_tile_map: [[u8; 32]; 32], window_enabled: bool, window_x: u8, window_y: u8, bg_palette: [usize; 4], pub base_tex: Texture<gfx_device_gl::Resources>, pub tex: Texture<gfx_device_gl::Resources>, base_pixel_map: [[usize; 160]; 144], pixel_map: [[usize; 160]; 144], last_pixel_map: [[usize; 160]; 144], pub enabled: u8, } impl Background { pub fn new(window: &mut Window<Sdl2Window>) -> Background { let mut factory = window.factory.clone(); let mut tiles = Vec::with_capacity(256); for _i in 0..256 { let new = Tile::new(); tiles.push(new); } Background { background_data_bot: 0, background_data_top: 0, bg_tiles: tiles, bg_tile_map_bot: 0, bg_tile_map: [[0; 32]; 32], bg_palette: [0; 4], window_tile_map_bot: 0, window_tile_map: [[0; 32]; 32], window_enabled: false, window_x: 0, window_y: 0, base_tex: Texture::empty(&mut factory).unwrap(), tex: Texture::empty(&mut factory).unwrap(), base_pixel_map: [[0; 160]; 144], pixel_map: [[0; 160]; 144], last_pixel_map: [[0; 160]; 144], enabled: 0, } } pub fn initialize(&mut self, mem: &mut Mmu) { let ff40 = mem.get_io_register(0xFF40); if (ff40 & 0b00010000) >> 4 == 0 { self.background_data_bot = 0x8800; self.background_data_top = 0x97FF; } else { self.background_data_bot = 0x8000; self.background_data_top = 0x8FFF; } if ff40 & 0b00001000 == 0 { self.bg_tile_map_bot = 0x9800; } else { 
self.bg_tile_map_bot = 0x9C00; } if ff40 & 0b00100000 == 0 { self.window_enabled = false; } else { self.window_enabled = true; } if ff40 & 0b01000000 == 0 { self.window_tile_map_bot = 0x9800; } else { self.window_tile_map_bot = 0x9C00; } self.window_y = mem.get_mem_u8(0xFF4A); self.window_x = mem.get_mem_u8(0xFF4B); self.build_tile_data(mem); } #[allow(dead_code)] fn print_bg_tile_map(&self) { for i in 0..32 { for j in 0..32 { print!("{} ", self.bg_tile_map[i][j]); } println!(""); } } #[allow(dead_code)] fn display_ascii(&self) { if self.bg_tiles.len() == 256 { for i in 0..32 { for k in 0..8 { for j in 0..32 { for l in 0..8 { print!( "{} ", self.bg_tiles[self.bg_tile_map[i][j] as usize].raw_val[k][l] ); } } println!(""); } } println!(""); } } pub fn generate_tex(&mut self, window: &mut Window<Sdl2Window>) { const SCREEN_SIZE_X: u32 = 160; const SCREEN_SIZE_Y: u32 = 144; let mut new_map = false; let mut x = 0; let mut y = 0; while y < SCREEN_SIZE_Y as usize && new_map == false { while x < SCREEN_SIZE_X as usize && new_map == false { if self.pixel_map[y][x] != self.last_pixel_map[y][x] { new_map = true; } x += 1; } x = 0; y += 1; } if new_map == true { let mut img: RgbaImage = ImageBuffer::new(SCREEN_SIZE_X, SCREEN_SIZE_Y); let mut base_img: RgbaImage = ImageBuffer::new(SCREEN_SIZE_X, SCREEN_SIZE_Y); let colors = [ [255, 255, 255, 255], [169, 169, 169, 255], [128, 128, 128, 255], [0, 0, 0, 255], [0, 0, 0, 0], ]; let mut x = 0; let mut y = 0; while y < SCREEN_SIZE_Y as usize { while x < SCREEN_SIZE_X as usize { let color = colors[self.pixel_map[y][x]]; self.last_pixel_map[y][x] = self.pixel_map[y][x]; img.put_pixel(x as u32, y as u32, Rgba { data: color }); let color = colors[self.base_pixel_map[y][x]]; base_img.put_pixel(x as u32, y as u32, Rgba { data: color }); x += 1; } x = 0; y += 1; } let mut tex_settings = TextureSettings::new(); tex_settings.set_mag(piston_window::Filter::Nearest); self.tex = Texture::from_image(&mut window.factory, &img, 
&tex_settings).unwrap(); self.base_tex = Texture::from_image(&mut window.factory, &base_img, &tex_settings).unwrap(); } } pub fn build_bg_tile_map(&mut self, mem: &mut Mmu) { for i in 0..32 { for j in 0..32 { self.bg_tile_map[i][j] = mem.get_mem_u8(self.bg_tile_map_bot + (i * 32) + j); } } } pub fn build_window_tile_map(&mut self, mem: &mut Mmu) { for i in 0..32 { for j in 0..32 { self.window_tile_map[i][j] = mem.get_mem_u8(self.window_tile_map_bot + (i * 32) + j); } } } pub fn set_bg_palette(&mut self, mem: &mut Mmu) { let ff47 = mem.get_io_register(0xFF47); self.bg_palette[0] = (ff47 & 0b00000011) as usize; self.bg_palette[1] = ((ff47 & 0b00001100) >> 2) as usize; self.bg_palette[2] = ((ff47 & 0b00110000) >> 4) as usize; self.bg_palette[3] = ((ff47 & 0b11000000) >> 6) as usize; } pub fn build_tile_data(&mut self, mem: &mut Mmu) { if self.background_data_bot == 0x8000 { for i in 0..256 { for j in 0..8 { let left = mem.get_vram(self.background_data_bot + (i * 16) + (j * 2)); let right = mem.get_vram(self.background_data_bot + (i * 16) + 1 + (j * 2)); self.bg_tiles[i].raw_val[j as usize][0] = ((right & 0b10000000) >> 6) + ((left & 0b10000000) >> 7); self.bg_tiles[i].raw_val[j as usize][1] = ((right & 0b01000000) >> 5) + ((left & 0b01000000) >> 6); self.bg_tiles[i].raw_val[j as usize][2] = ((right & 0b00100000) >> 4) + ((left & 0b00100000) >> 5); self.bg_tiles[i].raw_val[j as usize][3] = ((right & 0b00010000) >> 3) + ((left & 0b00010000) >> 4); self.bg_tiles[i].raw_val[j as usize][4] = ((right & 0b00001000) >> 2) + ((left & 0b00001000) >> 3); self.bg_tiles[i].raw_val[j as usize][5] = ((right & 0b00000100) >> 1) + ((left & 0b00000100) >> 2); self.bg_tiles[i].raw_val[j as usize][6] = (right & 0b00000010) + ((left & 0b00000010) >> 1); self.bg_tiles[i].raw_val[j as usize][7] = ((right & 0b00000001) << 1) + (left & 0b00000001); } } } else { let curr = 0x9000; for i in 0..128 { for j in 0..8 { let left = mem.get_vram(curr + (i * 16) + (j * 2)); let right = 
mem.get_vram(curr + (i * 16) + 1 + (j * 2)); self.bg_tiles[i].raw_val[j as usize][0] = ((right & 0b10000000) >> 6) + ((left & 0b10000000) >> 7); self.bg_tiles[i].raw_val[j as usize][1] = ((right & 0b01000000) >> 5) + ((left & 0b01000000) >> 6); self.bg_tiles[i].raw_val[j as usize][2] = ((right & 0b00100000) >> 4) + ((left & 0b00100000) >> 5); self.bg_tiles[i].raw_val[j as usize][3] = ((right & 0b00010000) >> 3) + ((left & 0b00010000) >> 4); self.bg_tiles[i].raw_val[j as usize][4] = ((right & 0b00001000) >> 2) + ((left & 0b00001000) >> 3); self.bg_tiles[i].raw_val[j as usize][5] = ((right & 0b00000100) >> 1) + ((left & 0b00000100) >> 2); self.bg_tiles[i].raw_val[j as usize][6] = (right & 0b00000010) + ((left & 0b00000010) >> 1); self.bg_tiles[i].raw_val[j as usize][7] = ((right & 0b00000001) << 1) + (left & 0b00000001); } } let curr = 0x8800; for i in 128..256 { for j in 0..8 { let left = mem.get_vram(curr + ((i - 128) * 16) + (j * 2)); let right = mem.get_vram(curr + ((i - 128) * 16) + 1 + (j * 2)); self.bg_tiles[i].raw_val[j as usize][0] = ((right & 0b10000000) >> 6) + ((left & 0b10000000) >> 7); self.bg_tiles[i].raw_val[j as usize][1] = ((right & 0b01000000) >> 5) + ((left & 0b01000000) >> 6); self.bg_tiles[i].raw_val[j as usize][2] = ((right & 0b00100000) >> 4) + ((left & 0b00100000) >> 5); self.bg_tiles[i].raw_val[j as usize][3] = ((right & 0b00010000) >> 3) + ((left & 0b00010000) >> 4); self.bg_tiles[i].raw_val[j as usize][4] = ((right & 0b00001000) >> 2) + ((left & 0b00001000) >> 3); self.bg_tiles[i].raw_val[j as usize][5] = ((right & 0b00000100) >> 1) + ((left & 0b00000100) >> 2); self.bg_tiles[i].raw_val[j as usize][6] = (right & 0b00000010) + ((left & 0b00000010) >> 1); self.bg_tiles[i].raw_val[j as usize][7] = ((right & 0b00000001) << 1) + (left & 0b00000001); } } } } pub fn update_background_line(&mut self, line_lcd: u8, mem: &mut Mmu) { self.enabled = mem.get_io_register(0xFF40) & 0x01; let scy = mem.get_io_register(0xFF42); let scx = 
mem.get_io_register(0xFF43); if self.enabled == 1 { let line = line_lcd + scy; let tile_y = ((line / 8) % 32) as usize; let line_in_tile = (line % 8) as usize; for i in 0..160 { let x = i + scx; let tile_x = ((x / 8) % 32) as usize; let x_in_tile = (x % 8) as usize; let tile_num = self.bg_tile_map[tile_y][tile_x] as usize; let palette_num = self.bg_tiles[tile_num].raw_val[line_in_tile][x_in_tile] as usize; if palette_num != 0 { let pixel_val = self.bg_palette[palette_num]; self.pixel_map[line_lcd as usize][i as usize] = pixel_val; } else { self.pixel_map[line_lcd as usize][i as usize] = 4; } let base_pixel_val = self.bg_palette[0]; self.base_pixel_map[line_lcd as usize][i as usize] = base_pixel_val; } } else { for i in 0..160 { let pixel_val = self.bg_palette[0]; self.pixel_map[line_lcd as usize][i as usize] = 4; self.base_pixel_map[line_lcd as usize][i as usize] = pixel_val; } } } pub fn update_window_line(&mut self, line_lcd: u8) { if self.window_enabled == true { if self.window_y <= line_lcd { let y = line_lcd - self.window_y; let line_in_tile = (y % 8) as usize; let tile_y = ((y / 8) % 32) as usize; for i in 0..160 { if i + 7 >= self.window_x { let x = i - self.window_x + 7; let tile_x = ((x / 8) % 32) as usize; let x_in_tile = (x % 8) as usize; let tile_num = self.window_tile_map[tile_y][tile_x] as usize; let palette_num = self.bg_tiles[tile_num].raw_val[line_in_tile][x_in_tile] as usize; let pixel_val = self.bg_palette[palette_num]; self.pixel_map[line_lcd as usize][i as usize] = pixel_val; } } } } } }
use gfx_device_gl; use image::*; use piston_window; use piston_window::PistonWindow as Window; use piston_window::Texture; use piston_window::TextureSettings; use sdl2_window::Sdl2Window; use crate::corroboy::mmu::Mmu; struct Tile { raw_val: [[u8; 8]; 8], } impl Tile { pub fn new() -> Tile { Tile { raw_val: [[0; 8]; 8], } } #[allow(dead_code)] fn display_ascii(&self) { for i in 0..8 { for j in 0..8 { print!("{} ", self.raw_val[i][j]); } println!(""); } println!(""); } } pub struct Background { background_data_bot: usize, background_data_top: usize, bg_tiles: Vec<Tile>, bg_tile_map_bot: usize, bg_tile_map: [[u8; 32]; 32], window_tile_map_bot: usize, window_tile_map: [[u8; 32]; 32], window_enabled: bool, window_x: u8, window_y: u8, bg_palette: [usize; 4], pub base_tex: Texture<gfx_device_gl::Resources>, pub tex: Texture<gfx_device_gl::Resources>, base_pixel_map: [[usize; 160]; 144], pixel_map: [[usize; 160]; 144], last_pixel_map: [[usize; 160]; 144], pub enabled: u8, } impl Background { pub fn new(window: &mut Window<Sdl2Window>) -> Background { let mut factory = window.factory.clone(); let mut tiles = Vec::with_capacity(256); for _i in 0..256 { let new = Tile::new(); tiles.push(new); } Background { background_data_bot: 0, background_data_top: 0, bg_tiles: tiles, bg_tile_map_bot: 0, bg_tile_map: [[0; 32]; 32], bg_palette: [0; 4], window_tile_map_bot: 0, window_tile_map: [[0; 32]; 32], window_enabled: false, window_x: 0, window_y: 0, base_tex: Texture::empty(&mut factory).unwrap(), tex: Texture::empty(&mut factory).unwrap(), base_pixel_map: [[0; 160]; 144], pixel_map: [[0; 160]; 144], last_pixel_map: [[0; 160]; 144], enabled: 0, } } pub fn initialize(&mut self, mem: &mut Mmu) { let ff40 = mem.get_io_register(0xFF40); if (ff40 & 0b00010000) >> 4 == 0 { self.background_data_bot = 0x8800; self.background_data_top = 0x97FF; } else { self.background_data_bot = 0x8000; self.background_data_top = 0x8FFF; } if ff40 & 0b00001000 == 0 { self.bg_tile_map_bot = 0x9800; } else { 
self.bg_tile_map_bot = 0x9C00; } if ff40 & 0b00100000 == 0 { self.window_enabled = false; } else { self.window_enabled = true; } if ff40 & 0b01000000 == 0 { self.window_tile_map_bot = 0x9800; } else { self.window_tile_map_bot = 0x9C00; } self.window_y = mem.get_mem_u8(0xFF4A); self.window_x = mem.get_mem_u8(0xFF4B); self.build_tile_data(mem); } #[allow(dead_code)] fn print_bg_tile_map(&self) { for i in 0..32 { for j in 0..32 { print!("{} ", self.bg_tile_map[i][j]); } println!(""); } } #[allow(dead_code)] fn display_ascii(&self) { if self.bg_tiles.len() == 256 { for i in 0..32 { for k in 0..8 { for j in 0..32 { for l in 0..8 { print!( "{} ", self.bg_tiles[self.bg_tile_map[i][j] as usize].raw_val[k][l] ); } } println!(""); } } println!(""); } } pub fn generate_tex(&mut self, window: &mut Window<Sdl2Window>) { const SCREEN_SIZE_X: u32 = 160; const SCREEN_SIZE_Y: u32 = 144; let mut new_map = false; let mut x = 0; let mut y = 0; while y < SCREEN_SIZE_Y as usize && new_map == false { while x < SCREEN_SIZE_X as usize && new_map == false { if self.pixel_map[y][x] != self.last_pixel_map[y][x] { new_map = true; } x += 1; } x = 0; y += 1; } if new_map == true { let mut img: RgbaImage = ImageBuffer::new(SCREEN_SIZE_X, SCREEN_SIZE_Y); let mut base_img: RgbaImage = ImageBuffer::new(SCREEN_SIZE_X, SCREEN_SIZE_Y); let colors = [ [255, 255, 255, 255], [169, 169, 169, 255], [128, 128, 128, 255], [0, 0, 0, 255], [0, 0, 0, 0], ]; let mut x = 0; let mut y = 0; while y < SCREEN_SIZE_Y as usize { while x < SCREEN_SIZE_X as usize { let color = colors[self.pixel_map[y][x]]; self.last_pixel_map[y][x] = self.pixel_map[y][x]; img.put_pixel(x as u32, y as u32, Rgba { data: color }); let color = colors[self.base_pixel_map[y][x]]; base_img.put_pixel(x as u32, y as u32, Rgba { data: color }); x += 1; } x = 0; y += 1; } let mut tex_settings = TextureSettings::new(); tex_settings.set_mag(piston_window::Filter::Nearest); self.tex = Texture::from_image(&mut window.factory, &img, 
&tex_settings).unwrap(); self.base_tex = Texture::from_image(&mut window.factory, &base_img, &tex_settings).unwrap(); } } pub fn build_bg_tile_map(&mut self, mem: &mut Mmu) { for i in 0..32 { for j in 0..32 { self.bg_tile_map[i][j] = mem.get_mem_u8(self.bg_tile_map_bot + (i * 32) + j); } } } pub fn build_window_tile_map(&mut self, mem: &mut Mmu) { for i in 0..32 { for j in 0..32 { self.window_tile_map[i][j] = mem.get_mem_u8(self.window_tile_map_bot + (i * 32) + j); } } } pub fn set_bg_palette(&mut self, mem: &mut Mmu) { let ff47 = mem.get_io_register(0xFF47); self.bg_palette[0] = (ff47 & 0b00000011) as usize; self.bg_palette[1] = ((ff47 & 0b00001100) >> 2) as usize;
pub fn build_tile_data(&mut self, mem: &mut Mmu) { if self.background_data_bot == 0x8000 { for i in 0..256 { for j in 0..8 { let left = mem.get_vram(self.background_data_bot + (i * 16) + (j * 2)); let right = mem.get_vram(self.background_data_bot + (i * 16) + 1 + (j * 2)); self.bg_tiles[i].raw_val[j as usize][0] = ((right & 0b10000000) >> 6) + ((left & 0b10000000) >> 7); self.bg_tiles[i].raw_val[j as usize][1] = ((right & 0b01000000) >> 5) + ((left & 0b01000000) >> 6); self.bg_tiles[i].raw_val[j as usize][2] = ((right & 0b00100000) >> 4) + ((left & 0b00100000) >> 5); self.bg_tiles[i].raw_val[j as usize][3] = ((right & 0b00010000) >> 3) + ((left & 0b00010000) >> 4); self.bg_tiles[i].raw_val[j as usize][4] = ((right & 0b00001000) >> 2) + ((left & 0b00001000) >> 3); self.bg_tiles[i].raw_val[j as usize][5] = ((right & 0b00000100) >> 1) + ((left & 0b00000100) >> 2); self.bg_tiles[i].raw_val[j as usize][6] = (right & 0b00000010) + ((left & 0b00000010) >> 1); self.bg_tiles[i].raw_val[j as usize][7] = ((right & 0b00000001) << 1) + (left & 0b00000001); } } } else { let curr = 0x9000; for i in 0..128 { for j in 0..8 { let left = mem.get_vram(curr + (i * 16) + (j * 2)); let right = mem.get_vram(curr + (i * 16) + 1 + (j * 2)); self.bg_tiles[i].raw_val[j as usize][0] = ((right & 0b10000000) >> 6) + ((left & 0b10000000) >> 7); self.bg_tiles[i].raw_val[j as usize][1] = ((right & 0b01000000) >> 5) + ((left & 0b01000000) >> 6); self.bg_tiles[i].raw_val[j as usize][2] = ((right & 0b00100000) >> 4) + ((left & 0b00100000) >> 5); self.bg_tiles[i].raw_val[j as usize][3] = ((right & 0b00010000) >> 3) + ((left & 0b00010000) >> 4); self.bg_tiles[i].raw_val[j as usize][4] = ((right & 0b00001000) >> 2) + ((left & 0b00001000) >> 3); self.bg_tiles[i].raw_val[j as usize][5] = ((right & 0b00000100) >> 1) + ((left & 0b00000100) >> 2); self.bg_tiles[i].raw_val[j as usize][6] = (right & 0b00000010) + ((left & 0b00000010) >> 1); self.bg_tiles[i].raw_val[j as usize][7] = ((right & 0b00000001) << 1) + 
(left & 0b00000001); } } let curr = 0x8800; for i in 128..256 { for j in 0..8 { let left = mem.get_vram(curr + ((i - 128) * 16) + (j * 2)); let right = mem.get_vram(curr + ((i - 128) * 16) + 1 + (j * 2)); self.bg_tiles[i].raw_val[j as usize][0] = ((right & 0b10000000) >> 6) + ((left & 0b10000000) >> 7); self.bg_tiles[i].raw_val[j as usize][1] = ((right & 0b01000000) >> 5) + ((left & 0b01000000) >> 6); self.bg_tiles[i].raw_val[j as usize][2] = ((right & 0b00100000) >> 4) + ((left & 0b00100000) >> 5); self.bg_tiles[i].raw_val[j as usize][3] = ((right & 0b00010000) >> 3) + ((left & 0b00010000) >> 4); self.bg_tiles[i].raw_val[j as usize][4] = ((right & 0b00001000) >> 2) + ((left & 0b00001000) >> 3); self.bg_tiles[i].raw_val[j as usize][5] = ((right & 0b00000100) >> 1) + ((left & 0b00000100) >> 2); self.bg_tiles[i].raw_val[j as usize][6] = (right & 0b00000010) + ((left & 0b00000010) >> 1); self.bg_tiles[i].raw_val[j as usize][7] = ((right & 0b00000001) << 1) + (left & 0b00000001); } } } } pub fn update_background_line(&mut self, line_lcd: u8, mem: &mut Mmu) { self.enabled = mem.get_io_register(0xFF40) & 0x01; let scy = mem.get_io_register(0xFF42); let scx = mem.get_io_register(0xFF43); if self.enabled == 1 { let line = line_lcd + scy; let tile_y = ((line / 8) % 32) as usize; let line_in_tile = (line % 8) as usize; for i in 0..160 { let x = i + scx; let tile_x = ((x / 8) % 32) as usize; let x_in_tile = (x % 8) as usize; let tile_num = self.bg_tile_map[tile_y][tile_x] as usize; let palette_num = self.bg_tiles[tile_num].raw_val[line_in_tile][x_in_tile] as usize; if palette_num != 0 { let pixel_val = self.bg_palette[palette_num]; self.pixel_map[line_lcd as usize][i as usize] = pixel_val; } else { self.pixel_map[line_lcd as usize][i as usize] = 4; } let base_pixel_val = self.bg_palette[0]; self.base_pixel_map[line_lcd as usize][i as usize] = base_pixel_val; } } else { for i in 0..160 { let pixel_val = self.bg_palette[0]; self.pixel_map[line_lcd as usize][i as usize] = 4; 
self.base_pixel_map[line_lcd as usize][i as usize] = pixel_val; } } } pub fn update_window_line(&mut self, line_lcd: u8) { if self.window_enabled == true { if self.window_y <= line_lcd { let y = line_lcd - self.window_y; let line_in_tile = (y % 8) as usize; let tile_y = ((y / 8) % 32) as usize; for i in 0..160 { if i + 7 >= self.window_x { let x = i - self.window_x + 7; let tile_x = ((x / 8) % 32) as usize; let x_in_tile = (x % 8) as usize; let tile_num = self.window_tile_map[tile_y][tile_x] as usize; let palette_num = self.bg_tiles[tile_num].raw_val[line_in_tile][x_in_tile] as usize; let pixel_val = self.bg_palette[palette_num]; self.pixel_map[line_lcd as usize][i as usize] = pixel_val; } } } } } }
self.bg_palette[2] = ((ff47 & 0b00110000) >> 4) as usize; self.bg_palette[3] = ((ff47 & 0b11000000) >> 6) as usize; }
function_block-function_prefix_line
[ { "content": "pub fn ret_z(mem: &mut Mmu, flags: u8, pc: &mut u16, sp: &mut u16) -> bool {\n\n if flags & 0b10000000 != 0 {\n\n *pc = mem.pop_u16(sp);\n\n return true;\n\n }\n\n return false;\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/ret.rs", "rank": 0, "score": 298418.85650025465 }, { "content": "pub fn ret_c(mem: &mut Mmu, flags: u8, pc: &mut u16, sp: &mut u16) -> bool {\n\n if flags & 0b00010000 != 0 {\n\n *pc = mem.pop_u16(sp);\n\n return true;\n\n }\n\n return false;\n\n}\n", "file_path": "src/corroboy/cpu/ops/ret.rs", "rank": 1, "score": 298418.8565002546 }, { "content": "pub fn ret_nc(mem: &mut Mmu, flags: u8, pc: &mut u16, sp: &mut u16) -> bool {\n\n if flags & 0b00010000 == 0 {\n\n *pc = mem.pop_u16(sp);\n\n return true;\n\n }\n\n return false;\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/ret.rs", "rank": 2, "score": 294582.42137963156 }, { "content": "pub fn ret_nz(mem: &mut Mmu, flags: u8, pc: &mut u16, sp: &mut u16) -> bool {\n\n if flags & 0b10000000 == 0 {\n\n *pc = mem.pop_u16(sp);\n\n return true;\n\n }\n\n return false;\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/ret.rs", "rank": 3, "score": 294582.4213796315 }, { "content": "pub fn sla_mem(mem: &mut Mmu, hl: u16, flags: &mut u8) {\n\n let val = mem.get_mem_u8(hl as usize);\n\n let new_val = val << 1;\n\n *flags &= 0b10010000;\n\n if val & 0b10000000 != 0 {\n\n *flags |= 0b00010000;\n\n } else {\n\n *flags &= 0b11101111;\n\n }\n\n mem.set_mem_u8(hl as usize, new_val);\n\n if new_val == 0 {\n\n *flags |= 0b10000000;\n\n } else {\n\n *flags &= 0b01111111;\n\n }\n\n}\n", "file_path": "src/corroboy/cpu/ops/sl.rs", "rank": 4, "score": 285890.28027583536 }, { "content": "pub fn rr_mem(mem: &mut Mmu, hl: u16, flags: &mut u8) {\n\n let tmp = (*flags & 0b00010000) >> 4;\n\n let val = mem.get_mem_u8(hl as usize);\n\n let new_val = (val >> 1) + (tmp << 7);\n\n *flags &= 0b10010000;\n\n if val & 0b00000001 == 1 {\n\n *flags |= 0b00010000;\n\n } else {\n\n *flags &= 0b11101111;\n\n }\n\n 
mem.set_mem_u8(hl as usize, new_val);\n\n if new_val == 0 {\n\n *flags |= 0b10000000;\n\n } else {\n\n *flags &= 0b01111111;\n\n }\n\n}\n\n\n\n// RRC n\n\n\n", "file_path": "src/corroboy/cpu/ops/rr.rs", "rank": 5, "score": 285890.2802758353 }, { "content": "pub fn dec_mem(mem: &mut Mmu, hl: u16, flags: &mut u8) {\n\n let val = mem.get_mem_u8(hl as usize) - 1;\n\n mem.set_mem_u8(hl as usize, val);\n\n *flags |= 0b01000000;\n\n if val == 0 {\n\n *flags |= 0b10000000;\n\n } else {\n\n *flags &= 0b01110000;\n\n }\n\n if val & 0b00001111 == 0b00001111 {\n\n *flags |= 0b00100000;\n\n } else {\n\n *flags &= 0b11010000;\n\n }\n\n}\n\n\n\n// DEC nn\n\n\n", "file_path": "src/corroboy/cpu/ops/dec.rs", "rank": 6, "score": 285890.2802758353 }, { "content": "pub fn srl_mem(mem: &mut Mmu, hl: u16, flags: &mut u8) {\n\n let val = mem.get_mem_u8(hl as usize);\n\n let new_val = val >> 1;\n\n *flags &= 0b10010000;\n\n if val & 0b00000001 == 1 {\n\n *flags |= 0b00010000;\n\n } else {\n\n *flags &= 0b11101111;\n\n }\n\n mem.set_mem_u8(hl as usize, new_val);\n\n if new_val == 0 {\n\n *flags |= 0b10000000;\n\n } else {\n\n *flags &= 0b01111111;\n\n }\n\n}\n\n\n\n// SRA n\n\n\n", "file_path": "src/corroboy/cpu/ops/sr.rs", "rank": 7, "score": 285890.2802758353 }, { "content": "pub fn rl_mem(mem: &mut Mmu, hl: u16, flags: &mut u8) {\n\n let tmp = (*flags & 0b00010000) >> 4;\n\n let val = mem.get_mem_u8(hl as usize);\n\n let new_val = (val << 1) + tmp;\n\n *flags &= 0b10010000;\n\n if val & 0b10000000 != 0 {\n\n *flags |= 0b00010000;\n\n } else {\n\n *flags &= 0b11101111;\n\n }\n\n mem.set_mem_u8(hl as usize, new_val);\n\n if new_val == 0 {\n\n *flags |= 0b10000000;\n\n } else {\n\n *flags &= 0b01111111;\n\n }\n\n}\n\n\n\n// CB extended ops\n\n// RLC A rotate A left old bit 7 in C flag\n\n\n", "file_path": "src/corroboy/cpu/ops/rl.rs", "rank": 8, "score": 285890.2802758353 }, { "content": "pub fn inc_mem(mem: &mut Mmu, hl: u16, flags: &mut u8) {\n\n let val = mem.get_mem_u8(hl as 
usize);\n\n\n\n if val & 0b00001111 == 0b00001111 {\n\n *flags |= 0b00100000;\n\n } else {\n\n *flags &= 0b11010000;\n\n }\n\n\n\n let val = val + 1;\n\n mem.set_mem_u8(hl as usize, val);\n\n *flags &= 0b10110000;\n\n if val == 0 {\n\n *flags |= 0b10000000;\n\n } else {\n\n *flags &= 0b01110000;\n\n }\n\n}\n\n\n\n// INC nn\n\n\n", "file_path": "src/corroboy/cpu/ops/inc.rs", "rank": 9, "score": 285890.2802758353 }, { "content": "pub fn rrc_mem(mem: &mut Mmu, hl: u16, flags: &mut u8) {\n\n let val = mem.get_mem_u8(hl as usize);\n\n let tmp = val & 0b00000001;\n\n let new_val = (val >> 1) | (tmp << 7);\n\n *flags &= 0b10010000;\n\n if tmp == 1 {\n\n *flags |= 0b00010000;\n\n } else {\n\n *flags &= 0b11101111;\n\n }\n\n mem.set_mem_u8(hl as usize, new_val);\n\n if new_val == 0 {\n\n *flags |= 0b10000000;\n\n } else {\n\n *flags &= 0b01111111;\n\n }\n\n}\n", "file_path": "src/corroboy/cpu/ops/rr.rs", "rank": 10, "score": 285890.2802758353 }, { "content": "pub fn rlc_mem(mem: &mut Mmu, hl: u16, flags: &mut u8) {\n\n let val = mem.get_mem_u8(hl as usize);\n\n let tmp = (val & 0b10000000) >> 7;\n\n let new_val = (val << 1) | tmp;\n\n *flags &= 0b10010000;\n\n if val & 0b10000000 != 0 {\n\n *flags |= 0b00010000;\n\n } else {\n\n *flags &= 0b11101111;\n\n }\n\n mem.set_mem_u8(hl as usize, new_val);\n\n if new_val == 0 {\n\n *flags |= 0b10000000;\n\n } else {\n\n *flags &= 0b01111111;\n\n }\n\n}\n", "file_path": "src/corroboy/cpu/ops/rl.rs", "rank": 11, "score": 285890.2802758353 }, { "content": "pub fn sra_mem(mem: &mut Mmu, hl: u16, flags: &mut u8) {\n\n let val = mem.get_mem_u8(hl as usize);\n\n let new_val = (val >> 1) + (val & 0b10000000);\n\n *flags &= 0b10010000;\n\n if val & 0b00000001 == 1 {\n\n *flags |= 0b00010000;\n\n } else {\n\n *flags &= 0b11101111;\n\n }\n\n mem.set_mem_u8(hl as usize, new_val);\n\n if new_val == 0 {\n\n *flags |= 0b10000000;\n\n } else {\n\n *flags &= 0b01111111;\n\n }\n\n}\n", "file_path": "src/corroboy/cpu/ops/sr.rs", "rank": 12, "score": 
285890.2802758353 }, { "content": "pub fn swap_mem(mem: &mut Mmu, hl: u16, flags: &mut u8) {\n\n *flags &= 0;\n\n let old_val = mem.get_mem_u8(hl as usize);\n\n let new_val = (old_val >> 4) + (old_val << 4);\n\n mem.set_mem_u8(hl as usize, new_val);\n\n if new_val == 0 {\n\n *flags |= 0b10000000;\n\n }\n\n}\n", "file_path": "src/corroboy/cpu/ops/swap.rs", "rank": 13, "score": 285890.28027583536 }, { "content": "pub fn call_z(val: u16, flags: u8, pc: &mut u16, sp: &mut u16, mem: &mut Mmu) -> bool {\n\n if flags & 0b10000000 != 0 {\n\n mem.push_u16(sp, *pc);\n\n *pc = val;\n\n return true;\n\n }\n\n return false;\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/jump.rs", "rank": 14, "score": 283108.468991096 }, { "content": "pub fn call_c(val: u16, flags: u8, pc: &mut u16, sp: &mut u16, mem: &mut Mmu) -> bool {\n\n if flags & 0b00010000 != 0 {\n\n mem.push_u16(sp, *pc);\n\n *pc = val;\n\n return true;\n\n }\n\n return false;\n\n}\n", "file_path": "src/corroboy/cpu/ops/jump.rs", "rank": 15, "score": 283108.4689910961 }, { "content": "pub fn ldi_a_hl(a: &mut u8, hl: &mut u16, mem: &mut Mmu) {\n\n *a = mem.get_mem_u8(*hl as usize);\n\n *hl += 1;\n\n}\n\n\n\n// LDI (HL), A\n\n// Load A into (HL) and then increment HL\n\n\n", "file_path": "src/corroboy/cpu/ops/ld.rs", "rank": 16, "score": 280774.1406819025 }, { "content": "pub fn ldd_a_hl(a: &mut u8, hl: &mut u16, mem: &mut Mmu) {\n\n *a = mem.get_mem_u8(*hl as usize);\n\n *hl -= 1;\n\n}\n\n\n\n// LDD (HL), A\n\n// Load A into (HL) and then decrement HL\n\n\n", "file_path": "src/corroboy/cpu/ops/ld.rs", "rank": 17, "score": 280774.1406819025 }, { "content": "pub fn ldd_hl_a(a: &mut u8, hl: &mut u16, mem: &mut Mmu) {\n\n mem.set_mem_u8(*hl as usize, *a);\n\n *hl -= 1;\n\n}\n\n\n\n// LDHL SP,n\n\n\n", "file_path": "src/corroboy/cpu/ops/ld.rs", "rank": 18, "score": 280774.1406819025 }, { "content": "pub fn ldi_hl_a(a: &mut u8, hl: &mut u16, mem: &mut Mmu) {\n\n mem.set_mem_u8(*hl as usize, *a);\n\n *hl += 1;\n\n}\n\n\n\n// 
LDD A, (HL)\n\n// Load (HL) into A and then decrement HL\n\n\n", "file_path": "src/corroboy/cpu/ops/ld.rs", "rank": 19, "score": 280774.1406819025 }, { "content": "pub fn call_nz(val: u16, flags: u8, pc: &mut u16, sp: &mut u16, mem: &mut Mmu) -> bool {\n\n if flags & 0b10000000 == 0 {\n\n mem.push_u16(sp, *pc);\n\n *pc = val;\n\n return true;\n\n }\n\n return false;\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/jump.rs", "rank": 20, "score": 279592.7611595528 }, { "content": "pub fn call_nc(val: u16, flags: u8, pc: &mut u16, sp: &mut u16, mem: &mut Mmu) -> bool {\n\n if flags & 0b00010000 == 0 {\n\n mem.push_u16(sp, *pc);\n\n *pc = val;\n\n return true;\n\n }\n\n return false;\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/jump.rs", "rank": 21, "score": 279592.76115955284 }, { "content": "#[inline(always)]\n\npub fn res_mem(mem: &mut Mmu, bit: u8, hl: u16) {\n\n let new_val = mem.get_mem_u8(hl as usize) & (0b11111111 - (1 << bit));\n\n mem.set_mem_u8(hl as usize, new_val);\n\n}\n", "file_path": "src/corroboy/cpu/ops/res.rs", "rank": 22, "score": 276651.9678569618 }, { "content": "pub fn ld_mem(loc: u16, mem: &mut Mmu, val: u8) {\n\n mem.set_mem_u8(loc as usize, val);\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/ld.rs", "rank": 23, "score": 276651.9678569618 }, { "content": "#[inline(always)]\n\npub fn set_mem(mem: &mut Mmu, bit: u8, hl: u16) {\n\n let new_val = mem.get_mem_u8(hl as usize) | (1 << bit);\n\n mem.set_mem_u8(hl as usize, new_val);\n\n}\n", "file_path": "src/corroboy/cpu/ops/set.rs", "rank": 24, "score": 276651.9678569618 }, { "content": "#[inline(always)]\n\nfn get_mode(mem: &mut Mmu) -> u8 {\n\n mem.get_io_register(0xFF41) & 0b00000011\n\n}\n\n\n\n#[inline(always)]\n", "file_path": "src/corroboy/gpu/mod.rs", "rank": 25, "score": 264050.0524673829 }, { "content": "#[inline(always)]\n\nfn get_current_state(mem: &mut Mmu) -> u8 {\n\n (mem.get_io_register(0xFF40) & 0b10000000) >> 7\n\n}\n", "file_path": "src/corroboy/gpu/mod.rs", "rank": 26, 
"score": 259616.06293100998 }, { "content": "#[inline(always)]\n\nfn get_curr_line(mem: &mut Mmu) -> u8 {\n\n mem.get_io_register(0xFF44)\n\n}\n\n\n", "file_path": "src/corroboy/gpu/mod.rs", "rank": 27, "score": 259616.06293100995 }, { "content": "// Mode must be 0 - 3, other values will break the game\n\nfn set_mode(mem: &mut Mmu, mode: u8) {\n\n let tmp = mem.get_io_register(0xFF41) & 0b11111100;\n\n mem.set_io_register(0xFF41, tmp + mode);\n\n}\n\n\n", "file_path": "src/corroboy/gpu/mod.rs", "rank": 28, "score": 253109.88518833683 }, { "content": "#[inline(always)]\n\nfn set_curr_line(mem: &mut Mmu, line_num: u8) {\n\n mem.set_io_register(0xFF44, line_num);\n\n}\n\n\n", "file_path": "src/corroboy/gpu/mod.rs", "rank": 29, "score": 244989.77934287215 }, { "content": "pub fn ret(mem: &mut Mmu, pc: &mut u16, sp: &mut u16) {\n\n *pc = mem.pop_u16(sp);\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/ret.rs", "rank": 30, "score": 238457.78981400846 }, { "content": "pub fn jr_c(val: u8, flags: u8, pc: &mut u16) -> bool {\n\n if flags & 0b00010000 != 0 {\n\n *pc = (*pc as i16 + ((val as i8) as i16)) as u16;\n\n return true;\n\n }\n\n return false;\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/jump.rs", "rank": 31, "score": 235629.7841264326 }, { "content": "pub fn jr_z(val: u8, flags: u8, pc: &mut u16) -> bool {\n\n if flags & 0b10000000 != 0 {\n\n *pc = (*pc as i16 + ((val as i8) as i16)) as u16;\n\n return true;\n\n }\n\n return false;\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/jump.rs", "rank": 32, "score": 235629.7841264326 }, { "content": "pub fn pop(reg: &mut u16, mem: &Mmu, sp: &mut u16) {\n\n *reg = mem.pop_u16(sp);\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/pop.rs", "rank": 33, "score": 235430.18395879143 }, { "content": "pub fn push(reg: u16, mem: &mut Mmu, sp: &mut u16) {\n\n mem.push_u16(sp, reg);\n\n}\n", "file_path": "src/corroboy/cpu/ops/push.rs", "rank": 34, "score": 235430.18395879143 }, { "content": "pub fn jr_nc(val: u8, flags: u8, pc: &mut 
u16) -> bool {\n\n if flags & 0b00010000 == 0 {\n\n *pc = (*pc as i16 + ((val as i8) as i16)) as u16;\n\n return true;\n\n }\n\n return false;\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/jump.rs", "rank": 35, "score": 232273.8142407547 }, { "content": "pub fn jr_nz(val: u8, flags: u8, pc: &mut u16) -> bool {\n\n if flags & 0b10000000 == 0 {\n\n *pc = (*pc as i16 + ((val as i8) as i16)) as u16;\n\n return true;\n\n }\n\n return false;\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/jump.rs", "rank": 36, "score": 232273.8142407547 }, { "content": "pub fn pop_af(reg: &mut u16, mem: &Mmu, sp: &mut u16) {\n\n *reg = mem.pop_u16(sp) & 0xFFF0;\n\n}\n", "file_path": "src/corroboy/cpu/ops/pop.rs", "rank": 37, "score": 232089.2224017394 }, { "content": "/// Disassemble the opcode at the location passed in as an argument\n\npub fn display_disassembly(mem: &Mmu, loc: usize) {\n\n let mut opcode: u16 = mem.get_mem_u8(loc) as u16;\n\n\n\n // Get 2 byte opcode if it starts with 0xCB\n\n if opcode == 0xCB {\n\n opcode = ((opcode) << 8) + mem.get_mem_u8(loc + 1) as u16;\n\n }\n\n print!(\"0x{:04x}: \", loc);\n\n match opcode {\n\n 0x00 => println!(\"nop\"),\n\n 0x01 => println!(\"ld BC,0x{:04x}\", mem.get_mem_u16(loc + 1)),\n\n 0x08 => println!(\"ld (0x{:04x}),SP\", mem.get_mem_u16(loc + 1)),\n\n 0x11 => println!(\"ld DE,0x{:04x}\", mem.get_mem_u16(loc + 1)),\n\n 0x21 => println!(\"ld HL,0x{:04x}\", mem.get_mem_u16(loc + 1)),\n\n 0x31 => println!(\"ld SP,0x{:04x}\", mem.get_mem_u16(loc + 1)),\n\n 0xFA => println!(\n\n \"ld A,(0x{:04x}) ; val = 0x{:04x}\",\n\n mem.get_mem_u16(loc + 1),\n\n mem.get_mem_u8(mem.get_mem_u16(loc + 1) as usize)\n\n ),\n", "file_path": "src/corroboy/disassembler/mod.rs", "rank": 38, "score": 231770.75846371226 }, { "content": "pub fn ld_mem_16bit(loc: u16, mem: &mut Mmu, val: u16) {\n\n mem.set_mem_u16(loc as usize, val);\n\n}\n\n\n\n// LDI A, (HL)\n\n// Load (HL) into A and then increment HL\n\n\n", "file_path": "src/corroboy/cpu/ops/ld.rs", "rank": 
39, "score": 228675.41000342212 }, { "content": "#[inline(always)]\n\npub fn or(reg: u8, a: &mut u8, flags: &mut u8) {\n\n *a = *a | reg;\n\n if *a == 0 {\n\n *flags = *flags | 0b10000000;\n\n } else {\n\n *flags = *flags & 0b01111111;\n\n }\n\n *flags = *flags & 0b10001111;\n\n}\n", "file_path": "src/corroboy/cpu/ops/or.rs", "rank": 40, "score": 226893.23050399774 }, { "content": "#[inline(always)]\n\npub fn and(val: u8, a: &mut u8, flags: &mut u8) {\n\n *a = val & *a;\n\n *flags = 0b00000000;\n\n if *a == 0 {\n\n *flags = *flags | 0b10000000;\n\n } else {\n\n *flags = *flags & 0b01111111;\n\n }\n\n *flags = *flags | 0b00100000;\n\n}\n", "file_path": "src/corroboy/cpu/ops/and.rs", "rank": 41, "score": 226893.23050399774 }, { "content": "pub fn rst(loc: u16, mem: &mut Mmu, sp: &mut u16, pc: &mut u16) {\n\n mem.push_u16(sp, *pc);\n\n *pc = loc;\n\n}\n", "file_path": "src/corroboy/cpu/ops/rst.rs", "rank": 42, "score": 225480.05305614506 }, { "content": "pub fn call(val: u16, pc: &mut u16, sp: &mut u16, mem: &mut Mmu) {\n\n mem.push_u16(sp, *pc);\n\n *pc = val;\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/jump.rs", "rank": 43, "score": 225480.05305614503 }, { "content": "pub fn cpl(a: &mut u8, flags: &mut u8) {\n\n *a = !*a;\n\n *flags |= 0b01100000;\n\n}\n\n\n\n// CCF\n\n// compliment carry flag\n\n\n", "file_path": "src/corroboy/cpu/ops/misc.rs", "rank": 44, "score": 223844.28619642946 }, { "content": "// This implementation of the DAA instruction is based off of\n\n// https://github.com/VelocityRa/rustboy implementation, which is licensed\n\n// under the MIT license, which is available in the LICENSE file in the root\n\n// of the repo\n\npub fn daa(a: &mut u8, flags: &mut u8) {\n\n let mut corr = 0;\n\n if *flags & 0b00100000 != 0 {\n\n corr |= 0x06;\n\n }\n\n if *flags & 0b00010000 != 0 {\n\n corr |= 0x60;\n\n }\n\n if *flags & 0b01000000 == 0 {\n\n if *a & 0b00001111 > 9 {\n\n corr |= 0x06;\n\n }\n\n if *a > 0x99 {\n\n corr |= 0x60;\n\n *flags |= 
0b00010000;\n\n }\n\n\n\n *a += corr;\n\n } else {\n\n *a -= corr;\n", "file_path": "src/corroboy/cpu/ops/misc.rs", "rank": 45, "score": 223844.28619642946 }, { "content": "pub fn sbc(val: u8, a: &mut u8, flags: &mut u8) {\n\n let old_a = *a;\n\n let c = (*flags & 0b00010000) >> 4;\n\n *a = *a - val - c;\n\n\n\n *flags = 0b01000000;\n\n\n\n if *a == 0 {\n\n *flags |= 0b10000000;\n\n }\n\n\n\n if val == 0xFF && c == 1 {\n\n *flags |= 0b00010000;\n\n } else if old_a < *a {\n\n *flags |= 0b00010000;\n\n }\n\n\n\n if val & 0x0F == 0x0F && c == 1 {\n\n *flags |= 0b00100000;\n\n } else if old_a & 0x0F < *a & 0x0F {\n\n *flags |= 0b00100000;\n\n }\n\n //else if (old_a & 0x0F) - (val & 0x0F) - c > 0x0F { *flags |= 0b00100000; }\n\n}\n", "file_path": "src/corroboy/cpu/ops/sbc.rs", "rank": 46, "score": 221080.7063819326 }, { "content": "pub fn adc(val: u8, a: &mut u8, flags: &mut u8) {\n\n let old_a = *a;\n\n let c = (*flags & 0b00010000) >> 4;\n\n let val_c = val + c;\n\n *a += val_c;\n\n\n\n *flags = 0;\n\n\n\n if *a == 0 {\n\n *flags |= 0b10000000;\n\n }\n\n\n\n if val == 0xFF && c == 1 {\n\n *flags |= 0b00010000;\n\n } else if old_a > *a {\n\n *flags |= 0b00010000;\n\n }\n\n\n\n if val & 0x0F == 0x0F && c == 1 {\n\n *flags |= 0b00100000;\n\n } else if ((old_a & 0x0F) + (val_c & 0x0F)) & 0xF0 != 0 {\n\n *flags |= 0b00100000;\n\n }\n\n}\n", "file_path": "src/corroboy/cpu/ops/adc.rs", "rank": 47, "score": 221080.70638193263 }, { "content": "#[inline(always)]\n\npub fn xor(reg: u8, a: &mut u8, flags: &mut u8) {\n\n *a = *a ^ reg;\n\n if *a == 0 {\n\n *flags = *flags | 0b10000000;\n\n } else {\n\n *flags = *flags & 0b01111111;\n\n }\n\n *flags = *flags & 0b10001111;\n\n}\n", "file_path": "src/corroboy/cpu/ops/xor.rs", "rank": 48, "score": 221080.7063819326 }, { "content": "pub fn rla(reg: &mut u8, flags: &mut u8) {\n\n let tmp = (*flags & 0b00010000) >> 4;\n\n *flags = 0;\n\n if *reg & 0b10000000 != 0 {\n\n *flags |= 0b00010000;\n\n } else {\n\n *flags &= 0b11101111;\n\n 
}\n\n *reg = (*reg << 1) + tmp;\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/rl.rs", "rank": 49, "score": 220775.5410585778 }, { "content": "#[inline(always)]\n\npub fn dec(reg: &mut u8, flags: &mut u8) {\n\n *reg -= 1;\n\n *flags |= 0b01000000;\n\n if *reg == 0 {\n\n *flags |= 0b10000000;\n\n } else {\n\n *flags &= 0b01110000;\n\n }\n\n if *reg & 0b00001111 == 0b00001111 {\n\n *flags |= 0b00100000;\n\n } else {\n\n *flags &= 0b11010000;\n\n }\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/dec.rs", "rank": 50, "score": 220775.54105857783 }, { "content": "pub fn rra(reg: &mut u8, flags: &mut u8) {\n\n let tmp = (*flags & 0b00010000) >> 4;\n\n *flags = 0;\n\n if *reg & 0b00000001 == 1 {\n\n *flags |= 0b00010000;\n\n } else {\n\n *flags &= 0b11101111;\n\n }\n\n *reg = (*reg >> 1) + (tmp << 7);\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/rr.rs", "rank": 51, "score": 220775.54105857777 }, { "content": "pub fn rlca(reg: &mut u8, flags: &mut u8) {\n\n *flags = 0;\n\n\n\n *reg = reg.rotate_left(1);\n\n\n\n if *reg & 0x01 == 1 {\n\n *flags |= 0b00010000;\n\n }\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/rl.rs", "rank": 52, "score": 220775.5410585778 }, { "content": "pub fn rrca(reg: &mut u8, flags: &mut u8) {\n\n *flags = 0;\n\n\n\n *reg = reg.rotate_right(1);\n\n\n\n if *reg & 0x80 != 0 {\n\n *flags |= 0b00010000;\n\n }\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/rr.rs", "rank": 53, "score": 220775.5410585778 }, { "content": "#[inline(always)]\n\npub fn inc(reg: &mut u8, flags: &mut u8) {\n\n *flags &= 0b10110000;\n\n\n\n if *reg & 0b00001111 == 0b00001111 {\n\n *flags |= 0b00100000;\n\n } else {\n\n *flags &= 0b11010000;\n\n }\n\n\n\n *reg += 1;\n\n if *reg == 0 {\n\n *flags |= 0b10000000;\n\n } else {\n\n *flags &= 0b01110000;\n\n }\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/inc.rs", "rank": 54, "score": 220775.54105857783 }, { "content": "pub fn swap(reg: &mut u8, flags: &mut u8) {\n\n *flags &= 0;\n\n *reg = (*reg >> 4) + (*reg << 4);\n\n if *reg 
== 0 {\n\n *flags |= 0b10000000;\n\n }\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/swap.rs", "rank": 55, "score": 220775.54105857777 }, { "content": "pub fn sub(val: u8, reg: &mut u8, flags: &mut u8) {\n\n let old_reg = *reg;\n\n *reg -= val;\n\n *flags |= 0b01000000;\n\n if *reg > old_reg {\n\n *flags |= 0b00010000;\n\n } else {\n\n *flags &= 0b11100000;\n\n }\n\n\n\n if old_reg & 0b00001111 >= val & 0b00001111 {\n\n *flags &= 0b11011111;\n\n } else {\n\n *flags |= 0b00100000;\n\n }\n\n\n\n if *reg == 0 {\n\n *flags |= 0b10000000;\n\n } else {\n\n *flags &= 0b01111111;\n\n }\n\n}\n", "file_path": "src/corroboy/cpu/ops/sub.rs", "rank": 56, "score": 218365.46591327718 }, { "content": "pub fn add(val: u8, reg: &mut u8, flags: &mut u8) {\n\n let old_reg = *reg;\n\n *reg += val;\n\n *flags &= 0b10111111;\n\n if old_reg > *reg {\n\n *flags |= 0b00010000;\n\n } else {\n\n *flags &= 0b11101111;\n\n }\n\n if (old_reg & 0b00001111) + (val & 0b00001111) >= 0b00010000 {\n\n *flags |= 0b00100000;\n\n } else {\n\n *flags &= 0b11011111;\n\n }\n\n if *reg != 0 {\n\n *flags &= 0b01111111;\n\n } else {\n\n *flags |= 0b10000000;\n\n }\n\n}\n\n\n\n// ADD sp,n\n\n\n", "file_path": "src/corroboy/cpu/ops/add.rs", "rank": 57, "score": 218365.46591327718 }, { "content": "pub fn rr_reg(reg: &mut u8, flags: &mut u8) {\n\n let tmp = (*flags & 0b00010000) >> 4;\n\n *flags &= 0b10010000;\n\n if *reg & 0b00000001 == 1 {\n\n *flags |= 0b00010000;\n\n } else {\n\n *flags &= 0b11101111;\n\n }\n\n *reg = (*reg >> 1) + (tmp << 7);\n\n if *reg == 0 {\n\n *flags |= 0b10000000;\n\n } else {\n\n *flags &= 0b01111111;\n\n }\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/rr.rs", "rank": 58, "score": 217849.80066173268 }, { "content": "pub fn rlc_reg(reg: &mut u8, flags: &mut u8) {\n\n *flags = 0;\n\n\n\n *reg = reg.rotate_left(1);\n\n\n\n if *reg & 0x01 == 1 {\n\n *flags |= 0b00010000;\n\n }\n\n\n\n if *reg == 0 {\n\n *flags |= 0b10000000;\n\n }\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/rl.rs", 
"rank": 59, "score": 217849.80066173262 }, { "content": "pub fn rl_reg(reg: &mut u8, flags: &mut u8) {\n\n let tmp = (*flags & 0b00010000) >> 4;\n\n *flags &= 0b10010000;\n\n if *reg & 0b10000000 != 0 {\n\n *flags |= 0b00010000;\n\n } else {\n\n *flags &= 0b11101111;\n\n }\n\n *reg = (*reg << 1) + tmp;\n\n if *reg == 0 {\n\n *flags |= 0b10000000;\n\n } else {\n\n *flags &= 0b01111111;\n\n }\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/rl.rs", "rank": 60, "score": 217849.80066173262 }, { "content": "pub fn sla_reg(reg: &mut u8, flags: &mut u8) {\n\n *flags &= 0b10010000;\n\n if *reg & 0b10000000 != 0 {\n\n *flags |= 0b00010000;\n\n } else {\n\n *flags &= 0b11101111;\n\n }\n\n *reg = *reg << 1;\n\n if *reg == 0 {\n\n *flags |= 0b10000000;\n\n } else {\n\n *flags &= 0b01111111;\n\n }\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/sl.rs", "rank": 61, "score": 217849.80066173262 }, { "content": "pub fn srl_reg(reg: &mut u8, flags: &mut u8) {\n\n *flags &= 0b10010000;\n\n if *reg & 0b00000001 == 1 {\n\n *flags |= 0b00010000;\n\n } else {\n\n *flags &= 0b11101111;\n\n }\n\n *reg = *reg >> 1;\n\n if *reg == 0 {\n\n *flags |= 0b10000000;\n\n } else {\n\n *flags &= 0b01111111;\n\n }\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/sr.rs", "rank": 62, "score": 217849.80066173262 }, { "content": "pub fn sra_reg(reg: &mut u8, flags: &mut u8) {\n\n *flags &= 0b10010000;\n\n if *reg & 0b00000001 == 1 {\n\n *flags |= 0b00010000;\n\n } else {\n\n *flags &= 0b11101111;\n\n }\n\n *reg = (*reg >> 1) + (*reg & 0b10000000);\n\n if *reg == 0 {\n\n *flags |= 0b10000000;\n\n } else {\n\n *flags &= 0b01111111;\n\n }\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/sr.rs", "rank": 63, "score": 217849.80066173262 }, { "content": "pub fn rrc_reg(reg: &mut u8, flags: &mut u8) {\n\n let tmp = *reg & 0b00000001;\n\n *flags &= 0b10010000;\n\n if tmp == 1 {\n\n *flags |= 0b00010000;\n\n } else {\n\n *flags &= 0b11101111;\n\n }\n\n *reg = (*reg >> 1) | (tmp << 7);\n\n if *reg == 0 {\n\n *flags |= 
0b10000000;\n\n } else {\n\n *flags &= 0b01111111;\n\n }\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/rr.rs", "rank": 64, "score": 217849.80066173262 }, { "content": "pub fn jp_c(val: u16, flags: u8, pc: &mut u16) -> bool {\n\n if flags & 0b00010000 != 0 {\n\n *pc = val;\n\n return true;\n\n }\n\n return false;\n\n}\n\n\n\n// JR n\n\n\n", "file_path": "src/corroboy/cpu/ops/jump.rs", "rank": 65, "score": 217041.0311959196 }, { "content": "pub fn jp_z(val: u16, flags: u8, pc: &mut u16) -> bool {\n\n if flags & 0b10000000 != 0 {\n\n *pc = val;\n\n return true;\n\n }\n\n return false;\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/jump.rs", "rank": 66, "score": 217041.0311959196 }, { "content": "pub fn jp_nc(val: u16, flags: u8, pc: &mut u16) -> bool {\n\n if flags & 0b00010000 == 0 {\n\n *pc = val;\n\n return true;\n\n }\n\n return false;\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/jump.rs", "rank": 67, "score": 213683.63251452413 }, { "content": "pub fn jp_nz(val: u16, flags: u8, pc: &mut u16) -> bool {\n\n if flags & 0b10000000 == 0 {\n\n *pc = val;\n\n return true;\n\n }\n\n return false;\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/jump.rs", "rank": 68, "score": 213683.63251452413 }, { "content": "#[inline(always)]\n\npub fn cp(val: u8, a: u8, flags: &mut u8) {\n\n let tmp = a - val;\n\n if tmp == 0 {\n\n *flags |= 0b10000000;\n\n } else {\n\n *flags &= 0b01111111;\n\n }\n\n // Set n flag\n\n *flags = *flags | 0b01000000;\n\n if a < val {\n\n *flags |= 0b00010000;\n\n } else {\n\n *flags &= 0b11101111;\n\n }\n\n if (a & 0b00001111) >= (val & 0b00001111) {\n\n *flags &= 0b11011111;\n\n } else {\n\n *flags |= 0b00100000;\n\n }\n\n}\n", "file_path": "src/corroboy/cpu/ops/cp.rs", "rank": 69, "score": 208833.27513201706 }, { "content": "pub fn ei(ime: &mut u8) {\n\n *ime = 1;\n\n}\n", "file_path": "src/corroboy/cpu/ops/misc.rs", "rank": 70, "score": 207300.8083096735 }, { "content": "pub fn ccf(flags: &mut u8) {\n\n if *flags & 0b00010000 != 0 {\n\n *flags 
&= 0b11101111;\n\n } else {\n\n *flags |= 0b00010000;\n\n }\n\n //if *flags & 0b00100000 != 0 { *flags &= 0b11011111; }\n\n //else { *flags |= 0b00100000; }\n\n *flags &= 0b10010000;\n\n}\n\n\n\n// SCF\n\n// set carry flag\n\n\n", "file_path": "src/corroboy/cpu/ops/misc.rs", "rank": 71, "score": 207300.8083096735 }, { "content": "pub fn stop(halt: &mut u8) {\n\n *halt = 1;\n\n}\n\n\n\n// Disable and enable interrupts\n\n\n", "file_path": "src/corroboy/cpu/ops/misc.rs", "rank": 72, "score": 207300.8083096735 }, { "content": "pub fn scf(flags: &mut u8) {\n\n *flags &= 0b10010000;\n\n *flags |= 0b00010000;\n\n}\n\n\n\n// HALT - power down cpu until an interrupt occurs.\n\n\n", "file_path": "src/corroboy/cpu/ops/misc.rs", "rank": 73, "score": 207300.8083096735 }, { "content": "pub fn di(ime: &mut u8) {\n\n *ime = 0;\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/misc.rs", "rank": 74, "score": 207300.8083096735 }, { "content": "pub fn halt(halt: &mut u8) {\n\n *halt = 1;\n\n}\n\n\n\n// This should halt the GPU and CPU but currently only halts the CPU\n\n\n", "file_path": "src/corroboy/cpu/ops/misc.rs", "rank": 75, "score": 207300.8083096735 }, { "content": "pub fn ld_reg(val: u8, reg: &mut u8) {\n\n *reg = val;\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/ld.rs", "rank": 76, "score": 206390.729566307 }, { "content": "#[inline(always)]\n\npub fn set_reg(reg: &mut u8, bit: u8) {\n\n *reg |= 1 << bit;\n\n}\n\n\n\n// SET n,(HL)\n\n\n", "file_path": "src/corroboy/cpu/ops/set.rs", "rank": 77, "score": 206390.729566307 }, { "content": "#[inline(always)]\n\npub fn res_reg(reg: &mut u8, bit: u8) {\n\n *reg &= 0b11111111 - (1 << bit);\n\n}\n\n\n\n// RES n,HL\n\n\n", "file_path": "src/corroboy/cpu/ops/res.rs", "rank": 78, "score": 206390.729566307 }, { "content": "#[inline(always)]\n\npub fn bit(val: u8, bit: u8, flags: &mut u8) {\n\n *flags &= 0b00110000;\n\n *flags |= 0b00100000;\n\n if val & (1 << bit) == 0 {\n\n *flags |= 0b10000000;\n\n }\n\n}\n", "file_path": 
"src/corroboy/cpu/ops/bit.rs", "rank": 79, "score": 205968.51830300968 }, { "content": "pub fn add_sp_param(val: u8, sp: &mut u16, flags: &mut u8) {\n\n let old_sp = *sp;\n\n let val_i16 = val as i8 as i16;\n\n *sp = (*sp as i16 + val_i16) as u16;\n\n\n\n *flags = 0;\n\n\n\n if (val_i16 as u16 ^ old_sp ^ *sp) & 0x10 != 0 {\n\n *flags |= 0b00100000;\n\n }\n\n if (val_i16 as u16 ^ old_sp ^ *sp) & 0x100 != 0 {\n\n *flags |= 0b00010000;\n\n }\n\n}\n", "file_path": "src/corroboy/cpu/ops/add.rs", "rank": 80, "score": 202846.47258301062 }, { "content": "pub fn ldhl_sp_n(val: u8, sp: u16, hl: &mut u16, flags: &mut u8) {\n\n let val_i16 = val as i8 as i16;\n\n\n\n *flags = 0;\n\n *hl = (sp as i16 + val_i16) as u16;\n\n\n\n if (val_i16 as u16 ^ sp ^ *hl) & 0x10 != 0 {\n\n *flags |= 0b00100000;\n\n }\n\n if (val_i16 as u16 ^ sp ^ *hl) & 0x100 != 0 {\n\n *flags |= 0b00010000;\n\n }\n\n}\n", "file_path": "src/corroboy/cpu/ops/ld.rs", "rank": 81, "score": 194432.88426284713 }, { "content": "pub fn jr(val: u8, pc: &mut u16) {\n\n *pc = (*pc as i16 + ((val as i8) as i16)) as u16;\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/jump.rs", "rank": 82, "score": 190969.20476535865 }, { "content": "pub fn add_16bit(val: u16, reg: &mut u16, flags: &mut u8) {\n\n let old_reg = *reg;\n\n *reg += val;\n\n *flags &= 0b10111111;\n\n if old_reg > *reg {\n\n *flags |= 0b00010000;\n\n } else {\n\n *flags &= 0b11101111;\n\n }\n\n if (old_reg & 0b0000111111111111) + (val & 0b0000111111111111) >= 0b0001000000000000 {\n\n *flags |= 0b00100000;\n\n } else {\n\n *flags &= 0b11011111;\n\n }\n\n}\n\n\n\n// ADD A, n\n\n\n", "file_path": "src/corroboy/cpu/ops/add.rs", "rank": 83, "score": 186809.7116191442 }, { "content": "pub fn inc_16bit(reg: &mut u16) {\n\n *reg += 1;\n\n}\n", "file_path": "src/corroboy/cpu/ops/inc.rs", "rank": 85, "score": 151557.0328276382 }, { "content": "pub fn dec_16bit(reg: &mut u16) {\n\n *reg -= 1;\n\n}\n", "file_path": "src/corroboy/cpu/ops/dec.rs", "rank": 86, "score": 
151557.0328276382 }, { "content": "pub fn jp(val: u16, pc: &mut u16) {\n\n *pc = val;\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/jump.rs", "rank": 87, "score": 142083.66128628113 }, { "content": "pub fn ld_reg_16bit(val: u16, reg: &mut u16) {\n\n *reg = val;\n\n}\n\n\n", "file_path": "src/corroboy/cpu/ops/ld.rs", "rank": 88, "score": 137500.4755655905 }, { "content": "#[test]\n\npub fn timer_11() {\n\n let mut timer = Timer::new();\n\n let mut mem = Mmu::new(None);\n\n\n\n mem.set_mem_u8(0xFF07, 0b00000111);\n\n\n\n for i in 0..(64 * 258) {\n\n assert_eq!(((i / 64) % 256) as u8, mem.get_mem_u8(0xFF05));\n\n timer.update(&mut mem);\n\n }\n\n\n\n assert_eq!(mem.get_mem_u8(0xFF0F) & 0b00000100, 0b00000100);\n\n}\n", "file_path": "src/corroboy/timer/tests.rs", "rank": 89, "score": 119351.68104894187 }, { "content": "#[test]\n\npub fn timer_01() {\n\n let mut timer = Timer::new();\n\n let mut mem = Mmu::new(None);\n\n\n\n mem.set_mem_u8(0xFF07, 0b00000101);\n\n\n\n for i in 0..(4 * 258) {\n\n assert_eq!(((i / 4) % 256) as u8, mem.get_mem_u8(0xFF05));\n\n timer.update(&mut mem);\n\n }\n\n\n\n assert_eq!(mem.get_mem_u8(0xFF0F) & 0b00000100, 0b00000100);\n\n}\n\n\n", "file_path": "src/corroboy/timer/tests.rs", "rank": 90, "score": 119351.68104894187 }, { "content": "#[test]\n\npub fn timer_00() {\n\n let mut timer = Timer::new();\n\n let mut mem = Mmu::new(None);\n\n\n\n mem.set_mem_u8(0xFF07, 0b00000100);\n\n\n\n for i in 0..(256 * 258) {\n\n assert_eq!(((i / 256) % 256) as u8, mem.get_mem_u8(0xFF05));\n\n timer.update(&mut mem);\n\n }\n\n\n\n assert_eq!(mem.get_mem_u8(0xFF0F) & 0b00000100, 0b00000100);\n\n}\n\n\n", "file_path": "src/corroboy/timer/tests.rs", "rank": 91, "score": 119351.68104894187 }, { "content": "#[test]\n\npub fn timer_10() {\n\n let mut timer = Timer::new();\n\n let mut mem = Mmu::new(None);\n\n\n\n mem.set_mem_u8(0xFF07, 0b00000110);\n\n\n\n for i in 0..(16 * 258) {\n\n assert_eq!(((i / 16) % 256) as u8, mem.get_mem_u8(0xFF05));\n\n 
timer.update(&mut mem);\n\n }\n\n\n\n assert_eq!(mem.get_mem_u8(0xFF0F) & 0b00000100, 0b00000100);\n\n}\n\n\n", "file_path": "src/corroboy/timer/tests.rs", "rank": 92, "score": 119351.68104894187 }, { "content": "#[test]\n\npub fn test_joypad() {\n\n let mut joypad = Joypad::new();\n\n let mut mem = Mmu::new(None);\n\n\n\n joypad.press_input(Button::Keyboard(Key::Up));\n\n joypad.press_input(Button::Keyboard(Key::Down));\n\n\n\n mem.set_mem_u8(0xFF00, 0b00010000);\n\n joypad.update(&mut mem);\n\n assert_eq!(mem.get_mem_u8(0xFF00), 0b11011111);\n\n\n\n mem.set_mem_u8(0xFF00, 0b00100000);\n\n joypad.update(&mut mem);\n\n assert_eq!(mem.get_mem_u8(0xFF00), 0b11100011);\n\n\n\n joypad.release_input(Button::Keyboard(Key::Up));\n\n joypad.release_input(Button::Keyboard(Key::Down));\n\n\n\n mem.set_mem_u8(0xFF00, 0b00100000);\n\n joypad.update(&mut mem);\n", "file_path": "src/corroboy/joypad/tests.rs", "rank": 93, "score": 117071.64910497391 }, { "content": "#[test]\n\npub fn timer_div() {\n\n let mut timer = Timer::new();\n\n let mut mem = Mmu::new(None);\n\n\n\n for i in 0..10000 {\n\n assert_eq!((i / 64) as u8, mem.get_mem_u8(0xFF04));\n\n assert_eq!(0, mem.get_mem_u8(0xFF05));\n\n timer.update(&mut mem);\n\n }\n\n}\n\n\n", "file_path": "src/corroboy/timer/tests.rs", "rank": 94, "score": 117071.64910497391 }, { "content": "#[test]\n\nfn mmu_ram() {\n\n let mut derp = Mmu::new(None);\n\n derp.set_mem_u8(0xC000, 255);\n\n assert_eq!(derp.get_mem_u8(0xC000), 255);\n\n\n\n derp.set_mem_u8(0xC001, 10);\n\n assert_eq!(derp.get_mem_u8(0xC001), 10);\n\n\n\n derp.set_mem_u8(0xC002, 1);\n\n assert_eq!(derp.get_mem_u8(0xC002), 1);\n\n\n\n derp.set_mem_u16(0xC003, 0x1234);\n\n assert_eq!(derp.get_mem_u8(0xC003), 0x34);\n\n assert_eq!(derp.get_mem_u8(0xC004), 0x12);\n\n assert_eq!(derp.get_mem_u16(0xC003), 0x1234);\n\n}\n\n\n", "file_path": "src/corroboy/mmu/tests.rs", "rank": 95, "score": 103136.64849927172 }, { "content": "struct Timer {\n\n // When ticks gets to 1048576 (cpu 
ticks/s) it is reset and seconds is increased\n\n ticks: usize,\n\n seconds: u8,\n\n minutes: u8,\n\n hours: u8,\n\n days: u8,\n\n\n\n // See hardware documentation for what this register is for\n\n reg5: u8,\n\n\n\n // Whether or not the timer should update\n\n latch: bool,\n\n\n\n latch_seconds: u8,\n\n latch_minutes: u8,\n\n latch_hours: u8,\n\n latch_days: u8,\n\n\n\n // See hardware documentation for what this register is for\n", "file_path": "src/corroboy/mmu/cartridge/mbc3.rs", "rank": 96, "score": 102311.20205171907 }, { "content": "#[test]\n\nfn stack_functions() {\n\n let mut derp = Mmu::new(None);\n\n let mut sp: u16 = 0xDFFF;\n\n\n\n derp.push_u16(&mut sp, 0x3210);\n\n assert_eq!(derp.pop_u16(&mut sp), 0x3210);\n\n}\n", "file_path": "src/corroboy/mmu/tests.rs", "rank": 97, "score": 93493.79733241367 }, { "content": "pub trait Cartridge {\n\n fn read(&self, location: usize) -> u8;\n\n fn write(&mut self, location: usize, val: u8);\n\n fn save_ram(&mut self);\n\n // By default there is no timer so do nothing\n\n fn update_timer(&mut self, _ticks: usize) {}\n\n}\n", "file_path": "src/corroboy/mmu/cartridge/mod.rs", "rank": 98, "score": 92672.21380688847 }, { "content": "#[test]\n\nfn test_mbc1() {\n\n // Fill cart data with 0 - 255\n\n let mut data: Vec<u8> = Vec::with_capacity(1024 * 32);\n\n for j in 0..8 {\n\n for _i in 0..16384 {\n\n data.push(j as u8);\n\n }\n\n }\n\n\n\n // Create cart with 32768B of ram, no battery and no save file\n\n let mut cart = Mbc1::new(data, 32768, false, &None);\n\n\n\n // Test that the data reads correctly\n\n for i in 0..16384 {\n\n assert_eq!(cart.read(i), 0);\n\n }\n\n for j in 1..8 {\n\n cart.write(0x2000, j);\n\n for i in 0..16384 {\n\n assert_eq!(cart.read(0x4000 + i), j as u8);\n", "file_path": "src/corroboy/mmu/cartridge/mbc1.rs", "rank": 99, "score": 91225.98902955263 } ]
Rust
crates/holochain/tests/gossip_test.rs
zdb999/holochain
45dd3f827caea18f41f77486ca2c37149a18b4ca
use ::fixt::prelude::*; use hdk3::prelude::*; use holochain::conductor::{ api::{AppInterfaceApi, AppRequest, AppResponse, RealAppInterfaceApi}, dna_store::MockDnaStore, }; use holochain::core::ribosome::ZomeCallInvocation; use holochain::{ fixt::*, test_utils::{install_app, setup_app}, }; use holochain_types::app::InstalledCell; use holochain_types::cell::CellId; use holochain_types::dna::DnaDef; use holochain_types::dna::DnaFile; use holochain_types::test_utils::fake_agent_pubkey_1; use holochain_types::{observability, test_utils::fake_agent_pubkey_2}; use holochain_wasm_test_utils::TestWasm; use holochain_zome_types::ExternInput; use matches::assert_matches; use test_wasm_common::{AnchorInput, TestString}; #[tokio::test(threaded_scheduler)] async fn gossip_test() { observability::test_run().ok(); const NUM: usize = 1; let dna_file = DnaFile::new( DnaDef { name: "need_for_speed_test".to_string(), uuid: "ba1d046d-ce29-4778-914b-47e6010d2faf".to_string(), properties: SerializedBytes::try_from(()).unwrap(), zomes: vec![TestWasm::Anchor.into()].into(), }, vec![TestWasm::Anchor.into()], ) .await .unwrap(); let alice_agent_id = fake_agent_pubkey_1(); let alice_cell_id = CellId::new(dna_file.dna_hash().to_owned(), alice_agent_id.clone()); let alice_installed_cell = InstalledCell::new(alice_cell_id.clone(), "alice_handle".into()); let bob_agent_id = fake_agent_pubkey_2(); let bob_cell_id = CellId::new(dna_file.dna_hash().to_owned(), bob_agent_id.clone()); let bob_installed_cell = InstalledCell::new(bob_cell_id.clone(), "bob_handle".into()); let mut dna_store = MockDnaStore::new(); dna_store.expect_get().return_const(Some(dna_file.clone())); dna_store .expect_add_dnas::<Vec<_>>() .times(2) .return_const(()); dna_store .expect_add_entry_defs::<Vec<_>>() .times(2) .return_const(()); dna_store.expect_get_entry_def().return_const(None); let (_tmpdir, app_api, handle) = setup_app( vec![("alice app", vec![(alice_installed_cell, None)])], dna_store, ) .await; let 
anchor_invocation = |anchor: &str, cell_id, i: usize| { let anchor = AnchorInput(anchor.into(), i.to_string()); new_invocation(cell_id, "anchor", anchor) }; for i in 0..NUM { let invocation = anchor_invocation("alice", alice_cell_id.clone(), i).unwrap(); let response = call(&app_api, invocation).await; assert_matches!(response, AppResponse::ZomeCallInvocation(_)); } tokio::time::delay_for(std::time::Duration::from_secs(1)).await; let cell_data = vec![(bob_installed_cell, None)]; install_app("bob_app", cell_data, handle.clone()).await; tokio::time::delay_for(std::time::Duration::from_secs(1)).await; let invocation = new_invocation( bob_cell_id.clone(), "list_anchor_addresses", TestString("alice".into()), ) .unwrap(); let response = call(&app_api, invocation).await; match response { AppResponse::ZomeCallInvocation(r) => { let response: SerializedBytes = r.into_inner(); let hashes: EntryHashes = response.try_into().unwrap(); assert_eq!(hashes.0.len(), NUM); } _ => unreachable!(), } let shutdown = handle.take_shutdown_handle().await.unwrap(); handle.shutdown().await; shutdown.await.unwrap(); } async fn call(app_api: &RealAppInterfaceApi, invocation: ZomeCallInvocation) -> AppResponse { let request = AppRequest::ZomeCallInvocation(Box::new(invocation)); app_api.handle_app_request(request).await } fn new_invocation<P>( cell_id: CellId, func: &str, payload: P, ) -> Result<ZomeCallInvocation, SerializedBytesError> where P: TryInto<SerializedBytes, Error = SerializedBytesError>, { Ok(ZomeCallInvocation { cell_id: cell_id.clone(), zome_name: TestWasm::Anchor.into(), cap: Some(CapSecretFixturator::new(Unpredictable).next().unwrap()), fn_name: func.into(), payload: ExternInput::new(payload.try_into()?), provenance: cell_id.agent_pubkey().clone(), }) }
use ::fixt::prelude::*; use hdk3::prelude::*; use holochain::conductor::{ api::{AppInterfaceApi, AppRequest, AppResponse, RealAppInterfaceApi}, dna_store::MockDnaStore, }; use holochain::core::ribosome::ZomeCallInvocation; use holochain::{ fixt::*, test_utils::{install_app, setup_app}, }; use holochain_types::app::InstalledCell; use holochain_types::cell::CellId; use holochain_types::dna::DnaDef; use holochain_types::dna::DnaFile; use holochain_types::test_utils::fake_agent_pubkey_1; use holochain_types::{observability, test_utils::fake_agent_pubkey_2}; use holochain_wasm_test_utils::TestWasm; use holochain_zome_types::ExternInput; use matches::assert_matches; use test_wasm_common::{AnchorInput, TestString}; #[tokio::test(threaded_scheduler)] async fn gossip_test() { observability::test_run().ok(); const NUM: usize = 1; let dna_file = DnaFile::new( DnaDef { name: "need_for_speed_test".to_string(), uuid: "ba1d046d-ce29-4778-914b-47e6010d2faf".to_string(), properties: SerializedBytes::try_from(()).unwrap(), zomes: vec![TestWasm::Anchor.into()].into(), }, vec![TestWasm::Anchor.into()], ) .await .unwrap(); let alice_agent_id = fake_agent_pubkey_1(); let alice_cell_id = CellId::new(dna_file.dna_hash().to_owned(), alice_agent_id.clone()); let alice_installed_cell = InstalledCell::new(alice_cell_id.clone(), "alice_handle".into()); let bob_agent_id = fake_agent_pubkey_2(); let bob_cell_id = CellId::new(dna_file.dna_hash().to_owned(), bob_agent_id.clone()); let bob_installed_cell = InstalledCell::new(bob_cell_id.clone(), "bob_handle".into()); let mut dna_store = MockDnaStore::new(); dna_store.expect_get().return_const(Some(dna_file.clone())); dna_store .expect_add_dnas::<Vec<_>>() .times(2) .return_const(()); dna_store .expect_add_entry_defs::<Vec<_>>() .times(2) .return_const(()); dna_store.expect_get_entry_def().return_const(None); let (_tmpdir, app_api, handle) = setup_app( vec![("alice app", vec![(alice_installed_cell, None)])], dna_store, ) .await; let 
anchor_invocation = |anchor: &str, cell_id, i: usize| { let anchor = AnchorInput(anchor.into(), i.to_string()); new_invocation(cell_id, "anchor", anchor) }; for i in 0..NUM { let invocation = anchor_invocation("alice", alice_cell_id.clone(), i).unwrap(); let response = call(&app_api, invocation).await; assert_matches!(response, AppResponse::ZomeCallInvocation(_)); } tokio::time::delay_for(std::time::Duration::from_secs(1)).await; let cell_data = vec![(bob_installed_cell, None)]; install_app("bob_app", cell_data, handle.clone()).await; tokio::time::delay_for(std::time::Duration::from_secs(1)).await; let invocation = new_invocation( bob_cell_id.clone(), "list_anchor_addresses", TestString("alice".into()), ) .unwrap(); let response = call(&app_api, invocation).await; match response { AppResponse::ZomeCallInvocation(r) => { let response: SerializedBytes = r.into_inner(); let hashes: EntryHashes = response.try_into().unwrap(); assert_eq!(hashes.0.len(), NUM); } _ => unreachable!(), } let shutdown = handle.take_shutdown_handle().await.unwrap(); handle.shutdown().await; shutdown.await.unwrap(); } async fn call(app_api: &RealAppInterfaceApi, invocation: ZomeCallInvocation) -> AppResponse { let request = AppRequest::ZomeCallInvocation(Box::new(invocation)); app_api.handle_app_request(request).await } fn new_invocation<P>( cell_id: CellId, func: &str, payload: P, ) -> Result<ZomeCallInvocation, SerializedBytesError> where P: TryInto<Serialized
Bytes, Error = SerializedBytesError>, { Ok(ZomeCallInvocation { cell_id: cell_id.clone(), zome_name: TestWasm::Anchor.into(), cap: Some(CapSecretFixturator::new(Unpredictable).next().unwrap()), fn_name: func.into(), payload: ExternInput::new(payload.try_into()?), provenance: cell_id.agent_pubkey().clone(), }) }
function_block-function_prefixed
[ { "content": "/// Helper to create a zome invocation for tests\n\npub fn new_invocation<P, Z: Into<ZomeName>>(\n\n cell_id: &CellId,\n\n func: &str,\n\n payload: P,\n\n zome_name: Z,\n\n) -> Result<ZomeCallInvocation, SerializedBytesError>\n\nwhere\n\n P: TryInto<SerializedBytes, Error = SerializedBytesError>,\n\n{\n\n Ok(ZomeCallInvocation {\n\n cell_id: cell_id.clone(),\n\n zome_name: zome_name.into(),\n\n cap: Some(CapSecretFixturator::new(Unpredictable).next().unwrap()),\n\n fn_name: func.into(),\n\n payload: ExternInput::new(payload.try_into()?),\n\n provenance: cell_id.agent_pubkey().clone(),\n\n })\n\n}\n", "file_path": "crates/holochain/src/test_utils.rs", "rank": 0, "score": 352311.1981148878 }, { "content": "/// A fixture example dna for unit testing.\n\npub fn fake_dna_zomes(uuid: &str, zomes: Vec<(ZomeName, DnaWasm)>) -> DnaFile {\n\n let mut dna = DnaDef {\n\n name: \"test\".to_string(),\n\n properties: JsonProperties::new(serde_json::json!({\"p\": \"hi\"}))\n\n .try_into()\n\n .unwrap(),\n\n uuid: uuid.to_string(),\n\n zomes: Vec::new(),\n\n };\n\n tokio_safe_block_on::tokio_safe_block_forever_on(async move {\n\n let mut wasm_code = Vec::new();\n\n for (zome_name, wasm) in zomes {\n\n let wasm = crate::dna::wasm::DnaWasmHashed::from_content(wasm).await;\n\n let (wasm, wasm_hash) = wasm.into_inner();\n\n dna.zomes.push((zome_name, Zome { wasm_hash }));\n\n wasm_code.push(wasm);\n\n }\n\n DnaFile::new(dna, wasm_code).await\n\n })\n\n .unwrap()\n", "file_path": "crates/types/src/test_utils.rs", "rank": 1, "score": 325621.3025926933 }, { "content": "/// A fixture EntryHash for unit testing.\n\npub fn fake_entry_hash(name: u8) -> EntryHash {\n\n fake_holo_hash(name, hash_type::Entry::new())\n\n}\n\n\n", "file_path": "crates/zome_types/src/test_utils.rs", "rank": 2, "score": 300635.5788523882 }, { "content": "fn to_zome_name(zomes_to_invoke: ZomesToInvoke) -> AppValidationResult<ZomeName> {\n\n match zomes_to_invoke {\n\n ZomesToInvoke::All => 
Err(AppValidationError::LinkMultipleZomes),\n\n ZomesToInvoke::One(zn) => Ok(zn),\n\n }\n\n}\n\n\n\nasync fn validate_op(\n\n op: DhtOp,\n\n conductor_api: &impl CellConductorApiT,\n\n workspace: &mut AppValidationWorkspace,\n\n network: &HolochainP2pCell,\n\n) -> AppValidationOutcome<Outcome> {\n\n // Get the workspace for the validation calls\n\n let workspace_lock = workspace.validation_workspace();\n\n\n\n // Create the element\n\n let element = get_element(op)?;\n\n\n\n // Check for caps\n", "file_path": "crates/holochain/src/core/workflow/app_validation_workflow.rs", "rank": 3, "score": 296421.08930582175 }, { "content": "fn zome_id_to_zome_name(zome_id: ZomeId, dna_file: &DnaFile) -> AppValidationResult<ZomeName> {\n\n let zome_index = u8::from(zome_id) as usize;\n\n Ok(dna_file\n\n .dna()\n\n .zomes\n\n .get(zome_index)\n\n .ok_or_else(|| AppValidationError::ZomeId(zome_id))?\n\n .0\n\n .clone())\n\n}\n\n\n\n/// Either get the app entry type\n\n/// from this entry or from the dependency.\n\nasync fn get_app_entry_type(\n\n element: &Element,\n\n cascade: Cascade<'_>,\n\n) -> AppValidationOutcome<Option<AppEntryType>> {\n\n match element.header().entry_data() {\n\n Some((_, et)) => match et.clone() {\n\n EntryType::App(aet) => Ok(Some(aet)),\n", "file_path": "crates/holochain/src/core/workflow/app_validation_workflow.rs", "rank": 4, "score": 282855.99866151495 }, { "content": "fn get_zome_name(entry_type: &AppEntryType, dna_file: &DnaFile) -> AppValidationResult<ZomeName> {\n\n zome_id_to_zome_name(entry_type.zome_id(), dna_file)\n\n}\n\n\n", "file_path": "crates/holochain/src/core/workflow/app_validation_workflow.rs", "rank": 6, "score": 276361.69321135833 }, { "content": "/// A fixture DnaHash for unit testing.\n\npub fn fake_dna_hash(name: u8) -> DnaHash {\n\n fake_holo_hash(name, hash_type::Dna::new())\n\n}\n\n\n", "file_path": "crates/zome_types/src/test_utils.rs", "rank": 7, "score": 266428.2656480977 }, { "content": "/// A fixture HeaderHash for unit 
testing.\n\npub fn fake_header_hash(name: u8) -> HeaderHash {\n\n fake_holo_hash(name, hash_type::Header::new())\n\n}\n\n\n", "file_path": "crates/zome_types/src/test_utils.rs", "rank": 8, "score": 266428.2656480977 }, { "content": "/// A fixture DhtOpHash for unit testing.\n\npub fn fake_dht_op_hash(name: u8) -> DhtOpHash {\n\n fake_holo_hash(name, hash_type::DhtOp::new())\n\n}\n\n\n", "file_path": "crates/zome_types/src/test_utils.rs", "rank": 9, "score": 261383.66141826665 }, { "content": "/// Cancels a delete because this data is still needed\n\npub fn reintegrate_single_data<P: PrefixType>(op: DhtOpLight, element_store: &mut ElementBuf<P>) {\n\n tracing::debug!(\"reintegrate\");\n\n match op {\n\n DhtOpLight::StoreElement(header, maybe_entry, _) => {\n\n cancel_delete(header, maybe_entry, element_store);\n\n }\n\n DhtOpLight::StoreEntry(new_entry_header, entry, _) => {\n\n cancel_delete(new_entry_header, Some(entry), element_store);\n\n }\n\n DhtOpLight::RegisterAgentActivity(header, _) => {\n\n cancel_delete(header, None, element_store);\n\n }\n\n DhtOpLight::RegisterUpdatedBy(entry_update, _, _) => {\n\n cancel_delete(entry_update, None, element_store);\n\n }\n\n DhtOpLight::RegisterDeletedEntryHeader(element_delete, _) => {\n\n cancel_delete(element_delete, None, element_store);\n\n }\n\n DhtOpLight::RegisterDeletedBy(element_delete, _) => {\n\n cancel_delete(element_delete, None, element_store);\n\n }\n\n DhtOpLight::RegisterAddLink(link_add, _) => {\n\n cancel_delete(link_add, None, element_store);\n\n }\n\n DhtOpLight::RegisterRemoveLink(link_remove, _) => {\n\n cancel_delete(link_remove, None, element_store);\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/holochain/src/core/workflow/integrate_dht_ops_workflow/disintegrate.rs", "rank": 10, "score": 261275.84794909056 }, { "content": "/// Store a DhtOp's data in an element buf without dependency checks\n\npub fn disintegrate_single_data<P: PrefixType>(op: DhtOpLight, element_store: &mut ElementBuf<P>) 
{\n\n tracing::debug!(\"disintegrate\");\n\n match op {\n\n DhtOpLight::StoreElement(header, maybe_entry, _) => {\n\n delete_data(header, maybe_entry, element_store);\n\n }\n\n DhtOpLight::StoreEntry(new_entry_header, entry, _) => {\n\n delete_data(new_entry_header, Some(entry), element_store);\n\n }\n\n DhtOpLight::RegisterAgentActivity(header, _) => {\n\n delete_data(header, None, element_store);\n\n }\n\n DhtOpLight::RegisterUpdatedBy(entry_update, _, _) => {\n\n delete_data(entry_update, None, element_store);\n\n }\n\n DhtOpLight::RegisterDeletedEntryHeader(element_delete, _) => {\n\n delete_data(element_delete, None, element_store);\n\n }\n\n DhtOpLight::RegisterDeletedBy(element_delete, _) => {\n\n delete_data(element_delete, None, element_store);\n", "file_path": "crates/holochain/src/core/workflow/integrate_dht_ops_workflow/disintegrate.rs", "rank": 11, "score": 261275.84794909056 }, { "content": "/// Attempt to get an anchor by its hash.\n\n/// Returns None if the hash doesn't point to an anchor.\n\n/// We can't do anything fancy like ensure the anchor if not exists because we only have a hash.\n\npub fn get_anchor(anchor_address: holo_hash::EntryHash) -> Result<Option<Anchor>, HdkError> {\n\n Ok(match get!(anchor_address)?.and_then(|el| el.into()) {\n\n Some(Entry::App(eb)) => {\n\n let path = Path::try_from(SerializedBytes::from(eb))?;\n\n Some(Anchor::try_from(&path)?)\n\n }\n\n _ => None,\n\n })\n\n}\n\n\n", "file_path": "crates/hdk/src/hash_path/anchor.rs", "rank": 12, "score": 256518.07967587025 }, { "content": "pub fn spawn_output(holochain: &mut Child) {\n\n let stdout = holochain.stdout.take();\n\n let stderr = holochain.stderr.take();\n\n tokio::task::spawn(async move {\n\n if let Some(stdout) = stdout {\n\n let mut reader = BufReader::new(stdout).lines();\n\n while let Ok(Some(line)) = reader.next_line().await {\n\n trace!(\"holochain bin stdout: {}\", line);\n\n }\n\n }\n\n });\n\n tokio::task::spawn(async move {\n\n if let Some(stderr) = 
stderr {\n\n let mut reader = BufReader::new(stderr).lines();\n\n while let Ok(Some(line)) = reader.next_line().await {\n\n trace!(\"holochain bin stderr: {}\", line);\n\n }\n\n }\n\n });\n\n}\n\n\n\npub async fn check_started(holochain: &mut Child) {\n\n let started = tokio::time::timeout(std::time::Duration::from_secs(1), holochain).await;\n\n if let Ok(status) = started {\n\n panic!(\"Holochain failed to start. status: {:?}\", status);\n\n }\n\n}\n\n\n", "file_path": "crates/holochain/tests/websocket.rs", "rank": 13, "score": 254847.80715007568 }, { "content": "#[hdk_extern]\n\nfn anchor(input: AnchorInput) -> ExternResult<EntryHash> {\n\n hdk3::prelude::anchor(input.0, input.1)\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/anchor/src/lib.rs", "rank": 14, "score": 253133.9631135164 }, { "content": "/// Simple string interface to simple string based paths.\n\n/// a.k.a \"the anchor pattern\" that predates paths by a few years.\n\npub fn anchor(anchor_type: String, anchor_text: String) -> Result<holo_hash::EntryHash, HdkError> {\n\n let path: Path = (&Anchor {\n\n anchor_type,\n\n anchor_text: Some(anchor_text),\n\n })\n\n .into();\n\n path.ensure()?;\n\n Ok(path.hash()?)\n\n}\n\n\n", "file_path": "crates/hdk/src/hash_path/anchor.rs", "rank": 15, "score": 252222.28363235173 }, { "content": "fn append_location(mut base: Vec<u8>) -> Vec<u8> {\n\n let mut loc_bytes = holo_dht_location_bytes(&base);\n\n base.append(&mut loc_bytes);\n\n base\n\n}\n\n\n\nfixturator!(\n\n AgentPubKey;\n\n curve Empty AgentPubKey::from_raw_bytes(ThirtySixHashBytesFixturator::new_indexed(Empty, self.0.index).next().unwrap());\n\n curve Unpredictable AgentPubKey::from_raw_bytes(ThirtySixHashBytesFixturator::new_indexed(Unpredictable, self.0.index).next().unwrap());\n\n curve Predictable {\n\n // these agent keys match what the mock keystore spits out for the first two agents\n\n // don't mess with this unless you also update the keystore!!!\n\n let agents = vec![\n\n 
AgentPubKey::try_from(\"uhCAkmrkoAHPVf_eufG7eC5fm6QKrW5pPMoktvG5LOC0SnJ4vV1Uv\")\n\n .unwrap(),\n\n AgentPubKey::try_from(\"uhCAke1j8Z2a-_min0h0pGuEMcYlo_V1l1mt9OtBuywKmHlg4L_R-\")\n\n .unwrap(),\n\n ];\n\n agents[self.0.index % agents.len()].clone()\n", "file_path": "crates/holo_hash/src/fixt.rs", "rank": 16, "score": 252011.65040457906 }, { "content": "fn fake_holo_hash<T: holo_hash::HashType>(name: u8, hash_type: T) -> HoloHash<T> {\n\n HoloHash::from_raw_bytes_and_type([name; 36].to_vec(), hash_type)\n\n}\n\n\n", "file_path": "crates/zome_types/src/test_utils.rs", "rank": 17, "score": 248938.46465826905 }, { "content": "#[hdk_extern]\n\nfn create_channel(name: ChannelName) -> ExternResult<EntryHash> {\n\n debug!(format!(\"channel name {:?}\", name))?;\n\n let path = channels_path();\n\n let channel = Channel::new(name.into());\n\n let channel_hash = hash_entry!(&channel)?;\n\n let sb: SerializedBytes = channel_hash.clone().try_into().unwrap();\n\n create_entry!(&channel)?;\n\n debug!(format!(\"sb in channel {:?}\", sb))?;\n\n create_link!(hash_entry!(&path)?, channel_hash.clone())?;\n\n Ok(channel_hash)\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/ser_regression/src/lib.rs", "rank": 18, "score": 243644.62418748485 }, { "content": "/// A fixture example dna for unit testing.\n\npub fn fake_dna_file(uuid: &str) -> DnaFile {\n\n fake_dna_zomes(uuid, vec![(\"test\".into(), vec![].into())])\n\n}\n\n\n", "file_path": "crates/types/src/test_utils.rs", "rank": 19, "score": 242732.31732625258 }, { "content": "fn make_validate_invocation(\n\n zomes_to_invoke: ZomesToInvoke,\n\n element: Element,\n\n) -> ValidateInvocation {\n\n ValidateInvocation {\n\n zomes_to_invoke,\n\n element: Arc::new(element),\n\n validation_package: None,\n\n entry_def_id: None,\n\n }\n\n}\n\n\n\nfixturator!(\n\n ValidateInvocation;\n\n vanilla fn make_validate_invocation(ZomesToInvoke, Element);\n\n);\n\n\n\nfixturator!(\n\n ValidateCreateLinkInvocation;\n\n constructor fn 
new(ZomeName, CreateLink, Entry, Entry);\n\n);\n\n\n\nfixturator!(\n\n ValidateDeleteLinkInvocation;\n\n constructor fn new(ZomeName, DeleteLink);\n\n);\n\n\n", "file_path": "crates/holochain/src/fixt.rs", "rank": 20, "score": 242268.7257562282 }, { "content": "fn path(s: &str) -> ExternResult<EntryHash> {\n\n let path = Path::from(s);\n\n path.ensure()?;\n\n Ok(path.hash()?)\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/query/src/lib.rs", "rank": 21, "score": 238046.09409589687 }, { "content": "fn path(s: &str) -> ExternResult<EntryHash> {\n\n let path = Path::from(s);\n\n path.ensure()?;\n\n Ok(path.hash()?)\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/link/src/lib.rs", "rank": 22, "score": 238046.09409589687 }, { "content": "/// A fixture ZomeCallInvocationPayload for unit testing.\n\npub fn fake_zome_invocation_payload() -> ExternInput {\n\n ExternInput::try_from(SerializedBytes::try_from(()).unwrap()).unwrap()\n\n}\n", "file_path": "crates/zome_types/src/test_utils.rs", "rank": 23, "score": 237821.9482632078 }, { "content": "// All others must be valid\n\nfn others((hash, i, el): &(DhtOpHash, IntegratedDhtOpsValue, Element), line: u32) {\n\n let s = format!(\"\\nline:{}\\n{:?}\\n{:?}\\n{:?}\", line, hash, i, el);\n\n match &i.op {\n\n // Register agent activity will be invalid if the previous header is invalid\n\n // This is very hard to track in these tests and this op also doesn't\n\n // go through app validation so it's more productive to skip it\n\n DhtOpLight::RegisterAgentActivity(_, _) => (),\n\n _ => assert_eq!(i.validation_status, ValidationStatus::Valid, \"{}\", s),\n\n }\n\n}\n\n\n", "file_path": "crates/holochain/src/core/workflow/app_validation_workflow/tests.rs", "rank": 24, "score": 234183.769226769 }, { "content": "fn get_zome_info<'a>(\n\n entry_type: &AppEntryType,\n\n dna_file: &'a DnaFile,\n\n) -> AppValidationResult<&'a (ZomeName, Zome)> {\n\n let zome_index = u8::from(entry_type.zome_id()) as 
usize;\n\n Ok(dna_file\n\n .dna()\n\n .zomes\n\n .get(zome_index)\n\n .ok_or_else(|| AppValidationError::ZomeId(entry_type.zome_id()))?)\n\n}\n\n\n", "file_path": "crates/holochain/src/core/workflow/app_validation_workflow.rs", "rank": 25, "score": 227786.0092045967 }, { "content": "/// Returns every entry hash in a vector from the root of an anchor.\n\n/// Hashes are sorted in the same way that paths sort children.\n\npub fn list_anchor_type_addresses() -> Result<Vec<holo_hash::EntryHash>, HdkError> {\n\n let links = Path::from(ROOT)\n\n .children()?\n\n .into_inner()\n\n .into_iter()\n\n .map(|link| link.target)\n\n .collect();\n\n Ok(links)\n\n}\n\n\n", "file_path": "crates/hdk/src/hash_path/anchor.rs", "rank": 26, "score": 223232.92934110895 }, { "content": "/// Returns every entry hash in a vector from the second level of an anchor.\n\n/// Uses the string argument to build the path from the root.\n\n/// Hashes are sorted in the same way that paths sort children.\n\npub fn list_anchor_addresses(anchor_type: String) -> Result<Vec<holo_hash::EntryHash>, HdkError> {\n\n let path: Path = (&Anchor {\n\n anchor_type,\n\n anchor_text: None,\n\n })\n\n .into();\n\n path.ensure()?;\n\n let links = path\n\n .children()?\n\n .into_inner()\n\n .into_iter()\n\n .map(|link| link.target)\n\n .collect();\n\n Ok(links)\n\n}\n\n\n", "file_path": "crates/hdk/src/hash_path/anchor.rs", "rank": 27, "score": 222333.75925301382 }, { "content": "#[hdk_extern]\n\nfn get_anchor(address: EntryHash) -> ExternResult<MaybeAnchor> {\n\n Ok(MaybeAnchor(hdk3::prelude::get_anchor(address)?))\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/anchor/src/lib.rs", "rank": 28, "score": 217076.8081117327 }, { "content": "pub fn dump_kv(reader: &Reader, name: &str, db: rkv::SingleStore) -> DatabaseResult<()> {\n\n dump_iter(name, db.iter_start(reader)?)\n\n}\n\n\n", "file_path": "crates/diagnostics/src/display.rs", "rank": 29, "score": 217015.397893972 }, { "content": 
"#[hdk_extern]\n\nfn list_anchor_type_addresses(_: ()) -> ExternResult<EntryHashes> {\n\n Ok(EntryHashes(hdk3::prelude::list_anchor_type_addresses()?))\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/anchor/src/lib.rs", "rank": 30, "score": 215309.50419342224 }, { "content": "pub fn run_link_validation_callback<I: Invocation + 'static>(\n\n invocation: ValidateLinkInvocation<I>,\n\n ribosome: &impl RibosomeT,\n\n workspace_lock: CallZomeWorkspaceLock,\n\n network: HolochainP2pCell,\n\n) -> AppValidationResult<Outcome> {\n\n let access = ValidateLinkHostAccess::new(workspace_lock, network);\n\n let validate = ribosome.run_validate_link(access, invocation)?;\n\n match validate {\n\n ValidateLinkResult::Valid => Ok(Outcome::Accepted),\n\n ValidateLinkResult::Invalid(reason) => Ok(Outcome::Rejected(reason)),\n\n ValidateLinkResult::UnresolvedDependencies(hashes) => Ok(Outcome::AwaitingDeps(hashes)),\n\n }\n\n}\n\n\n\npub struct AppValidationWorkspace {\n\n pub integrated_dht_ops: IntegratedDhtOpsStore,\n\n pub integration_limbo: IntegrationLimboStore,\n\n pub validation_limbo: ValidationLimboStore,\n\n // Integrated data\n", "file_path": "crates/holochain/src/core/workflow/app_validation_workflow.rs", "rank": 31, "score": 212925.64638962384 }, { "content": "pub fn dump_kvi(reader: &Reader, name: &str, db: rkv::IntegerStore<u32>) -> DatabaseResult<()> {\n\n dump_iter(name, db.iter_start(reader)?)\n\n}\n\n\n\n// TODO:\n\n// pub fn dump_kvv(reader: &Reader, name: &str, db: rkv::MultiStore) -> DatabaseResult<()> {\n\n// dump_iter_multi(name, db.iter_start(reader)?)\n\n// }\n\n\n\npub struct SizeStats {\n\n count: usize,\n\n total: usize,\n\n mean: Option<f32>,\n\n variance: Option<f32>,\n\n}\n\n\n\nimpl SizeStats {\n\n pub fn new(items: Vec<usize>) -> Self {\n\n let count = items.iter().count();\n\n let total = items.iter().sum();\n", "file_path": "crates/diagnostics/src/display.rs", "rank": 32, "score": 212378.4985663062 }, { "content": "pub fn 
wasm_call_n(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"wasm_call_n\");\n\n\n\n for n in vec![\n\n // 1 byte\n\n 1, // 1 kb\n\n 1_000, // 1 mb\n\n 1_000_000,\n\n ] {\n\n group.throughput(Throughput::Bytes(n as _));\n\n\n\n group.bench_function(BenchmarkId::from_parameter(n), |b| {\n\n // bytes\n\n let bytes = test_wasm_common::TestBytes::from(vec![0; n]);\n\n let sb: SerializedBytes = bytes.try_into().unwrap();\n\n\n\n TOKIO_RUNTIME.lock().unwrap().enter(move || {\n\n let ha = HOST_ACCESS_FIXTURATOR.lock().unwrap().next().unwrap();\n\n\n\n b.iter(|| {\n", "file_path": "crates/holochain/benches/bench.rs", "rank": 33, "score": 212178.37538979476 }, { "content": "pub fn test_signal(s: &str) -> Signal {\n\n SystemSignal::Test(s.to_string()).into()\n\n}\n\n\n\nimpl_from! {\n\n SystemSignal => Signal, |s| { Self::System(s) },\n\n}\n", "file_path": "crates/holochain/src/core/signal.rs", "rank": 34, "score": 209737.32522834075 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn hash_path_anchor_path() {\n\n for (atype, text, path_string) in vec![\n\n (\"foo\", None, \"hdk3anchor.foo\"),\n\n (\"foo\", Some(\"bar\".to_string()), \"hdk3anchor.foo.bar\"),\n\n ] {\n\n assert_eq!(\n\n Path::from(path_string),\n\n (&Anchor {\n\n anchor_type: atype.to_string(),\n\n anchor_text: text,\n\n })\n\n .into(),\n\n );\n\n }\n\n}\n\n\n", "file_path": "crates/hdk/src/hash_path/anchor.rs", "rank": 35, "score": 209692.072755392 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn hash_path_anchor_from_path() {\n\n let path = Path::from(vec![\n\n Component::from(vec![\n\n 104, 0, 0, 0, 100, 0, 0, 0, 107, 0, 0, 0, 51, 0, 0, 0, 97, 0, 0, 0, 110, 0, 0, 0, 99,\n\n 0, 0, 0, 104, 0, 0, 0, 111, 0, 0, 0, 114, 0, 0, 0,\n\n ]),\n\n Component::from(vec![102, 0, 0, 0, 111, 0, 0, 0, 111, 0, 0, 0]),\n\n Component::from(vec![98, 0, 0, 0, 97, 0, 0, 0, 114, 0, 0, 0]),\n\n ]);\n\n\n\n assert_eq!(\n\n Anchor::try_from(&path).unwrap(),\n\n Anchor {\n\n anchor_type: \"foo\".into(),\n\n anchor_text: 
Some(\"bar\".into()),\n\n },\n\n );\n\n}\n", "file_path": "crates/hdk/src/hash_path/anchor.rs", "rank": 36, "score": 209692.072755392 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn hash_path_anchor_entry_def() {\n\n assert_eq!(Path::entry_def_id(), Anchor::entry_def_id(),);\n\n\n\n assert_eq!(Path::crdt_type(), Anchor::crdt_type(),);\n\n\n\n assert_eq!(Path::required_validations(), Anchor::required_validations(),);\n\n\n\n assert_eq!(Path::entry_visibility(), Anchor::entry_visibility(),);\n\n\n\n assert_eq!(Path::entry_def(), Anchor::entry_def(),);\n\n}\n\n\n", "file_path": "crates/hdk/src/hash_path/anchor.rs", "rank": 37, "score": 207791.3411722565 }, { "content": "fn put_data<P: PrefixType>(\n\n signature: Signature,\n\n header: Header,\n\n maybe_entry: Option<Entry>,\n\n element_store: &mut ElementBuf<P>,\n\n) -> DhtOpConvertResult<()> {\n\n let signed_header = SignedHeaderHashed::from_content_sync(SignedHeader(header, signature));\n\n let maybe_entry_hashed = match maybe_entry {\n\n Some(entry) => Some(EntryHashed::from_content_sync(entry)),\n\n None => None,\n\n };\n\n element_store.put(signed_header, maybe_entry_hashed)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/holochain/src/core/workflow/integrate_dht_ops_workflow.rs", "rank": 38, "score": 207523.99710289677 }, { "content": "fn get_header<P: PrefixType>(\n\n hash: HeaderHash,\n\n element_store: &ElementBuf<P>,\n\n) -> DhtOpConvertResult<Header> {\n\n Ok(element_store\n\n .get_header(&hash)?\n\n .ok_or_else(|| DhtOpConvertError::MissingData(hash.into()))?\n\n .into_header_and_signature()\n\n .0\n\n .into_content())\n\n}\n\n\n\n/// After writing an Element to our chain, we want to integrate the meta ops\n\n/// inline, so that they are immediately available in the authored metadata.\n\n/// NB: We skip integrating the element data, since it is already available in\n\n/// our source chain.\n\npub async fn integrate_to_authored<C: MetadataBufT<AuthoredPrefix>>(\n\n element: &Element,\n\n element_store: 
&ElementBuf<AuthoredPrefix>,\n\n meta_store: &mut C,\n\n) -> DhtOpConvertResult<()> {\n\n // Produce the light directly\n\n for op in produce_op_lights_from_elements(vec![element]).await? {\n\n // we don't integrate element data, because it is already in our vault.\n\n integrate_single_metadata(op, element_store, meta_store)?\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/holochain/src/core/workflow/integrate_dht_ops_workflow.rs", "rank": 39, "score": 207523.99710289677 }, { "content": "/// internal PARSE for holo hash REPR\n\npub fn holo_hash_decode(prefix: &[u8], s: &str) -> Result<Vec<u8>, HoloHashError> {\n\n if &s[..1] != \"u\" {\n\n return Err(HoloHashError::NoU);\n\n }\n\n let s = match base64::decode_config(&s[1..], base64::URL_SAFE_NO_PAD) {\n\n Err(_) => return Err(HoloHashError::BadBase64),\n\n Ok(s) => s,\n\n };\n\n if s.len() != 39 {\n\n return Err(HoloHashError::BadSize);\n\n }\n\n if &s[..3] != prefix {\n\n return Err(HoloHashError::BadPrefix);\n\n }\n\n let s = &s[3..];\n\n let loc_bytes = holo_dht_location_bytes(&s[..32]);\n\n let loc_bytes: &[u8] = &loc_bytes;\n\n if loc_bytes != &s[32..] 
{\n\n return Err(HoloHashError::BadChecksum);\n\n }\n\n Ok(s.to_vec())\n\n}\n\n\n", "file_path": "crates/holo_hash/src/encode.rs", "rank": 40, "score": 205590.16451004104 }, { "content": "fn delete_data<P: PrefixType>(\n\n header_hash: HeaderHash,\n\n entry_hash: Option<EntryHash>,\n\n element_store: &mut ElementBuf<P>,\n\n) {\n\n element_store.delete(header_hash, entry_hash);\n\n}\n\n\n", "file_path": "crates/holochain/src/core/workflow/integrate_dht_ops_workflow/disintegrate.rs", "rank": 41, "score": 205566.10296464778 }, { "content": "fn cancel_delete<P: PrefixType>(\n\n header_hash: HeaderHash,\n\n entry_hash: Option<EntryHash>,\n\n element_store: &mut ElementBuf<P>,\n\n) {\n\n element_store.cancel_delete(header_hash, entry_hash);\n\n}\n", "file_path": "crates/holochain/src/core/workflow/integrate_dht_ops_workflow/disintegrate.rs", "rank": 42, "score": 205566.10296464778 }, { "content": "fn integrate_data_and_meta<P: PrefixType>(\n\n iv: IntegrationLimboValue,\n\n op: DhtOp,\n\n element_store: &mut ElementBuf<P>,\n\n meta_store: &mut MetadataBuf<P>,\n\n) -> DhtOpConvertResult<Outcome> {\n\n integrate_single_data(op, element_store)?;\n\n integrate_single_metadata(iv.op.clone(), element_store, meta_store)?;\n\n let integrated = IntegratedDhtOpsValue {\n\n validation_status: iv.validation_status,\n\n op: iv.op,\n\n when_integrated: Timestamp::now(),\n\n };\n\n debug!(\"integrating\");\n\n Ok(Outcome::Integrated(integrated))\n\n}\n\n\n\n/// Check if we have the required dependencies held before integrating.\n\nasync fn op_dependencies_held(\n\n op: &DhtOp,\n", "file_path": "crates/holochain/src/core/workflow/integrate_dht_ops_workflow.rs", "rank": 43, "score": 205566.10296464778 }, { "content": "#[hdk_extern]\n\nfn hash(path_string: TestString) -> ExternResult<EntryHash> {\n\n Path::from(path_string.0).hash()\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/hash_path/src/lib.rs", "rank": 44, "score": 204442.09222730127 }, { "content": "pub fn 
unreachable(\n\n _ribosome: Arc<impl RibosomeT>,\n\n _call_context: Arc<CallContext>,\n\n _input: UnreachableInput,\n\n) -> RibosomeResult<UnreachableOutput> {\n\n unreachable!();\n\n}\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/unreachable.rs", "rank": 45, "score": 203010.33285696863 }, { "content": "pub fn property(\n\n _ribosome: Arc<impl RibosomeT>,\n\n _call_context: Arc<CallContext>,\n\n _input: PropertyInput,\n\n) -> RibosomeResult<PropertyOutput> {\n\n unimplemented!();\n\n}\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/property.rs", "rank": 46, "score": 203001.40104557705 }, { "content": "pub fn integrate_single_metadata<C, P>(\n\n op: DhtOpLight,\n\n element_store: &ElementBuf<P>,\n\n meta_store: &mut C,\n\n) -> DhtOpConvertResult<()>\n\nwhere\n\n P: PrefixType,\n\n C: MetadataBufT<P>,\n\n{\n\n match op {\n\n DhtOpLight::StoreElement(hash, _, _) => {\n\n let header = get_header(hash, element_store)?;\n\n meta_store.register_element_header(&header)?;\n\n }\n\n DhtOpLight::StoreEntry(hash, _, _) => {\n\n let new_entry_header = get_header(hash, element_store)?.try_into()?;\n\n // Reference to headers\n\n meta_store.register_header(new_entry_header)?;\n\n }\n\n DhtOpLight::RegisterAgentActivity(hash, _) => {\n", "file_path": "crates/holochain/src/core/workflow/integrate_dht_ops_workflow.rs", "rank": 47, "score": 202377.02469390794 }, { "content": "/// A fixture AgentPubKey for unit testing.\n\npub fn fake_agent_pub_key(name: u8) -> AgentPubKey {\n\n fake_holo_hash(name, hash_type::Agent::new())\n\n}\n\n\n", "file_path": "crates/zome_types/src/test_utils.rs", "rank": 48, "score": 201990.77142353117 }, { "content": "pub fn disintegrate_single_metadata<C, P>(\n\n op: DhtOpLight,\n\n element_store: &ElementBuf<P>,\n\n meta_store: &mut C,\n\n) -> DhtOpConvertResult<()>\n\nwhere\n\n P: PrefixType,\n\n C: MetadataBufT<P>,\n\n{\n\n match op {\n\n DhtOpLight::StoreElement(hash, _, _) => {\n\n 
meta_store.deregister_element_header(hash)?;\n\n }\n\n DhtOpLight::StoreEntry(hash, _, _) => {\n\n let new_entry_header = get_header(hash, element_store)?.try_into()?;\n\n // Reference to headers\n\n meta_store.deregister_header(new_entry_header)?;\n\n }\n\n DhtOpLight::RegisterAgentActivity(hash, _) => {\n\n let header = get_header(hash, element_store)?;\n", "file_path": "crates/holochain/src/core/workflow/integrate_dht_ops_workflow/disintegrate.rs", "rank": 49, "score": 200473.255331826 }, { "content": "/// Store a DhtOp's data in an element buf\n\npub fn integrate_single_data<P: PrefixType>(\n\n op: DhtOp,\n\n element_store: &mut ElementBuf<P>,\n\n) -> DhtOpConvertResult<()> {\n\n {\n\n match op {\n\n DhtOp::StoreElement(signature, header, maybe_entry) => {\n\n put_data(signature, header, maybe_entry.map(|e| *e), element_store)?;\n\n }\n\n DhtOp::StoreEntry(signature, new_entry_header, entry) => {\n\n put_data(\n\n signature,\n\n new_entry_header.into(),\n\n Some(*entry),\n\n element_store,\n\n )?;\n\n }\n\n DhtOp::RegisterAgentActivity(signature, header) => {\n\n put_data(signature, header, None, element_store)?;\n\n }\n", "file_path": "crates/holochain/src/core/workflow/integrate_dht_ops_workflow.rs", "rank": 50, "score": 200473.255331826 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn hash_path_root() {\n\n assert_eq!(ROOT, \"hdk3anchor\");\n\n}\n\n\n", "file_path": "crates/hdk/src/hash_path/anchor.rs", "rank": 51, "score": 200161.33520644426 }, { "content": "fn get_element_delete<P: PrefixType>(\n\n header_hash: HeaderHash,\n\n op_name: String,\n\n cas: &ElementBuf<P>,\n\n) -> DhtOpConvertResult<(header::Delete, Signature)> {\n\n let (header, sig) = cas\n\n .get_header(&header_hash)?\n\n .ok_or_else(|| DhtOpConvertError::MissingData(header_hash.into()))?\n\n .into_header_and_signature();\n\n match header.into_content() {\n\n Header::Delete(u) => Ok((u, sig)),\n\n h => Err(DhtOpConvertError::HeaderMismatch(\n\n format!(\"{:?}\", h),\n\n op_name,\n\n )),\n\n 
}\n\n}\n\n\n\n#[instrument(skip(cas))]\n\nasync fn get_entry_hash_for_header(\n\n header_hash: &HeaderHash,\n\n cas: &ElementBuf,\n\n) -> DhtOpConvertResult<EntryHash> {\n\n debug!(%header_hash);\n\n let entry = cas\n\n .get_header(header_hash)?\n\n .and_then(|e| e.header().entry_data().map(|(hash, _)| hash.clone()));\n\n entry.ok_or_else(|| DhtOpConvertError::MissingEntryDataForHeader(header_hash.clone()))\n\n}\n", "file_path": "crates/holochain/src/core/workflow/produce_dht_ops_workflow/dht_op_light.rs", "rank": 52, "score": 200008.4332311897 }, { "content": "fn spawn_listener_loop(mut server: WebsocketListener) -> tokio::task::JoinHandle<()> {\n\n tokio::task::spawn(async move {\n\n while let Some(maybe_con) = server.next().await {\n\n let (_send, mut recv) = maybe_con.unwrap();\n\n tracing::info!(\n\n test = \"incoming connection\",\n\n remote_addr = %recv.remote_addr(),\n\n );\n\n while let Some(msg) = recv.next().await {\n\n match msg {\n\n WebsocketMessage::Close(close) => {\n\n tracing::error!(error = ?close);\n\n break;\n\n }\n\n WebsocketMessage::Signal(data) => {\n\n let msg: TestMessage = data.try_into().unwrap();\n\n tracing::info!(\n\n test = \"incoming signal\",\n\n data = %msg.0,\n\n );\n", "file_path": "crates/websocket/tests/integration.rs", "rank": 53, "score": 199235.71792775113 }, { "content": "pub fn hash_entry(\n\n _ribosome: Arc<impl RibosomeT>,\n\n _call_context: Arc<CallContext>,\n\n input: HashEntryInput,\n\n) -> RibosomeResult<HashEntryOutput> {\n\n let entry: Entry = input.into_inner();\n\n\n\n let entry_hash = tokio_safe_block_on::tokio_safe_block_forever_on(async move {\n\n holochain_types::entry::EntryHashed::from_content_sync(entry)\n\n })\n\n .into_hash();\n\n\n\n Ok(HashEntryOutput::new(entry_hash))\n\n}\n\n\n\n#[cfg(test)]\n\n#[cfg(feature = \"slow_tests\")]\n\npub mod wasm_test {\n\n use super::*;\n\n use crate::core::ribosome::host_fn::hash_entry::hash_entry;\n", "file_path": 
"crates/holochain/src/core/ribosome/host_fn/hash_entry.rs", "rank": 54, "score": 198923.61511464493 }, { "content": "pub fn zome_info(\n\n ribosome: Arc<impl RibosomeT>,\n\n call_context: Arc<CallContext>,\n\n _input: ZomeInfoInput,\n\n) -> RibosomeResult<ZomeInfoOutput> {\n\n Ok(ZomeInfoOutput::new(ZomeInfo {\n\n dna_name: ribosome.dna_file().dna().name.clone(),\n\n zome_name: call_context.zome_name.clone(),\n\n dna_hash: ribosome.dna_file().dna_hash().clone(), // @TODO\n\n zome_id: ribosome.zome_name_to_id(&call_context.zome_name)?,\n\n properties: SerializedBytes::try_from(()).unwrap(), // @TODO\n\n // @todo\n\n // public_token: \"\".into(), // @TODO\n\n }))\n\n}\n\n\n\n#[cfg(test)]\n\n#[cfg(feature = \"slow_tests\")]\n\npub mod test {\n\n\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/zome_info.rs", "rank": 55, "score": 198845.50355352677 }, { "content": "fn parse_time(samples: &mut usize, value: &dyn std::fmt::Debug) {\n\n let v = format!(\"{:?}\", value);\n\n if v.ends_with(\"ns\") {\n\n if let Ok(v) = v.trim_end_matches(\"ns\").parse::<f64>() {\n\n *samples = v as usize;\n\n }\n\n } else if v.ends_with(\"µs\") {\n\n if let Ok(v) = v.trim_end_matches(\"µs\").parse::<f64>() {\n\n *samples = (v * 1000.0) as usize;\n\n }\n\n } else if v.ends_with(\"ms\") {\n\n if let Ok(v) = v.trim_end_matches(\"ms\").parse::<f64>() {\n\n *samples = (v * 1000000.0) as usize;\n\n }\n\n } else if v.ends_with('s') {\n\n if let Ok(v) = v.trim_end_matches('s').parse::<f64>() {\n\n *samples = (v * 1000000000.0) as usize;\n\n }\n\n }\n\n}\n", "file_path": "crates/observability/src/flames.rs", "rank": 56, "score": 196985.95727982448 }, { "content": "/// Convert a DhtOpLight into a DhtOp (render all the hashes to values)\n\n/// This only checks the ElementVault so can only be used with ops that you are\n\n/// an authority or author of.\n\npub fn light_to_op<P: PrefixType>(\n\n op: DhtOpLight,\n\n cas: &ElementBuf<P>,\n\n) -> DhtOpConvertResult<DhtOp> {\n\n let op_name = 
format!(\"{:?}\", op);\n\n match op {\n\n DhtOpLight::StoreElement(h, _, _) => {\n\n let (header, entry) = cas\n\n .get_element(&h)?\n\n .ok_or_else(|| DhtOpConvertError::MissingData(h.into()))?\n\n .into_inner();\n\n // TODO: Could use this signature? Is it the same?\n\n // Should we not be storing the signature in the DhtOpLight?\n\n let (header, sig) = header.into_header_and_signature();\n\n let entry = entry.into_option().map(Box::new);\n\n Ok(DhtOp::StoreElement(sig, header.into_content(), entry))\n\n }\n\n DhtOpLight::StoreEntry(h, _, _) => {\n\n let (header, entry) = cas\n\n .get_element(&h)?\n", "file_path": "crates/holochain/src/core/workflow/produce_dht_ops_workflow/dht_op_light.rs", "rank": 57, "score": 196830.26053306513 }, { "content": "/// helper fnction function to load a `Config` from a toml string.\n\nfn config_from_toml<'a, T>(toml: &'a str) -> ConductorResult<T>\n\nwhere\n\n T: Deserialize<'a>,\n\n{\n\n toml::from_str::<T>(toml).map_err(ConductorError::DeserializationError)\n\n}\n\n\n\nimpl ConductorConfig {\n\n /// create a ConductorConfig struct from a toml file path\n\n pub fn load_toml(path: &Path) -> ConductorResult<ConductorConfig> {\n\n let config_toml = std::fs::read_to_string(path).map_err(|err| match err {\n\n e @ std::io::Error { .. 
} if e.kind() == std::io::ErrorKind::NotFound => {\n\n ConductorError::ConfigMissing(path.into())\n\n }\n\n _ => err.into(),\n\n })?;\n\n config_from_toml(&config_toml)\n\n }\n\n}\n\n\n", "file_path": "crates/holochain/src/conductor/config.rs", "rank": 58, "score": 196013.0063891075 }, { "content": "pub fn entry_type_properties(\n\n _ribosome: Arc<impl RibosomeT>,\n\n _call_context: Arc<CallContext>,\n\n _input: EntryTypePropertiesInput,\n\n) -> RibosomeResult<EntryTypePropertiesOutput> {\n\n unimplemented!();\n\n}\n", "file_path": "crates/holochain/src/core/ribosome/host_fn/entry_type_properties.rs", "rank": 59, "score": 195913.32979566135 }, { "content": "/// A fixture example CellId for unit testing.\n\npub fn fake_cell_id(name: u8) -> CellId {\n\n (fake_dna_hash(name), fake_agent_pubkey_1()).into()\n\n}\n\n\n", "file_path": "crates/types/src/test_utils.rs", "rank": 60, "score": 193674.9602719282 }, { "content": "pub fn write_config(mut path: PathBuf, config: &ConductorConfig) -> PathBuf {\n\n path.push(\"conductor_config.toml\");\n\n std::fs::write(path.clone(), toml::to_string(&config).unwrap()).unwrap();\n\n path\n\n}\n\n\n\n#[instrument(skip(holochain, response))]\n\nasync fn check_timeout<T>(\n\n holochain: &mut Child,\n\n response: impl Future<Output = Result<T, std::io::Error>>,\n\n timeout_millis: u64,\n\n) -> T {\n\n match tokio::time::timeout(std::time::Duration::from_millis(timeout_millis), response).await {\n\n Ok(response) => response.unwrap(),\n\n Err(_) => {\n\n holochain.kill().unwrap();\n\n error!(\"Timeout\");\n\n panic!(\"Timed out on request after {}\", timeout_millis);\n\n }\n\n }\n", "file_path": "crates/holochain/tests/websocket.rs", "rank": 61, "score": 190292.02062352607 }, { "content": "fn assert_length(hash: &[u8]) {\n\n if hash.len() != HOLO_HASH_SERIALIZED_LEN {\n\n panic!(\n\n \"invalid holo_hash byte count, expected: {}, found: {}. 
{:?}\",\n\n HOLO_HASH_SERIALIZED_LEN,\n\n hash.len(),\n\n hash\n\n );\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::*;\n\n\n\n #[cfg(not(feature = \"string-encoding\"))]\n\n fn assert_type<T: HashType>(t: &str, h: HoloHash<T>) {\n\n assert_eq!(3_688_618_971, h.get_loc());\n\n assert_eq!(\n\n \"[219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219, 219]\",\n", "file_path": "crates/holo_hash/src/hash.rs", "rank": 62, "score": 187425.97224189402 }, { "content": "fn extract_app_interfaces(\n\n legacy_interfaces: Vec<legacy::InterfaceConfig>,\n\n) -> Vec<AppInterfaceConfig> {\n\n legacy_interfaces\n\n .into_iter()\n\n .filter(|c| !c.admin)\n\n .filter_map(|c: legacy::InterfaceConfig| {\n\n convert_interface_driver(c.driver).map(|driver| AppInterfaceConfig {\n\n driver,\n\n signal_subscriptions: HashMap::new(),\n\n })\n\n })\n\n .collect()\n\n}\n\n\n\n#[cfg(test)]\n\npub mod tests {\n\n\n\n use super::*;\n\n use crate::conductor::{handle::MockConductorHandleT, paths::EnvironmentRootPath, Conductor};\n", "file_path": "crates/holochain/src/conductor/compat.rs", "rank": 63, "score": 186053.5126640191 }, { "content": "fn handle_completed_task(\n\n on_death: &OnDeath,\n\n task_result: ManagedTaskResult,\n\n) -> Option<ManagedTaskAdd> {\n\n on_death(task_result)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use crate::conductor::error::ConductorError;\n\n use anyhow::Result;\n\n use holochain_types::observability;\n\n\n\n #[tokio::test]\n\n async fn spawn_and_handle_dying_task() -> Result<()> {\n\n observability::test_run().ok();\n\n let (mut send_task_handle, main_task) = spawn_task_manager();\n\n let handle = tokio::spawn(async {\n\n Err(ConductorError::Todo(\"This task gotta die\".to_string()).into())\n", "file_path": "crates/holochain/src/conductor/manager/mod.rs", "rank": 64, "score": 183855.55390632985 }, { "content": "#[instrument(skip(env, 
stop, trigger_integration, conductor_api, network))]\n\npub fn spawn_app_validation_consumer(\n\n env: EnvironmentWrite,\n\n mut stop: sync::broadcast::Receiver<()>,\n\n mut trigger_integration: TriggerSender,\n\n conductor_api: impl CellConductorApiT + 'static,\n\n network: HolochainP2pCell,\n\n) -> (TriggerSender, JoinHandle<ManagedTaskResult>) {\n\n let (tx, mut rx) = TriggerSender::new();\n\n let mut trigger_self = tx.clone();\n\n let handle = tokio::spawn(async move {\n\n loop {\n\n // Wait for next job\n\n if let Job::Shutdown = next_job_or_exit(&mut rx, &mut stop).await {\n\n tracing::warn!(\n\n \"Cell is shutting down: stopping app_validation_workflow queue consumer.\"\n\n );\n\n break;\n\n }\n\n\n\n // Run the workflow\n", "file_path": "crates/holochain/src/core/queue_consumer/app_validation_consumer.rs", "rank": 65, "score": 183615.23055985066 }, { "content": "#[test_case(1)]\n\n#[test_case(10)]\n\n#[test_case(100)]\n\n#[test_case(1000)]\n\n#[test_case(2000)]\n\n#[ignore]\n\nfn speed_test_all(n: usize) {\n\n observability::test_run().unwrap();\n\n holochain::conductor::tokio_runtime().block_on(speed_test(Some(n)));\n\n}\n\n\n\n#[instrument]\n\nasync fn speed_test(n: Option<usize>) -> TestEnvironment {\n\n let num = n.unwrap_or(DEFAULT_NUM);\n\n\n\n // ////////////\n\n // START DNA\n\n // ////////////\n\n\n\n let dna_file = DnaFile::new(\n\n DnaDef {\n\n name: \"need_for_speed_test\".to_string(),\n\n uuid: \"ba1d046d-ce29-4778-914b-47e6010d2faf\".to_string(),\n\n properties: SerializedBytes::try_from(()).unwrap(),\n\n zomes: vec![TestWasm::Anchor.into()].into(),\n\n },\n", "file_path": "crates/holochain/tests/speed_tests.rs", "rank": 66, "score": 182931.35779708065 }, { "content": "/// simple Zome fixture\n\npub fn fake_zome() -> Zome {\n\n Zome {\n\n wasm_hash: holo_hash::WasmHash::from_raw_bytes(vec![0; 36]),\n\n }\n\n}\n\n\n", "file_path": "crates/types/src/test_utils.rs", "rank": 67, "score": 182381.81615593593 }, { "content": "fn store_element(a: 
TestData) -> (Vec<Db>, Vec<Db>, &'static str) {\n\n let entry = match &a.any_header {\n\n Header::Create(_) | Header::Update(_) => Some(a.original_entry.clone().into()),\n\n _ => None,\n\n };\n\n let op = DhtOp::StoreElement(\n\n a.signature.clone(),\n\n a.any_header.clone().into(),\n\n entry.clone(),\n\n );\n\n let pre_state = vec![Db::IntQueue(op.clone())];\n\n // Add op data to pending\n\n let pre_state = add_op_to_judged(pre_state, &op);\n\n let mut expect = vec![\n\n Db::Integrated(op.clone()),\n\n Db::CasHeader(a.any_header.clone().into(), None),\n\n ];\n\n if let Some(_) = &entry {\n\n expect.push(Db::CasEntry(a.original_entry.clone(), None, None));\n\n }\n\n (pre_state, expect, \"store element\")\n\n}\n\n\n", "file_path": "crates/holochain/src/core/workflow/integrate_dht_ops_workflow/tests.rs", "rank": 68, "score": 180954.09470602495 }, { "content": "fn register_deleted_by(a: TestData) -> (Vec<Db>, Vec<Db>, &'static str) {\n\n let op = DhtOp::RegisterDeletedEntryHeader(a.signature.clone(), a.entry_delete.clone());\n\n let pre_state = vec![\n\n Db::IntQueue(op.clone()),\n\n Db::CasEntry(\n\n a.original_entry.clone(),\n\n Some(a.original_header.clone().into()),\n\n Some(a.signature.clone()),\n\n ),\n\n ];\n\n let pre_state = add_op_to_judged(pre_state, &op);\n\n let expect = vec![\n\n Db::IntQueueEmpty,\n\n Db::Integrated(op.clone()),\n\n Db::MetaDelete(\n\n a.original_header_hash.clone().into(),\n\n a.entry_delete.clone().into(),\n\n ),\n\n ];\n\n (pre_state, expect, \"register deleted by\")\n\n}\n\n\n", "file_path": "crates/holochain/src/core/workflow/integrate_dht_ops_workflow/tests.rs", "rank": 69, "score": 180954.09470602495 }, { "content": "fn store_entry(a: TestData) -> (Vec<Db>, Vec<Db>, &'static str) {\n\n let op = DhtOp::StoreEntry(\n\n a.signature.clone(),\n\n a.original_header.clone(),\n\n a.original_entry.clone().into(),\n\n );\n\n debug!(?a.original_header);\n\n let pre_state = vec![Db::IntQueue(op.clone())];\n\n let pre_state = 
add_op_to_judged(pre_state, &op);\n\n let expect = vec![\n\n Db::Integrated(op.clone()),\n\n Db::CasHeader(a.original_header.clone().into(), None),\n\n Db::CasEntry(a.original_entry.clone(), None, None),\n\n Db::MetaHeader(a.original_entry.clone(), a.original_header.clone().into()),\n\n ];\n\n (pre_state, expect, \"store entry\")\n\n}\n\n\n", "file_path": "crates/holochain/src/core/workflow/integrate_dht_ops_workflow/tests.rs", "rank": 70, "score": 180954.09470602495 }, { "content": "fn extract_app_type(element: &Element) -> Option<AppEntryType> {\n\n element\n\n .header()\n\n .entry_data()\n\n .and_then(|(_, entry_type)| match entry_type {\n\n EntryType::App(aet) => Some(aet.clone()),\n\n _ => None,\n\n })\n\n}\n\n\n\n/// Get the validation package based on\n\n/// the requirements set by the AppEntryType\n\nasync fn get_validation_package(\n\n element: &Element,\n\n entry_def: &Option<EntryDef>,\n\n mut network: HolochainP2pCell,\n\n) -> AppValidationResult<Option<ValidationPackage>> {\n\n match entry_def {\n\n Some(entry_def) => {\n\n Ok(match entry_def.required_validation_type {\n", "file_path": "crates/holochain/src/core/workflow/app_validation_workflow.rs", "rank": 71, "score": 179965.89848182516 }, { "content": "#[hdk_extern]\n\nfn list_anchor_addresses(anchor_type: TestString) -> ExternResult<EntryHashes> {\n\n Ok(EntryHashes(hdk3::prelude::list_anchor_addresses(\n\n anchor_type.0,\n\n )?))\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/anchor/src/lib.rs", "rank": 72, "score": 179686.66174157636 }, { "content": "/// Old version of holochain that anchors was designed for had two part link tags but now link\n\n/// tags are a single array of bytes, so to get an external interface that is somewhat backwards\n\n/// compatible we need to rebuild the anchors from the paths serialized into the links and then\n\n/// return them.\n\npub fn list_anchor_tags(anchor_type: String) -> Result<Vec<String>, HdkError> {\n\n let path: Path = (&Anchor {\n\n 
anchor_type,\n\n anchor_text: None,\n\n })\n\n .into();\n\n path.ensure()?;\n\n let hopefully_anchor_tags: Result<Vec<String>, SerializedBytesError> = path\n\n .children()?\n\n .into_inner()\n\n .into_iter()\n\n .map(|link| match Path::try_from(&link.tag) {\n\n Ok(path) => match Anchor::try_from(&path) {\n\n Ok(anchor) => match anchor.anchor_text {\n\n Some(text) => Ok(text),\n\n None => Err(SerializedBytesError::FromBytes(\n\n \"missing anchor text\".into(),\n\n )),\n\n },\n\n Err(e) => Err(e),\n\n },\n\n Err(e) => Err(e),\n\n })\n\n .collect();\n\n let mut anchor_tags = hopefully_anchor_tags?;\n\n anchor_tags.sort();\n\n anchor_tags.dedup();\n\n Ok(anchor_tags)\n\n}\n\n\n", "file_path": "crates/hdk/src/hash_path/anchor.rs", "rank": 73, "score": 179618.56417459433 }, { "content": "fn register_agent_activity(a: TestData) -> (Vec<Db>, Vec<Db>, &'static str) {\n\n let op = DhtOp::RegisterAgentActivity(a.signature.clone(), a.dna_header.clone());\n\n let pre_state = vec![Db::IntQueue(op.clone())];\n\n let pre_state = add_op_to_judged(pre_state, &op);\n\n let expect = vec![\n\n Db::Integrated(op.clone()),\n\n Db::MetaActivity(a.dna_header.clone()),\n\n ];\n\n (pre_state, expect, \"register agent activity\")\n\n}\n\n\n", "file_path": "crates/holochain/src/core/workflow/integrate_dht_ops_workflow/tests.rs", "rank": 74, "score": 179290.17126226766 }, { "content": "fn register_replaced_by_for_entry(a: TestData) -> (Vec<Db>, Vec<Db>, &'static str) {\n\n let op = DhtOp::RegisterUpdatedBy(\n\n a.signature.clone(),\n\n a.entry_update_entry.clone(),\n\n Some(a.new_entry.clone().into()),\n\n );\n\n let pre_state = vec![\n\n Db::IntQueue(op.clone()),\n\n Db::CasEntry(\n\n a.original_entry.clone(),\n\n Some(a.original_header.clone().into()),\n\n Some(a.signature.clone()),\n\n ),\n\n ];\n\n let pre_state = add_op_to_judged(pre_state, &op);\n\n let expect = vec![\n\n Db::Integrated(op.clone()),\n\n Db::MetaUpdate(\n\n a.original_entry_hash.clone().into(),\n\n 
a.entry_update_entry.clone().into(),\n\n ),\n\n ];\n\n (pre_state, expect, \"register replaced by for entry\")\n\n}\n\n\n", "file_path": "crates/holochain/src/core/workflow/integrate_dht_ops_workflow/tests.rs", "rank": 75, "score": 179290.17126226766 }, { "content": "fn register_delete_link(a: TestData) -> (Vec<Db>, Vec<Db>, &'static str) {\n\n let op = DhtOp::RegisterRemoveLink(a.signature.clone(), a.link_remove.clone());\n\n let pre_state = vec![\n\n Db::IntQueue(op.clone()),\n\n Db::CasHeader(a.link_add.clone().into(), Some(a.signature.clone())),\n\n Db::CasEntry(\n\n a.original_entry.clone().into(),\n\n Some(a.original_header.clone().into()),\n\n Some(a.signature.clone()),\n\n ),\n\n Db::MetaLink(a.link_add.clone(), a.new_entry_hash.clone().into()),\n\n ];\n\n let pre_state = add_op_to_judged(pre_state, &op);\n\n let expect = vec![\n\n Db::Integrated(op.clone()),\n\n Db::MetaLinkEmpty(a.link_add.clone()),\n\n ];\n\n (pre_state, expect, \"register link remove\")\n\n}\n\n\n", "file_path": "crates/holochain/src/core/workflow/integrate_dht_ops_workflow/tests.rs", "rank": 76, "score": 179290.17126226766 }, { "content": "#[allow(dead_code)]\n\nfn register_replaced_by_for_header(a: TestData) -> (Vec<Db>, Vec<Db>, &'static str) {\n\n let op = DhtOp::RegisterUpdatedBy(\n\n a.signature.clone(),\n\n a.entry_update_header.clone(),\n\n Some(a.new_entry.clone().into()),\n\n );\n\n let pre_state = vec![\n\n Db::IntQueue(op.clone()),\n\n Db::CasHeader(a.original_header.clone().into(), Some(a.signature.clone())),\n\n ];\n\n let pre_state = add_op_to_judged(pre_state, &op);\n\n let expect = vec![\n\n Db::Integrated(op.clone()),\n\n Db::MetaUpdate(\n\n a.original_header_hash.clone().into(),\n\n a.entry_update_header.clone().into(),\n\n ),\n\n ];\n\n (pre_state, expect, \"register replaced by for header\")\n\n}\n\n\n", "file_path": "crates/holochain/src/core/workflow/integrate_dht_ops_workflow/tests.rs", "rank": 77, "score": 179290.17126226766 }, { "content": "fn 
register_add_link(a: TestData) -> (Vec<Db>, Vec<Db>, &'static str) {\n\n let op = DhtOp::RegisterAddLink(a.signature.clone(), a.link_add.clone());\n\n let pre_state = vec![\n\n Db::IntQueue(op.clone()),\n\n Db::CasEntry(\n\n a.original_entry.clone().into(),\n\n Some(a.original_header.clone().into()),\n\n Some(a.signature.clone()),\n\n ),\n\n ];\n\n let pre_state = add_op_to_judged(pre_state, &op);\n\n let expect = vec![\n\n Db::Integrated(op.clone()),\n\n Db::MetaLink(a.link_add.clone(), a.new_entry_hash.clone().into()),\n\n ];\n\n (pre_state, expect, \"register link add\")\n\n}\n\n\n", "file_path": "crates/holochain/src/core/workflow/integrate_dht_ops_workflow/tests.rs", "rank": 78, "score": 179290.17126226766 }, { "content": "fn register_deleted_header_by(a: TestData) -> (Vec<Db>, Vec<Db>, &'static str) {\n\n let op = DhtOp::RegisterDeletedBy(a.signature.clone(), a.entry_delete.clone());\n\n let pre_state = vec![\n\n Db::IntQueue(op.clone()),\n\n Db::CasEntry(\n\n a.original_entry.clone(),\n\n Some(a.original_header.clone().into()),\n\n Some(a.signature.clone()),\n\n ),\n\n ];\n\n let pre_state = add_op_to_judged(pre_state, &op);\n\n let expect = vec![\n\n Db::Integrated(op.clone()),\n\n Db::MetaDelete(\n\n a.original_header_hash.clone().into(),\n\n a.entry_delete.clone().into(),\n\n ),\n\n ];\n\n (pre_state, expect, \"register deleted header by\")\n\n}\n\n\n", "file_path": "crates/holochain/src/core/workflow/integrate_dht_ops_workflow/tests.rs", "rank": 79, "score": 179290.17126226766 }, { "content": "#[instrument(skip(env, workspace))]\n\nfn inspect_integrated(\n\n env: &EnvironmentWrite,\n\n workspace: &IncomingDhtOpsWorkspace,\n\n) -> Vec<(DhtOpHash, IntegratedDhtOpsValue, Element)> {\n\n debug!(\"start\");\n\n let element_buf = ElementBuf::vault(env.clone().into(), true).unwrap();\n\n let element_buf_reject = ElementBuf::rejected(env.clone().into()).unwrap();\n\n fresh_reader_test!(env, |r| {\n\n workspace\n\n .integrated_dht_ops\n\n .iter(&r)\n\n 
.unwrap()\n\n .map(|(k, i)| {\n\n let hash = DhtOpHash::from_raw_bytes(k.to_vec());\n\n let el = element_buf\n\n .get_element(&i.op.header_hash())\n\n .unwrap()\n\n .or_else(|| element_buf_reject.get_element(&i.op.header_hash()).unwrap())\n\n .expect(\"missing element\");\n\n debug!(?hash, ?i, op_in_int = ?el);\n\n Ok((hash, i, el))\n\n })\n\n .collect()\n\n .unwrap()\n\n })\n\n}\n", "file_path": "crates/holochain/src/core/workflow/app_validation_workflow/tests.rs", "rank": 80, "score": 179258.0073083892 }, { "content": "/// Check for capability headers\n\n/// and exit as we don't want to validate them\n\nfn check_for_caps(element: &Element) -> AppValidationOutcome<()> {\n\n match element.header().entry_type() {\n\n Some(EntryType::CapClaim) | Some(EntryType::CapGrant) => Outcome::accepted(),\n\n _ => Ok(()),\n\n }\n\n}\n\n\n\n/// Get the zome name from the app entry type\n\n/// or get all zome names.\n\nasync fn get_zomes_to_invoke(\n\n element: &Element,\n\n dna_file: &DnaFile,\n\n workspace: &mut AppValidationWorkspace,\n\n network: &HolochainP2pCell,\n\n) -> AppValidationOutcome<ZomesToInvoke> {\n\n let aet = {\n\n let cascade = workspace.full_cascade(network.clone());\n\n get_app_entry_type(element, cascade).await?\n\n };\n\n match aet {\n\n Some(aet) => Ok(ZomesToInvoke::One(get_zome_name(&aet, &dna_file)?)),\n\n None => match element.header() {\n\n Header::CreateLink(_) | Header::DeleteLink(_) => {\n\n get_link_zome(element, dna_file, workspace, network).await\n\n }\n\n _ => Ok(ZomesToInvoke::All),\n\n },\n\n }\n\n}\n\n\n", "file_path": "crates/holochain/src/core/workflow/app_validation_workflow.rs", "rank": 81, "score": 178814.66695065034 }, { "content": "fn add_op_to_judged(mut ps: Vec<Db>, op: &DhtOp) -> Vec<Db> {\n\n match op {\n\n DhtOp::StoreElement(s, h, e) => {\n\n ps.push(Db::PendingHeader(h.clone(), Some(s.clone())));\n\n if let Some(e) = e {\n\n ps.push(Db::PendingEntry(\n\n *e.clone(),\n\n Some(h.clone()),\n\n Some(s.clone()),\n\n ));\n\n }\n\n 
}\n\n DhtOp::StoreEntry(s, h, e) => {\n\n let h: Header = h.clone().try_into().unwrap();\n\n ps.push(Db::PendingHeader(h.clone(), Some(s.clone())));\n\n ps.push(Db::PendingEntry(\n\n *e.clone(),\n\n Some(h.clone()),\n\n Some(s.clone()),\n\n ));\n", "file_path": "crates/holochain/src/core/workflow/integrate_dht_ops_workflow/tests.rs", "rank": 82, "score": 177816.55063555075 }, { "content": "#[instrument(skip(env, workspace))]\n\nfn inspect_val_limbo(\n\n env: &EnvironmentWrite,\n\n workspace: &IncomingDhtOpsWorkspace,\n\n) -> Vec<(DhtOpHash, ValidationLimboValue, Option<Element>)> {\n\n debug!(\"start\");\n\n let element_buf = ElementBuf::pending(env.clone().into()).unwrap();\n\n fresh_reader_test!(env, |r| {\n\n workspace\n\n .validation_limbo\n\n .iter(&r)\n\n .unwrap()\n\n .map(|(k, i)| {\n\n let hash = DhtOpHash::from_raw_bytes(k.to_vec());\n\n let el = element_buf.get_element(&i.op.header_hash()).unwrap();\n\n debug!(?hash, ?i, op_in_val = ?el);\n\n Ok((hash, i, el))\n\n })\n\n .collect()\n\n .unwrap()\n\n })\n\n}\n\n\n", "file_path": "crates/holochain/src/core/workflow/app_validation_workflow/tests.rs", "rank": 83, "score": 177125.40674157758 }, { "content": "// These are the expected invalid ops\n\nfn expected_invalid_entry(\n\n (hash, i, el): &(DhtOpHash, IntegratedDhtOpsValue, Element),\n\n line: u32,\n\n invalid_header_hash: &HeaderHash,\n\n invalid_entry_hash: &AnyDhtHash,\n\n) -> bool {\n\n let s = format!(\"\\nline:{}\\n{:?}\\n{:?}\\n{:?}\", line, hash, i, el);\n\n match &i.op {\n\n // A Store entry that matches these hashes\n\n DhtOpLight::StoreEntry(hh, _, eh)\n\n if eh == invalid_entry_hash && hh == invalid_header_hash =>\n\n {\n\n assert_eq!(i.validation_status, ValidationStatus::Rejected, \"{}\", s)\n\n }\n\n // And the store element\n\n DhtOpLight::StoreElement(hh, _, _) if hh == invalid_header_hash => {\n\n assert_eq!(i.validation_status, ValidationStatus::Rejected, \"{}\", s);\n\n }\n\n _ => return false,\n\n }\n\n true\n\n}\n\n\n", "file_path": 
"crates/holochain/src/core/workflow/app_validation_workflow/tests.rs", "rank": 84, "score": 177125.40674157758 }, { "content": "fn run_validation_callback_inner(\n\n zomes_to_invoke: ZomesToInvoke,\n\n element: Arc<Element>,\n\n validation_package: Option<Arc<ValidationPackage>>,\n\n entry_def_id: Option<EntryDefId>,\n\n ribosome: &impl RibosomeT,\n\n workspace_lock: CallZomeWorkspaceLock,\n\n network: HolochainP2pCell,\n\n) -> AppValidationResult<Outcome> {\n\n let validate: ValidateResult = ribosome.run_validate(\n\n ValidateHostAccess::new(workspace_lock, network),\n\n ValidateInvocation {\n\n zomes_to_invoke,\n\n element,\n\n validation_package,\n\n entry_def_id,\n\n },\n\n )?;\n\n match validate {\n\n ValidateResult::Valid => Ok(Outcome::Accepted),\n\n ValidateResult::Invalid(reason) => Ok(Outcome::Rejected(reason)),\n\n ValidateResult::UnresolvedDependencies(hashes) => Ok(Outcome::AwaitingDeps(hashes)),\n\n }\n\n}\n\n\n", "file_path": "crates/holochain/src/core/workflow/app_validation_workflow.rs", "rank": 85, "score": 177125.40674157758 }, { "content": "// Now we expect an invalid link\n\nfn expected_invalid_link(\n\n (hash, i, el): &(DhtOpHash, IntegratedDhtOpsValue, Element),\n\n line: u32,\n\n invalid_link_hash: &HeaderHash,\n\n) -> bool {\n\n let s = format!(\"\\nline:{}\\n{:?}\\n{:?}\\n{:?}\", line, hash, i, el);\n\n match &i.op {\n\n // Invalid link\n\n DhtOpLight::RegisterAddLink(hh, _) if hh == invalid_link_hash => {\n\n assert_eq!(i.validation_status, ValidationStatus::Rejected, \"{}\", s)\n\n }\n\n // The store element for this CreateLink header is also rejected\n\n DhtOpLight::StoreElement(hh, _, _) if hh == invalid_link_hash => {\n\n assert_eq!(i.validation_status, ValidationStatus::Rejected, \"{}\", s)\n\n }\n\n _ => return false,\n\n }\n\n true\n\n}\n\n\n", "file_path": "crates/holochain/src/core/workflow/app_validation_workflow/tests.rs", "rank": 86, "score": 177125.40674157758 }, { "content": "// Link remove when not an author\n\nfn 
register_delete_link_missing_base(a: TestData) -> (Vec<Db>, Vec<Db>, &'static str) {\n\n let op = DhtOp::RegisterRemoveLink(a.signature.clone(), a.link_remove.clone());\n\n let pre_state = vec![Db::IntQueue(op.clone())];\n\n let pre_state = add_op_to_judged(pre_state, &op);\n\n let expect = vec![Db::IntegratedEmpty, Db::IntQueue(op.clone()), Db::MetaEmpty];\n\n (\n\n pre_state,\n\n expect,\n\n \"register remove link remove missing base\",\n\n )\n\n}\n\n\n\n// This runs the above tests\n\n#[tokio::test(threaded_scheduler)]\n\nasync fn test_ops_state() {\n\n observability::test_run().ok();\n\n let test_env = test_cell_env();\n\n let env = test_env.env();\n\n\n\n let tests = [\n", "file_path": "crates/holochain/src/core/workflow/integrate_dht_ops_workflow/tests.rs", "rank": 87, "score": 176088.14497256587 }, { "content": "// Now we're trying to remove an invalid link\n\nfn expected_invalid_remove_link(\n\n (hash, i, el): &(DhtOpHash, IntegratedDhtOpsValue, Element),\n\n line: u32,\n\n invalid_remove_hash: &HeaderHash,\n\n) -> bool {\n\n let s = format!(\"\\nline:{}\\n{:?}\\n{:?}\\n{:?}\", line, hash, i, el);\n\n\n\n // To make it simple we want to skip this op\n\n if let DhtOpLight::RegisterAgentActivity(_, _) = &i.op {\n\n return false;\n\n }\n\n\n\n // Get the hash of the entry that makes the link invalid\n\n let sb = SerializedBytes::try_from(&MaybeLinkable::NeverLinkable).unwrap();\n\n let invalid_link_entry_hash = EntryHash::with_data_sync(&Entry::app(sb).unwrap());\n\n\n\n // Link adds with these base / target are invalid\n\n if let Header::CreateLink(la) = el.header() {\n\n if invalid_link_entry_hash == la.base_address\n\n || invalid_link_entry_hash == la.target_address\n", "file_path": "crates/holochain/src/core/workflow/app_validation_workflow/tests.rs", "rank": 88, "score": 175054.31099673948 }, { "content": "#[hdk_extern]\n\nfn zome_info(_: ()) -> ExternResult<ZomeInfoOutput> {\n\n Ok(ZomeInfoOutput::new(zome_info!()?))\n\n}\n", "file_path": 
"crates/test_utils/wasm/wasm_workspace/zome_info/src/lib.rs", "rank": 89, "score": 174517.10461238178 }, { "content": "/// Get the element from the op or\n\n/// return accepted because we don't app\n\n/// validate this op.\n\nfn get_element(op: DhtOp) -> AppValidationOutcome<Element> {\n\n match op {\n\n DhtOp::RegisterAgentActivity(_, _) => Outcome::accepted(),\n\n DhtOp::StoreElement(s, h, e) => match h {\n\n Header::Delete(_) | Header::CreateLink(_) | Header::DeleteLink(_) => Ok(Element::new(\n\n SignedHeaderHashed::with_presigned(HeaderHashed::from_content_sync(h), s),\n\n None,\n\n )),\n\n Header::Update(_) | Header::Create(_) => Ok(Element::new(\n\n SignedHeaderHashed::with_presigned(HeaderHashed::from_content_sync(h), s),\n\n e.map(|e| *e),\n\n )),\n\n _ => Outcome::accepted(),\n\n },\n\n DhtOp::StoreEntry(s, h, e) => Ok(Element::new(\n\n SignedHeaderHashed::with_presigned(HeaderHashed::from_content_sync(h.into()), s),\n\n Some(*e),\n\n )),\n\n DhtOp::RegisterUpdatedBy(s, h, e) => Ok(Element::new(\n\n SignedHeaderHashed::with_presigned(HeaderHashed::from_content_sync(h.into()), s),\n", "file_path": "crates/holochain/src/core/workflow/app_validation_workflow.rs", "rank": 90, "score": 174032.73803270241 }, { "content": "#[hdk_extern]\n\nfn hash_entry(input: HashEntryInput) -> ExternResult<EntryHash> {\n\n Ok(hash_entry!(input)?)\n\n}\n", "file_path": "crates/test_utils/wasm/wasm_workspace/hash_entry/src/lib.rs", "rank": 91, "score": 172679.30703756452 }, { "content": "pub fn run_delete_link_validation_callback(\n\n zome_name: ZomeName,\n\n delete_link: DeleteLink,\n\n ribosome: &impl RibosomeT,\n\n workspace_lock: CallZomeWorkspaceLock,\n\n network: HolochainP2pCell,\n\n) -> AppValidationResult<Outcome> {\n\n let invocation = ValidateDeleteLinkInvocation {\n\n zome_name,\n\n delete_link,\n\n };\n\n let invocation = ValidateLinkInvocation::<ValidateDeleteLinkInvocation>::new(invocation);\n\n run_link_validation_callback(invocation, ribosome, workspace_lock, 
network)\n\n}\n\n\n", "file_path": "crates/holochain/src/core/workflow/app_validation_workflow.rs", "rank": 92, "score": 171303.67591545163 }, { "content": "pub fn run_create_link_validation_callback(\n\n zome_name: ZomeName,\n\n link_add: Arc<CreateLink>,\n\n base: Arc<Entry>,\n\n target: Arc<Entry>,\n\n ribosome: &impl RibosomeT,\n\n workspace_lock: CallZomeWorkspaceLock,\n\n network: HolochainP2pCell,\n\n) -> AppValidationResult<Outcome> {\n\n let invocation = ValidateCreateLinkInvocation {\n\n zome_name,\n\n link_add,\n\n base,\n\n target,\n\n };\n\n let invocation = ValidateLinkInvocation::<ValidateCreateLinkInvocation>::new(invocation);\n\n run_link_validation_callback(invocation, ribosome, workspace_lock, network)\n\n}\n\n\n", "file_path": "crates/holochain/src/core/workflow/app_validation_workflow.rs", "rank": 93, "score": 171303.67591545163 }, { "content": "fn visit_parents<S, N>(stack: &mut String, ctx: &FmtContext<'_, S, N>)\n\nwhere\n\n S: Subscriber + for<'a> LookupSpan<'a>,\n\n N: for<'writer> FormatFields<'writer> + 'static,\n\n{\n\n ctx.visit_spans::<(), _>(|span| {\n\n let meta = span.metadata();\n\n let name = meta.name();\n\n let module = meta.module_path();\n\n let line = meta.line();\n\n if let Some(module) = module {\n\n write!(stack, \"{}:\", module).ok();\n\n }\n\n if let Some(line) = line {\n\n write!(stack, \"{}\", line).ok();\n\n }\n\n write!(stack, \":{}\", name).ok();\n\n *stack += \"; \";\n\n Ok(())\n\n })\n\n .ok();\n\n}\n", "file_path": "crates/observability/src/fmt.rs", "rank": 94, "score": 168757.01531420188 }, { "content": "/// For now errors result in an outcome but in the future\n\n/// we might find it useful to include the reason something\n\n/// was rejected etc.\n\n/// This is why the errors contain data but is currently unread.\n\nfn handle_failed(error: ValidationOutcome) -> Outcome {\n\n use Outcome::*;\n\n match error {\n\n ValidationOutcome::Counterfeit(_, _) => {\n\n unreachable!(\"Counterfeit ops are dropped before sys 
validation\")\n\n }\n\n ValidationOutcome::DepMissingFromDht(_) => MissingDhtDep,\n\n ValidationOutcome::EntryDefId(_) => Rejected,\n\n ValidationOutcome::EntryHash => Rejected,\n\n ValidationOutcome::EntryTooLarge(_, _) => Rejected,\n\n ValidationOutcome::EntryType => Rejected,\n\n ValidationOutcome::EntryVisibility(_) => Rejected,\n\n ValidationOutcome::TagTooLarge(_, _) => Rejected,\n\n ValidationOutcome::NotCreateLink(_) => Rejected,\n\n ValidationOutcome::NotNewEntry(_) => Rejected,\n\n ValidationOutcome::NotHoldingDep(dep) => AwaitingOpDep(dep),\n\n ValidationOutcome::PrevHeaderError(PrevHeaderError::MissingMeta(dep)) => {\n\n AwaitingOpDep(dep.into())\n\n }\n\n ValidationOutcome::PrevHeaderError(_) => Rejected,\n", "file_path": "crates/holochain/src/core/workflow/sys_validation_workflow.rs", "rank": 95, "score": 168449.5198819965 }, { "content": "/// A fixture AgentPubKey for unit testing.\n\n/// NB: This must match up with AgentPubKeyFixturator's Predictable curve\n\npub fn fake_agent_pubkey_1() -> AgentPubKey {\n\n AgentPubKey::try_from(\"uhCAkmrkoAHPVf_eufG7eC5fm6QKrW5pPMoktvG5LOC0SnJ4vV1Uv\").unwrap()\n\n}\n\n\n", "file_path": "crates/zome_types/src/test_utils.rs", "rank": 96, "score": 165060.82215219067 }, { "content": "/// Another fixture AgentPubKey for unit testing.\n\n/// NB: This must match up with AgentPubKeyFixturator's Predictable curve\n\npub fn fake_agent_pubkey_2() -> AgentPubKey {\n\n AgentPubKey::try_from(\"uhCAke1j8Z2a-_min0h0pGuEMcYlo_V1l1mt9OtBuywKmHlg4L_R-\").unwrap()\n\n}\n\n\n", "file_path": "crates/zome_types/src/test_utils.rs", "rank": 97, "score": 165060.79373372183 }, { "content": "fn new_entry_element(entry: Entry, header_type: HeaderType, index: usize) -> Element {\n\n let et = match entry {\n\n Entry::App(_) => EntryType::App(\n\n AppEntryTypeFixturator::new_indexed(Unpredictable, index)\n\n .next()\n\n .unwrap(),\n\n ),\n\n Entry::Agent(_) => EntryType::AgentPubKey,\n\n Entry::CapClaim(_) => EntryType::CapClaim,\n\n 
Entry::CapGrant(_) => EntryType::CapGrant,\n\n };\n\n match header_type {\n\n HeaderType::Create => {\n\n let c = CreateFixturator::new_indexed(et, index).next().unwrap();\n\n let c = NewEntryHeader::Create(c);\n\n let element: Element = ElementFixturator::new_indexed(c, index).next().unwrap();\n\n let (shh, _) = element.into_inner();\n\n Element::new(shh, Some(entry))\n\n }\n\n HeaderType::Update => {\n", "file_path": "crates/types/src/fixt.rs", "rank": 98, "score": 164592.87171009823 }, { "content": "fn base() -> ExternResult<EntryHash> {\n\n path(\"a\")\n\n}\n\n\n", "file_path": "crates/test_utils/wasm/wasm_workspace/link/src/lib.rs", "rank": 99, "score": 163485.37704945705 } ]
Rust
query/src/servers/mysql/mysql_interactive_worker.rs
CNLHC/databend
fad0df2843c148c9c74793dadd38a9e5db274a36
use std::marker::PhantomData; use std::time::Instant; use common_datablocks::DataBlock; use common_exception::ErrorCode; use common_exception::Result; use common_runtime::tokio; use metrics::histogram; use msql_srv::ErrorKind; use msql_srv::InitWriter; use msql_srv::MysqlShim; use msql_srv::ParamParser; use msql_srv::QueryResultWriter; use msql_srv::StatementMetaWriter; use rand::RngCore; use tokio_stream::StreamExt; use crate::interpreters::InterpreterFactory; use crate::servers::mysql::writers::DFInitResultWriter; use crate::servers::mysql::writers::DFQueryResultWriter; use crate::servers::server::mock::get_mock_user; use crate::sessions::DatabendQueryContextRef; use crate::sessions::SessionRef; use crate::sql::DfHint; use crate::sql::PlanParser; struct InteractiveWorkerBase<W: std::io::Write>(PhantomData<W>); pub struct InteractiveWorker<W: std::io::Write> { base: InteractiveWorkerBase<W>, session: SessionRef, version: String, salt: [u8; 20], } impl<W: std::io::Write> MysqlShim<W> for InteractiveWorker<W> { type Error = ErrorCode; fn on_prepare(&mut self, query: &str, writer: StatementMetaWriter<W>) -> Result<()> { if self.session.is_aborting() { writer.error( ErrorKind::ER_ABORTING_CONNECTION, "Aborting this connection. because we are try aborting server.".as_bytes(), )?; return Err(ErrorCode::AbortedSession( "Aborting this connection. because we are try aborting server.", )); } self.base .do_prepare(query, writer, self.session.create_context()) } fn on_execute( &mut self, id: u32, param: ParamParser, writer: QueryResultWriter<W>, ) -> Result<()> { if self.session.is_aborting() { writer.error( ErrorKind::ER_ABORTING_CONNECTION, "Aborting this connection. because we are try aborting server.".as_bytes(), )?; return Err(ErrorCode::AbortedSession( "Aborting this connection. 
because we are try aborting server.", )); } self.base .do_execute(id, param, writer, self.session.create_context()) } fn on_close(&mut self, id: u32) { self.base.do_close(id, self.session.create_context()); } fn on_query(&mut self, query: &str, writer: QueryResultWriter<W>) -> Result<()> { if self.session.is_aborting() { writer.error( ErrorKind::ER_ABORTING_CONNECTION, "Aborting this connection. because we are try aborting server.".as_bytes(), )?; return Err(ErrorCode::AbortedSession( "Aborting this connection. because we are try aborting server.", )); } let start = Instant::now(); let context = self.session.create_context(); context.attach_query_str(query); if let Err(cause) = DFQueryResultWriter::create(writer).write(self.base.do_query(query, context)) { let new_error = cause.add_message(query); return Err(new_error); }; histogram!( super::mysql_metrics::METRIC_MYSQL_PROCESSOR_REQUEST_DURATION, start.elapsed() ); Ok(()) } fn on_init(&mut self, database_name: &str, writer: InitWriter<W>) -> Result<()> { if self.session.is_aborting() { writer.error( ErrorKind::ER_ABORTING_CONNECTION, "Aborting this connection. because we are try aborting server.".as_bytes(), )?; return Err(ErrorCode::AbortedSession( "Aborting this connection. 
because we are try aborting server.", )); } let context = self.session.create_context(); DFInitResultWriter::create(writer).write(self.base.do_init(database_name, context)) } fn version(&self) -> &str { self.version.as_str() } fn connect_id(&self) -> u32 { u32::from_le_bytes([0x08, 0x00, 0x00, 0x00]) } fn default_auth_plugin(&self) -> &str { "mysql_native_password" } fn auth_plugin_for_username(&self, _user: &[u8]) -> &str { "mysql_native_password" } fn salt(&self) -> [u8; 20] { self.salt } fn authenticate( &self, auth_plugin: &str, username: &[u8], salt: &[u8], auth_data: &[u8], ) -> bool { let user = String::from_utf8_lossy(username); if let Ok(user) = get_mock_user(&user) { let encode_password = match auth_plugin { "mysql_native_password" => { if auth_data.is_empty() { vec![] } else { let mut m = sha1::Sha1::new(); m.update(salt); m.update(&user.password); let result = m.digest().bytes(); if auth_data.len() != result.len() { return false; } let mut s = Vec::with_capacity(result.len()); for i in 0..result.len() { s.push(auth_data[i] ^ result[i]); } s } } _ => auth_data.to_vec(), }; return user.authenticate_user(encode_password); } false } } impl<W: std::io::Write> InteractiveWorkerBase<W> { fn do_prepare( &mut self, _: &str, writer: StatementMetaWriter<'_, W>, _: DatabendQueryContextRef, ) -> Result<()> { writer.error( ErrorKind::ER_UNKNOWN_ERROR, "Prepare is not support in Databend.".as_bytes(), )?; Ok(()) } fn do_execute( &mut self, _: u32, _: ParamParser<'_>, writer: QueryResultWriter<'_, W>, _: DatabendQueryContextRef, ) -> Result<()> { writer.error( ErrorKind::ER_UNKNOWN_ERROR, "Execute is not support in Databend.".as_bytes(), )?; Ok(()) } fn do_close(&mut self, _: u32, _: DatabendQueryContextRef) {} fn do_query( &mut self, query: &str, context: DatabendQueryContextRef, ) -> Result<Vec<DataBlock>> { log::debug!("{}", query); let runtime = Self::build_runtime()?; let (plan, hints) = PlanParser::create(context.clone()).build_with_hint_from_sql(query); let 
fetch_query_blocks = || -> Result<Vec<DataBlock>> { let start = Instant::now(); let interpreter = InterpreterFactory::get(context.clone(), plan?)?; let name = interpreter.name().to_string(); let data_stream = runtime.block_on(interpreter.execute())?; histogram!( super::mysql_metrics::METRIC_INTERPRETER_USEDTIME, start.elapsed(), "interpreter" => name ); runtime.block_on(data_stream.collect::<Result<Vec<DataBlock>>>()) }; let blocks = fetch_query_blocks(); match blocks { Ok(v) => Ok(v), Err(e) => { let hint = hints.iter().find(|v| v.error_code.is_some()); if let Some(DfHint { error_code: Some(code), .. }) = hint { if *code == e.code() { Ok(vec![DataBlock::empty()]) } else { let actual_code = e.code(); Err(e.add_message(format!( "Expected server error code: {} but got: {}.", code, actual_code ))) } } else { Err(e) } } } } fn do_init(&mut self, database_name: &str, context: DatabendQueryContextRef) -> Result<()> { self.do_query(&format!("USE {};", database_name), context)?; Ok(()) } fn build_runtime() -> Result<tokio::runtime::Runtime> { tokio::runtime::Builder::new_multi_thread() .enable_all() .build() .map_err(|tokio_error| ErrorCode::TokioError(format!("{}", tokio_error))) } } impl<W: std::io::Write> InteractiveWorker<W> { pub fn create(session: SessionRef) -> InteractiveWorker<W> { let mut bs = vec![0u8; 20]; let mut rng = rand::thread_rng(); rng.fill_bytes(bs.as_mut()); let mut scramble: [u8; 20] = [0; 20]; for i in 0..20 { scramble[i] = bs[i]; if scramble[i] == b'\0' || scramble[i] == b'$' { scramble[i] += 1; } } let context = session.create_context(); InteractiveWorker::<W> { session, base: InteractiveWorkerBase::<W>(PhantomData::<W>), salt: scramble, version: context.get_fuse_version(), } } }
use std::marker::PhantomData; use std::time::Instant; use common_datablocks::DataBlock; use common_exception::ErrorCode; use common_exception::Result; use common_runtime::tokio; use metrics::histogram; use msql_srv::ErrorKind; use msql_srv::InitWriter; use msql_srv::MysqlShim; use msql_srv::ParamParser; use msql_srv::QueryResultWriter; use msql_srv::StatementMetaWriter; use rand::RngCore; use tokio_stream::StreamExt; use crate::interpreters::InterpreterFactory; use crate::servers::mysql::writers::DFInitResultWriter; use crate::servers::mysql::writers::DFQueryResultWriter; use crate::servers::server::mock::get_mock_user; use crate::sessions::DatabendQueryContextRef; use crate::sessions::SessionRef; use crate::sql::DfHint; use crate::sql::PlanParser; struct InteractiveWorkerBase<W: std::io::Write>(PhantomData<W>); pub struct InteractiveWorker<W: std::io::Write> { base: InteractiveWorkerBase<W>, session: SessionRef, version: String, salt: [u8; 20], } impl<W: std::io::Write> MysqlShim<W> for InteractiveWorker<W> { type Error = ErrorCode; fn on_prepare(&mut self, query: &str, writer: StatementMetaWriter<W>) -> Result<()> { if self.session.is_aborting() { writer.error( ErrorKind::ER_ABORTING_CONNECTION, "Aborting this connection. because we are try aborting server.".as_bytes(), )?; return Err(ErrorCode::AbortedSession( "Aborting this connection. because we are try aborting server.", )); } self.base .do_prepare(query, writer, self.session.create_context()) } fn on_execute( &mut self, id: u32, param: ParamParser, writer: QueryResultWriter<W>, ) -> Result<()> { if self.session.is_aborting() { writer.error( ErrorKind::ER_ABORTING_CONNECTION, "Aborting this connection. because we are try aborting server.".as_bytes(), )?; return
; } self.base .do_execute(id, param, writer, self.session.create_context()) } fn on_close(&mut self, id: u32) { self.base.do_close(id, self.session.create_context()); } fn on_query(&mut self, query: &str, writer: QueryResultWriter<W>) -> Result<()> { if self.session.is_aborting() { writer.error( ErrorKind::ER_ABORTING_CONNECTION, "Aborting this connection. because we are try aborting server.".as_bytes(), )?; return Err(ErrorCode::AbortedSession( "Aborting this connection. because we are try aborting server.", )); } let start = Instant::now(); let context = self.session.create_context(); context.attach_query_str(query); if let Err(cause) = DFQueryResultWriter::create(writer).write(self.base.do_query(query, context)) { let new_error = cause.add_message(query); return Err(new_error); }; histogram!( super::mysql_metrics::METRIC_MYSQL_PROCESSOR_REQUEST_DURATION, start.elapsed() ); Ok(()) } fn on_init(&mut self, database_name: &str, writer: InitWriter<W>) -> Result<()> { if self.session.is_aborting() { writer.error( ErrorKind::ER_ABORTING_CONNECTION, "Aborting this connection. because we are try aborting server.".as_bytes(), )?; return Err(ErrorCode::AbortedSession( "Aborting this connection. 
because we are try aborting server.", )); } let context = self.session.create_context(); DFInitResultWriter::create(writer).write(self.base.do_init(database_name, context)) } fn version(&self) -> &str { self.version.as_str() } fn connect_id(&self) -> u32 { u32::from_le_bytes([0x08, 0x00, 0x00, 0x00]) } fn default_auth_plugin(&self) -> &str { "mysql_native_password" } fn auth_plugin_for_username(&self, _user: &[u8]) -> &str { "mysql_native_password" } fn salt(&self) -> [u8; 20] { self.salt } fn authenticate( &self, auth_plugin: &str, username: &[u8], salt: &[u8], auth_data: &[u8], ) -> bool { let user = String::from_utf8_lossy(username); if let Ok(user) = get_mock_user(&user) { let encode_password = match auth_plugin { "mysql_native_password" => { if auth_data.is_empty() { vec![] } else { let mut m = sha1::Sha1::new(); m.update(salt); m.update(&user.password); let result = m.digest().bytes(); if auth_data.len() != result.len() { return false; } let mut s = Vec::with_capacity(result.len()); for i in 0..result.len() { s.push(auth_data[i] ^ result[i]); } s } } _ => auth_data.to_vec(), }; return user.authenticate_user(encode_password); } false } } impl<W: std::io::Write> InteractiveWorkerBase<W> { fn do_prepare( &mut self, _: &str, writer: StatementMetaWriter<'_, W>, _: DatabendQueryContextRef, ) -> Result<()> { writer.error( ErrorKind::ER_UNKNOWN_ERROR, "Prepare is not support in Databend.".as_bytes(), )?; Ok(()) } fn do_execute( &mut self, _: u32, _: ParamParser<'_>, writer: QueryResultWriter<'_, W>, _: DatabendQueryContextRef, ) -> Result<()> { writer.error( ErrorKind::ER_UNKNOWN_ERROR, "Execute is not support in Databend.".as_bytes(), )?; Ok(()) } fn do_close(&mut self, _: u32, _: DatabendQueryContextRef) {} fn do_query( &mut self, query: &str, context: DatabendQueryContextRef, ) -> Result<Vec<DataBlock>> { log::debug!("{}", query); let runtime = Self::build_runtime()?; let (plan, hints) = PlanParser::create(context.clone()).build_with_hint_from_sql(query); let 
fetch_query_blocks = || -> Result<Vec<DataBlock>> { let start = Instant::now(); let interpreter = InterpreterFactory::get(context.clone(), plan?)?; let name = interpreter.name().to_string(); let data_stream = runtime.block_on(interpreter.execute())?; histogram!( super::mysql_metrics::METRIC_INTERPRETER_USEDTIME, start.elapsed(), "interpreter" => name ); runtime.block_on(data_stream.collect::<Result<Vec<DataBlock>>>()) }; let blocks = fetch_query_blocks(); match blocks { Ok(v) => Ok(v), Err(e) => { let hint = hints.iter().find(|v| v.error_code.is_some()); if let Some(DfHint { error_code: Some(code), .. }) = hint { if *code == e.code() { Ok(vec![DataBlock::empty()]) } else { let actual_code = e.code(); Err(e.add_message(format!( "Expected server error code: {} but got: {}.", code, actual_code ))) } } else { Err(e) } } } } fn do_init(&mut self, database_name: &str, context: DatabendQueryContextRef) -> Result<()> { self.do_query(&format!("USE {};", database_name), context)?; Ok(()) } fn build_runtime() -> Result<tokio::runtime::Runtime> { tokio::runtime::Builder::new_multi_thread() .enable_all() .build() .map_err(|tokio_error| ErrorCode::TokioError(format!("{}", tokio_error))) } } impl<W: std::io::Write> InteractiveWorker<W> { pub fn create(session: SessionRef) -> InteractiveWorker<W> { let mut bs = vec![0u8; 20]; let mut rng = rand::thread_rng(); rng.fill_bytes(bs.as_mut()); let mut scramble: [u8; 20] = [0; 20]; for i in 0..20 { scramble[i] = bs[i]; if scramble[i] == b'\0' || scramble[i] == b'$' { scramble[i] += 1; } } let context = session.create_context(); InteractiveWorker::<W> { session, base: InteractiveWorkerBase::<W>(PhantomData::<W>), salt: scramble, version: context.get_fuse_version(), } } }
Err(ErrorCode::AbortedSession( "Aborting this connection. because we are try aborting server.", ))
call_expression
[ { "content": "fn query<T: FromRow>(connection: &mut Conn, query: &str) -> Result<Vec<T>> {\n\n connection\n\n .query::<T, &str>(query)\n\n .map_err_to_code(ErrorCode::UnknownException, || \"Query error\")\n\n}\n\n\n", "file_path": "query/src/servers/mysql/mysql_handler_test.rs", "rank": 0, "score": 415952.97915072954 }, { "content": "pub fn from_clickhouse_err(res: common_clickhouse_srv::errors::Error) -> ErrorCode {\n\n ErrorCode::LogicalError(format!(\"clickhouse-srv expception: {:?}\", res))\n\n}\n\n\n", "file_path": "query/src/servers/clickhouse/writers/query_writer.rs", "rank": 1, "score": 393526.6487588225 }, { "content": "pub fn to_clickhouse_err(res: ErrorCode) -> common_clickhouse_srv::errors::Error {\n\n common_clickhouse_srv::errors::Error::Server(ServerError {\n\n code: res.code() as u32,\n\n name: \"DB:Exception\".to_string(),\n\n message: res.message(),\n\n stack_trace: res.backtrace_str(),\n\n })\n\n}\n\n\n", "file_path": "query/src/servers/clickhouse/writers/query_writer.rs", "rank": 2, "score": 393526.6487588225 }, { "content": "pub fn try_create_context_with_conf(mut config: Config) -> Result<DatabendQueryContextRef> {\n\n let cluster = Cluster::empty();\n\n\n\n // Setup log dir to the tests directory.\n\n config.log.log_dir = env::current_dir()?\n\n .join(\"../tests/data/logs\")\n\n .display()\n\n .to_string();\n\n\n\n let sessions = SessionManager::from_conf(config, cluster)?;\n\n let test_session = sessions.create_session(\"TestSession\")?;\n\n let test_context = test_session.create_context();\n\n test_context.get_settings().set_max_threads(8)?;\n\n Ok(test_context)\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct ClusterNode {\n\n name: String,\n\n priority: u8,\n", "file_path": "query/src/tests/context.rs", "rank": 3, "score": 392362.4698925454 }, { "content": "pub fn to_clickhouse_block(block: DataBlock) -> Result<Block> {\n\n let mut result = Block::new();\n\n if block.num_columns() == 0 {\n\n return Ok(result);\n\n }\n\n\n\n let utc: Tz = 
\"UTC\".parse().unwrap();\n\n for column_index in 0..block.num_columns() {\n\n let column = block.column(column_index).to_array()?;\n\n let field = block.schema().field(column_index);\n\n let name = field.name();\n\n let is_nullable = field.is_nullable();\n\n result = match is_nullable {\n\n true => match field.data_type() {\n\n DataType::Int8 => result.column(name, column.i8()?.collect_values()),\n\n DataType::Int16 => result.column(name, column.i16()?.collect_values()),\n\n DataType::Int32 => result.column(name, column.i32()?.collect_values()),\n\n DataType::Int64 => result.column(name, column.i64()?.collect_values()),\n\n DataType::UInt8 => result.column(name, column.u8()?.collect_values()),\n\n DataType::UInt16 => result.column(name, column.u16()?.collect_values()),\n", "file_path": "query/src/servers/clickhouse/writers/query_writer.rs", "rank": 4, "score": 390040.29579779605 }, { "content": "pub fn flight_result_to_str(r: &arrow_flight::Result) -> String {\n\n match std::str::from_utf8(&r.body) {\n\n Ok(v) => v.to_string(),\n\n Err(_e) => format!(\"{:?}\", r.body),\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct RpcClientTlsConfig {\n\n pub rpc_tls_server_root_ca_cert: String,\n\n pub domain_name: String,\n\n}\n\nimpl RpcClientTlsConfig {\n\n pub fn enabled(&self) -> bool {\n\n !self.rpc_tls_server_root_ca_cert.is_empty() && !self.domain_name.is_empty()\n\n }\n\n}\n", "file_path": "common/flights/src/common.rs", "rank": 5, "score": 380552.5277552145 }, { "content": "fn parse_opt_u8(source: &str) -> std::result::Result<Option<u8>, ()> {\n\n if source == \"none\" {\n\n return Ok(None);\n\n }\n\n\n\n let duration: u8 = match source.parse() {\n\n Ok(value) => value,\n\n Err(_) => return Err(()),\n\n };\n\n\n\n Ok(Some(duration))\n\n}\n\n\n", "file_path": "common/clickhouse-srv/src/types/options.rs", "rank": 6, "score": 376702.20755355863 }, { "content": "pub fn try_create_session_mgr(max_active_sessions: Option<u64>) -> Result<SessionManagerRef> {\n\n let 
mut conf = Config::default();\n\n // Setup log dir to the tests directory.\n\n conf.log.log_dir = env::current_dir()?\n\n .join(\"../tests/data/logs\")\n\n .display()\n\n .to_string();\n\n // Set max active session number if have.\n\n if let Some(max) = max_active_sessions {\n\n conf.query.max_active_sessions = max;\n\n }\n\n\n\n SessionManager::from_conf(conf, Cluster::empty())\n\n}\n", "file_path": "query/src/tests/sessions.rs", "rank": 7, "score": 368506.15907200007 }, { "content": "pub fn block_location(name: &str) -> String {\n\n format!(\"_b/{}\", name)\n\n}\n\n\n", "file_path": "query/src/datasources/table/fuse/util/location_gen.rs", "rank": 8, "score": 363633.69640616456 }, { "content": "pub fn snapshot_location(name: &str) -> String {\n\n format!(\"_ss/{}\", name)\n\n}\n", "file_path": "query/src/datasources/table/fuse/util/location_gen.rs", "rank": 9, "score": 363633.69640616456 }, { "content": "pub fn from_clickhouse_block(schema: DataSchemaRef, block: Block) -> Result<DataBlock> {\n\n let get_series = |block: &Block, index: usize| -> CHResult<Series> {\n\n let col = &block.columns()[index];\n\n match col.sql_type() {\n\n SqlType::UInt8 => {\n\n Ok(DFUInt8Array::new_from_iter(col.iter::<u8>()?.copied()).into_series())\n\n }\n\n SqlType::UInt16 | SqlType::Date => {\n\n Ok(DFUInt16Array::new_from_iter(col.iter::<u16>()?.copied()).into_series())\n\n }\n\n SqlType::UInt32 | SqlType::DateTime(DateTimeType::DateTime32) => {\n\n Ok(DFUInt32Array::new_from_iter(col.iter::<u32>()?.copied()).into_series())\n\n }\n\n SqlType::UInt64 => {\n\n Ok(DFUInt64Array::new_from_iter(col.iter::<u64>()?.copied()).into_series())\n\n }\n\n SqlType::Int8 => {\n\n Ok(DFInt8Array::new_from_iter(col.iter::<i8>()?.copied()).into_series())\n\n }\n\n SqlType::Int16 => {\n", "file_path": "query/src/servers/clickhouse/writers/query_writer.rs", "rank": 10, "score": 361435.4195522373 }, { "content": "pub fn segment_info_location(name: &str) -> String {\n\n format!(\"_sg/{}\", 
name)\n\n}\n\n\n", "file_path": "query/src/datasources/table/fuse/util/location_gen.rs", "rank": 11, "score": 359176.4509951987 }, { "content": "pub fn criterion_benchmark_suite(c: &mut Criterion, sql: &str) {\n\n c.bench_function(sql, |b| {\n\n b.iter(|| {\n\n tokio::runtime::Runtime::new()\n\n .unwrap()\n\n .block_on(select_executor(sql))\n\n })\n\n });\n\n}\n", "file_path": "query/benches/suites/mod.rs", "rank": 12, "score": 358515.35794524837 }, { "content": "fn set_params<'a, I>(options: &mut Options, iter: I) -> std::result::Result<(), UrlError>\n\nwhere I: Iterator<Item = (Cow<'a, str>, Cow<'a, str>)> {\n\n for (key, value) in iter {\n\n match key.as_ref() {\n\n \"pool_min\" => options.pool_min = parse_param(key, value, usize::from_str)?,\n\n \"pool_max\" => options.pool_max = parse_param(key, value, usize::from_str)?,\n\n \"nodelay\" => options.nodelay = parse_param(key, value, bool::from_str)?,\n\n \"keepalive\" => options.keepalive = parse_param(key, value, parse_opt_duration)?,\n\n \"ping_before_query\" => {\n\n options.ping_before_query = parse_param(key, value, bool::from_str)?\n\n }\n\n \"send_retries\" => options.send_retries = parse_param(key, value, usize::from_str)?,\n\n \"retry_timeout\" => options.retry_timeout = parse_param(key, value, parse_duration)?,\n\n \"ping_timeout\" => options.ping_timeout = parse_param(key, value, parse_duration)?,\n\n \"connection_timeout\" => {\n\n options.connection_timeout = parse_param(key, value, parse_duration)?\n\n }\n\n \"query_timeout\" => options.query_timeout = parse_param(key, value, parse_duration)?,\n\n \"insert_timeout\" => {\n\n options.insert_timeout = parse_param(key, value, parse_opt_duration)?\n", "file_path": "common/clickhouse-srv/src/types/options.rs", "rank": 13, "score": 355793.42581020435 }, { "content": "fn decompress_buffer<R>(reader: &mut R, mut buffer: Vec<u8>) -> Result<Vec<u8>>\n\nwhere R: ReadEx {\n\n let h = UInt128 {\n\n lo: reader.read_scalar()?,\n\n hi: reader.read_scalar()?,\n\n 
};\n\n\n\n let method: u8 = reader.read_scalar()?;\n\n if method != 0x82 {\n\n let message: String = format!(\"unsupported compression method {}\", method);\n\n return Err(raise_error(message));\n\n }\n\n\n\n let compressed: u32 = reader.read_scalar()?;\n\n let original: u32 = reader.read_scalar()?;\n\n\n\n if compressed > DBMS_MAX_COMPRESSED_SIZE {\n\n return Err(raise_error(\"compressed data too big\".to_string()));\n\n }\n\n\n", "file_path": "common/clickhouse-srv/src/types/block/compressed.rs", "rank": 14, "score": 353978.4449818585 }, { "content": "pub fn try_create_context() -> Result<DatabendQueryContextRef> {\n\n let config = Config::default();\n\n try_create_context_with_conf(config)\n\n}\n\n\n", "file_path": "query/src/tests/context.rs", "rank": 15, "score": 352303.4969535782 }, { "content": "fn do_action_request(query_id: &str, stage_id: &str) -> Result<Request<Action>> {\n\n let flight_action = FlightAction::PrepareShuffleAction(ShuffleAction {\n\n query_id: String::from(query_id),\n\n stage_id: String::from(stage_id),\n\n plan: parse_query(\"SELECT number FROM numbers(5)\")?,\n\n sinks: vec![String::from(\"stream_id\")],\n\n scatters_expression: Expression::create_literal(DataValue::UInt64(Some(1))),\n\n });\n\n\n\n Ok(Request::new(flight_action.try_into()?))\n\n}\n", "file_path": "query/src/api/rpc/flight_service_test.rs", "rank": 16, "score": 351906.1956214741 }, { "content": "fn do_get_request(query_id: &str, stage_id: &str) -> Result<Request<Ticket>> {\n\n let stream_ticket = FlightTicket::StreamTicket(StreamTicket {\n\n query_id: String::from(query_id),\n\n stage_id: String::from(stage_id),\n\n stream: String::from(\"stream_id\"),\n\n });\n\n\n\n Ok(Request::new(stream_ticket.try_into()?))\n\n}\n\n\n", "file_path": "query/src/api/rpc/flight_service_test.rs", "rank": 17, "score": 351906.1956214741 }, { "content": "pub fn try_create_catalog() -> Result<DatabaseCatalog> {\n\n let conf = Config::default();\n\n let catalog = 
DatabaseCatalog::try_create_with_config(conf.clone())?;\n\n // Register local/system and remote database engine.\n\n if conf.query.disable_local_database_engine == \"0\" {\n\n catalog.register_db_engine(\"local\", Arc::new(LocalDatabases::create(conf.clone())))?;\n\n }\n\n catalog.register_db_engine(\"system\", Arc::new(SystemDatabases::create(conf.clone())))?;\n\n catalog.register_db_engine(\"remote\", Arc::new(RemoteDatabases::create(conf)))?;\n\n\n\n Ok(catalog)\n\n}\n", "file_path": "query/src/tests/catalog.rs", "rank": 18, "score": 348560.18796994607 }, { "content": "fn parse_date_time64(source: &str) -> Option<(u32, Option<String>)> {\n\n let integer = many1::<String, _, _>(digit()).and_then(|digits| {\n\n digits\n\n .parse::<u32>()\n\n .map_err(|_| StringStreamError::UnexpectedParse)\n\n });\n\n\n\n let word_syms = token('\\\\').with(any()).or(none_of(\"'\".chars()));\n\n let word = token('\\'')\n\n .with(many::<String, _, _>(word_syms))\n\n .skip(token('\\''));\n\n\n\n let timezone = optional(spaces().skip(token(',')).skip(spaces()).with(word));\n\n\n\n let pair = spaces()\n\n .with(integer)\n\n .skip(spaces())\n\n .and(timezone)\n\n .skip(spaces());\n\n\n", "file_path": "common/clickhouse-srv/src/types/column/factory.rs", "rank": 19, "score": 342526.0083302727 }, { "content": "pub fn parse_query(query: impl ToString) -> Result<PlanNode> {\n\n let context = try_create_context()?;\n\n PlanParser::create(context).build_from_sql(&query.to_string())\n\n}\n", "file_path": "query/src/tests/parse_query.rs", "rank": 20, "score": 341007.16217026004 }, { "content": "pub fn parse_storage_scheme(value: Option<&String>) -> Result<StorageScheme> {\n\n if let Some(v) = value {\n\n let v = v.to_uppercase();\n\n match v.as_str() {\n\n \"LOCAL_FS\" | \"LOCAL\" => Ok(TableStorageScheme::LocalFs),\n\n \"DATABEND_DFS\" => Ok(TableStorageScheme::FuseDfs),\n\n \"S3\" => Ok(TableStorageScheme::S3),\n\n _ => Err(ErrorCode::IllegalSchema(format!(\"unknown scheme {}\", v))),\n\n 
}\n\n } else {\n\n Err(ErrorCode::IllegalSchema(\n\n \"invalid table option for Fuse Table, no Storage Scheme provided\",\n\n ))\n\n }\n\n}\n", "file_path": "query/src/datasources/table/fuse/util/storage_scheme_helper.rs", "rank": 21, "score": 327571.42221282725 }, { "content": "///! Create a visual representation of record batches\n\npub fn pretty_format_series(results: &[Series]) -> Result<String> {\n\n Ok(create_table(results)?.trim_fmt())\n\n}\n\n\n", "file_path": "common/datavalues/src/series/series_debug.rs", "rank": 22, "score": 325879.91849372117 }, { "content": "pub fn try_create_cluster_context(nodes: &[ClusterNode]) -> Result<DatabendQueryContextRef> {\n\n let config = Config::default();\n\n let cluster = Cluster::empty();\n\n\n\n for node in nodes {\n\n let node = node.clone();\n\n let cluster = cluster.clone();\n\n std::thread::spawn(move || -> Result<()> {\n\n let runtime = Runtime::new()\n\n .map_err_to_code(ErrorCode::TokioError, || \"Cannot create tokio runtime.\")?;\n\n\n\n runtime.block_on(cluster.add_node(&node.name, node.priority, &node.address))\n\n })\n\n .join()\n\n .unwrap()?;\n\n }\n\n\n\n let sessions = SessionManager::from_conf(config, cluster)?;\n\n let test_session = sessions.create_session(\"TestSession\")?;\n\n let test_context = test_session.create_context();\n\n test_context.get_settings().set_max_threads(8)?;\n\n Ok(test_context)\n\n}\n", "file_path": "query/src/tests/context.rs", "rank": 23, "score": 322957.775478272 }, { "content": "///! 
Create a visual representation of record batches\n\npub fn pretty_format_blocks(results: &[DataBlock]) -> Result<String> {\n\n Ok(create_table(results)?.trim_fmt())\n\n}\n\n\n", "file_path": "common/datablocks/src/data_block_debug.rs", "rank": 24, "score": 322405.30510531995 }, { "content": "pub fn get_meta(meta: &MetadataMap) -> Result<(String, String)> {\n\n fn deserialize_meta(value: &MetadataValue<Binary>, error_msg: &'static str) -> Result<String> {\n\n match value.to_bytes() {\n\n Ok(bytes) => Ok(String::from_utf8(bytes.to_vec())?),\n\n Err(error) => Err(ErrorCode::InvalidMetaBinaryFormat(format!(\n\n \"{}, cause {}\",\n\n error_msg, error\n\n ))),\n\n }\n\n }\n\n\n\n fn fetch_string(meta: &MetadataMap, key: &str, error_msg: &'static str) -> Result<String> {\n\n match meta.get_bin(key) {\n\n None => Err(ErrorCode::UnknownKey(format!(\"Unknown meta key {}\", key))),\n\n Some(meta_binary) => deserialize_meta(meta_binary, error_msg),\n\n }\n\n }\n\n\n\n let db_name = fetch_string(meta, META_KEY_DB_NAME, \"invalid db_name meta data\")?;\n\n let tbl_name = fetch_string(meta, META_KEY_TBL_NAME, \"invalid tbl_name meta data\")?;\n\n Ok((db_name, tbl_name))\n\n}\n", "file_path": "common/flights/src/impls/storage_api_impl_utils.rs", "rank": 25, "score": 321152.9725838888 }, { "content": "/// Counts lines in the source `handle`.\n\n/// count_lines(std::fs::File.open(\"foo.txt\")\n\npub fn count_lines<R: io::Read>(handle: R) -> Result<usize, io::Error> {\n\n let sep = b'\\n';\n\n let mut reader = BufReader::new(handle);\n\n let mut count = 0;\n\n let mut line: Vec<u8> = Vec::new();\n\n while match reader.read_until(sep, &mut line) {\n\n Ok(n) if n > 0 => true,\n\n Err(e) => return Err(e),\n\n _ => false,\n\n } {\n\n if *line.last().unwrap() == sep {\n\n count += 1;\n\n };\n\n }\n\n Ok(count)\n\n}\n", "file_path": "query/src/datasources/common/line.rs", "rank": 26, "score": 313820.5782160659 }, { "content": "fn raise_error(message: String) -> Error {\n\n 
message.into()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_decompress() {\n\n let expected = vec![\n\n 1u8, 0, 2, 255, 255, 255, 255, 0, 1, 1, 1, 115, 6, 83, 116, 114, 105, 110, 103, 3, 97,\n\n 98, 99,\n\n ];\n\n\n\n let source = vec![\n\n 245_u8, 5, 222, 235, 225, 158, 59, 108, 225, 31, 65, 215, 66, 66, 36, 92, 130, 34, 0,\n\n 0, 0, 23, 0, 0, 0, 240, 8, 1, 0, 2, 255, 255, 255, 255, 0, 1, 1, 1, 115, 6, 83, 116,\n\n 114, 105, 110, 103, 3, 97, 98, 99,\n\n ];\n\n\n\n let mut cursor = io::Cursor::new(&source[..]);\n\n let actual = decompress_buffer(&mut cursor, Vec::new()).unwrap();\n\n\n\n assert_eq!(actual, expected);\n\n }\n\n}\n", "file_path": "common/clickhouse-srv/src/types/block/compressed.rs", "rank": 27, "score": 313599.2084247776 }, { "content": "#[inline]\n\npub fn unset_bit(data: &mut [u8], i: usize) {\n\n data[i >> 3] ^= BIT_MASK[i & 7];\n\n}\n\n\n\n/// Sets bit at position `i` for `data` to 0\n\n///\n\n/// # Safety\n\n///\n\n/// Note this doesn't do any bound checking, for performance reason. The caller is\n\n/// responsible to guarantee that `i` is within bounds.\n\n#[inline]\n\npub unsafe fn unset_bit_raw(data: *mut u8, i: usize) {\n\n *data.add(i >> 3) ^= BIT_MASK[i & 7];\n\n}\n\n\n\n/// Returns the ceil of `value`/`divisor`\n", "file_path": "common/datavalues/src/bit_util.rs", "rank": 28, "score": 313265.88561421674 }, { "content": "#[inline]\n\npub fn set_bit(data: &mut [u8], i: usize) {\n\n data[i >> 3] |= BIT_MASK[i & 7];\n\n}\n\n\n\n/// Sets bit at position `i` for `data`\n\n///\n\n/// # Safety\n\n///\n\n/// Note this doesn't do any bound checking, for performance reason. 
The caller is\n\n/// responsible to guarantee that `i` is within bounds.\n\n#[inline]\n\npub unsafe fn set_bit_raw(data: *mut u8, i: usize) {\n\n *data.add(i >> 3) |= BIT_MASK[i & 7];\n\n}\n\n\n\n/// Sets bit at position `i` for `data` to 0\n", "file_path": "common/datavalues/src/bit_util.rs", "rank": 29, "score": 313265.88561421674 }, { "content": "pub fn block_stats(data_block: &DataBlock) -> Result<HashMap<ColumnId, (DataType, ColStats)>> {\n\n // TODO column id is FAKED, this is OK as long as table schema is NOT changed, which is not realistic\n\n // we should extend DataField with column_id ...\n\n (0..).into_iter().zip(data_block.columns().iter()).try_fold(\n\n HashMap::new(),\n\n |mut res, (idx, col)| {\n\n let data_type = col.data_type();\n\n let min = match col {\n\n DataColumn::Array(s) => s.min(),\n\n DataColumn::Constant(v, _) => Ok(v.clone()),\n\n }?;\n\n\n\n let max = match col {\n\n DataColumn::Array(s) => s.max(),\n\n DataColumn::Constant(v, _) => Ok(v.clone()),\n\n }?;\n\n\n\n let null_count = match col {\n\n DataColumn::Array(s) => s.null_count(),\n\n DataColumn::Constant(v, _) => {\n", "file_path": "query/src/datasources/table/fuse/io/block_appender.rs", "rank": 30, "score": 307260.0956812893 }, { "content": "fn from_url(url_str: &str) -> Result<Options> {\n\n let url = Url::parse(url_str)?;\n\n\n\n if url.scheme() != \"tcp\" {\n\n return Err(UrlError::UnsupportedScheme {\n\n scheme: url.scheme().to_string(),\n\n }\n\n .into());\n\n }\n\n\n\n if url.cannot_be_a_base() || !url.has_host() {\n\n return Err(UrlError::Invalid.into());\n\n }\n\n\n\n let mut options = Options::default();\n\n\n\n if let Some(username) = get_username_from_url(&url) {\n\n options.username = username.into();\n\n }\n\n\n", "file_path": "common/clickhouse-srv/src/types/options.rs", "rank": 31, "score": 306165.0163107726 }, { "content": "fn date_iter(column: &Column<Simple>) -> Result<(*const u8, usize, Tz, Option<u32>)> {\n\n let (ptr, size, tz, precision) = unsafe {\n\n let 
mut ptr: *const u8 = ptr::null();\n\n let mut tz: *const Tz = ptr::null();\n\n let mut size: usize = 0;\n\n let mut precision: Option<u32> = None;\n\n column.get_internal(\n\n &[\n\n &mut ptr as *mut *const u8,\n\n &mut tz as *mut *const Tz as *mut *const u8,\n\n &mut size as *mut usize as *mut *const u8,\n\n &mut precision as *mut Option<u32> as *mut *const u8,\n\n ],\n\n 0,\n\n )?;\n\n assert_ne!(ptr, ptr::null());\n\n assert_ne!(tz, ptr::null());\n\n (ptr, size, &*tz, precision)\n\n };\n\n\n", "file_path": "common/clickhouse-srv/src/types/column/iter/mod.rs", "rank": 32, "score": 304492.0527785401 }, { "content": "fn parse_duration(source: &str) -> std::result::Result<Duration, ()> {\n\n let digits_count = source.chars().take_while(|c| c.is_digit(10)).count();\n\n\n\n let left: String = source.chars().take(digits_count).collect();\n\n let right: String = source.chars().skip(digits_count).collect();\n\n\n\n let num = match u64::from_str(&left) {\n\n Ok(value) => value,\n\n Err(_) => return Err(()),\n\n };\n\n\n\n match right.as_str() {\n\n \"s\" => Ok(Duration::from_secs(num)),\n\n \"ms\" => Ok(Duration::from_millis(num)),\n\n _ => Err(()),\n\n }\n\n}\n\n\n", "file_path": "common/clickhouse-srv/src/types/options.rs", "rank": 33, "score": 299772.01818994456 }, { "content": "fn parse_compression(source: &str) -> std::result::Result<bool, ()> {\n\n match source {\n\n \"none\" => Ok(false),\n\n \"lz4\" => Ok(true),\n\n _ => Err(()),\n\n }\n\n}\n\n\n", "file_path": "common/clickhouse-srv/src/types/options.rs", "rank": 34, "score": 299772.01818994456 }, { "content": "// put_uvarint encodes a uint64 into buf and returns the number of bytes written.\n\n// If the buffer is too small, put_uvarint will panic.\n\npub fn put_uvarint(mut buffer: impl AsMut<[u8]>, x: u64) -> usize {\n\n let mut i = 0;\n\n let mut mx = x;\n\n let buf = buffer.as_mut();\n\n while mx >= 0x80 {\n\n buf[i] = mx as u8 | 0x80;\n\n mx >>= 7;\n\n i += 1;\n\n }\n\n buf[i] = mx as u8;\n\n i + 1\n\n}\n", 
"file_path": "common/io/src/binary_write.rs", "rank": 35, "score": 299406.7946994626 }, { "content": "#[inline]\n\npub fn aggregate_types(args: &[DataType]) -> Result<DataType> {\n\n match args.len() {\n\n 0 => Result::Err(ErrorCode::BadArguments(\"Can't aggregate empty args\")),\n\n 1 => Ok(args[0].clone()),\n\n _ => {\n\n let left = args[0].clone();\n\n let right = aggregate_types(&args[1..args.len()])?;\n\n merge_types(&left, &right)\n\n }\n\n }\n\n}\n\n\n", "file_path": "common/datavalues/src/types/data_type_coercion.rs", "rank": 36, "score": 299360.53510979196 }, { "content": "pub fn put_meta(meta: &mut MetadataMap, db_name: &str, tbl_name: &str) {\n\n meta.insert_bin(\n\n META_KEY_DB_NAME,\n\n MetadataValue::from_bytes(db_name.as_bytes()),\n\n );\n\n meta.insert_bin(\n\n META_KEY_TBL_NAME,\n\n MetadataValue::from_bytes(tbl_name.as_bytes()),\n\n );\n\n}\n\n\n", "file_path": "common/flights/src/impls/storage_api_impl_utils.rs", "rank": 37, "score": 298205.7815144368 }, { "content": "fn parse_decimal(source: &str) -> Option<(u8, u8, NoBits)> {\n\n if source.len() < 12 {\n\n return None;\n\n }\n\n\n\n if !source.starts_with(\"Decimal\") {\n\n return None;\n\n }\n\n\n\n let mut nobits = None;\n\n let mut precision = None;\n\n let mut scale = None;\n\n\n\n let mut params_indexes = (None, None);\n\n\n\n for (idx, byte) in source.as_bytes().iter().enumerate() {\n\n if *byte == b'(' {\n\n match &source.as_bytes()[..idx] {\n\n b\"Decimal\" => {}\n\n b\"Decimal32\" => {\n", "file_path": "common/clickhouse-srv/src/types/column/factory.rs", "rank": 38, "score": 297415.8590203655 }, { "content": "struct DummyCHSession;\n\n\n\nimpl DummyCHSession {\n\n pub fn create() -> Arc<dyn ClickHouseSession> {\n\n Arc::new(DummyCHSession {})\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl ClickHouseSession for DummyCHSession {\n\n async fn execute_query(&self, _: &mut CHContext, _: &mut Connection) -> CHResult<()> {\n\n unimplemented!()\n\n }\n\n}\n", "file_path": 
"query/src/servers/clickhouse/reject_connection.rs", "rank": 39, "score": 296828.2677772804 }, { "content": "// put_uvarint encodes a uint64 into buf and returns the number of bytes written.\n\n// If the buffer is too small, put_uvarint will panic.\n\npub fn put_uvarint(mut buffer: impl AsMut<[u8]>, x: u64) -> usize {\n\n let mut i = 0;\n\n let mut mx = x;\n\n let buf = buffer.as_mut();\n\n while mx >= 0x80 {\n\n buf[i] = mx as u8 | 0x80;\n\n mx >>= 7;\n\n i += 1;\n\n }\n\n buf[i] = mx as u8;\n\n i + 1\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n #[test]\n\n fn test_put_uvarint() {\n\n let expected = [148u8, 145, 6, 0, 0, 0, 0, 0, 0, 0];\n\n let mut buffer = [0u8; 10];\n\n\n\n let actual = super::put_uvarint(&mut buffer[..], 100_500);\n\n\n\n assert_eq!(actual, 3);\n\n assert_eq!(buffer, expected);\n\n }\n\n}\n", "file_path": "common/clickhouse-srv/src/binary/uvarint.rs", "rank": 40, "score": 296052.3819615618 }, { "content": "pub fn numeric_byte_size(dt: &DataType) -> Result<usize> {\n\n match dt {\n\n DataType::Int8 | DataType::UInt8 => Ok(1),\n\n DataType::Int16 | DataType::UInt16 => Ok(2),\n\n DataType::Int32 | DataType::UInt32 | DataType::Float32 => Ok(4),\n\n DataType::Int64 | DataType::UInt64 | DataType::Float64 => Ok(8),\n\n _ => Result::Err(ErrorCode::BadArguments(format!(\n\n \"Function number_byte_size argument must be numeric types, but got {:?}\",\n\n dt\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "common/datavalues/src/types/data_type_coercion.rs", "rank": 41, "score": 292171.85589244054 }, { "content": "fn stream_ticket(query_id: &str, stage_id: &str, stream: &str) -> StreamTicket {\n\n StreamTicket {\n\n query_id: query_id.to_string(),\n\n stage_id: stage_id.to_string(),\n\n stream: stream.to_string(),\n\n }\n\n}\n\n\n", "file_path": "query/src/api/rpc/flight_dispatcher_test.rs", "rank": 42, "score": 291886.97127464646 }, { "content": "fn parse_hosts(source: &str) -> std::result::Result<Vec<Url>, ()> {\n\n let mut result = Vec::new();\n\n for host in 
source.split(',') {\n\n match Url::from_str(&format!(\"tcp://{}\", host)) {\n\n Ok(url) => result.push(url),\n\n Err(_) => return Err(()),\n\n }\n\n }\n\n Ok(result)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_parse_hosts() {\n\n let source = \"host2:9000,host3:9000\";\n\n let expected = vec![\n\n Url::from_str(\"tcp://host2:9000\").unwrap(),\n", "file_path": "common/clickhouse-srv/src/types/options.rs", "rank": 43, "score": 291663.3415519026 }, { "content": "fn create_connection(port: u16) -> Result<mysql::Conn> {\n\n let uri = &format!(\"mysql://127.0.0.1:{}?user=default\", port);\n\n let opts = mysql::Opts::from_url(uri).unwrap();\n\n mysql::Conn::new(opts).map_err_to_code(ErrorCode::UnknownException, || \"Reject connection\")\n\n}\n\n\n", "file_path": "query/src/servers/mysql/mysql_handler_test.rs", "rank": 44, "score": 289303.81985595805 }, { "content": "pub fn encode_password(password: impl AsRef<[u8]>, auth_type: &AuthType) -> Vec<u8> {\n\n match auth_type {\n\n AuthType::None => vec![],\n\n AuthType::PlainText => password.as_ref().to_vec(),\n\n AuthType::DoubleSha1 => {\n\n let mut m = sha1::Sha1::new();\n\n m.update(password.as_ref());\n\n\n\n let bs = m.digest().bytes();\n\n let mut m = sha1::Sha1::new();\n\n m.update(&bs[..]);\n\n\n\n m.digest().bytes().to_vec()\n\n }\n\n AuthType::Sha256 => {\n\n let result = sha2::Sha256::digest(password.as_ref());\n\n result[..].to_vec()\n\n }\n\n }\n\n}\n", "file_path": "common/management/src/user/utils.rs", "rank": 45, "score": 288541.61264651385 }, { "content": "fn parse_opt_duration(source: &str) -> std::result::Result<Option<Duration>, ()> {\n\n if source == \"none\" {\n\n return Ok(None);\n\n }\n\n\n\n let duration = parse_duration(source)?;\n\n Ok(Some(duration))\n\n}\n\n\n", "file_path": "common/clickhouse-srv/src/types/options.rs", "rank": 46, "score": 288319.748366615 }, { "content": "fn read_all<R: Read>(r: &mut R) -> io::Result<Vec<u8>> {\n\n let mut v = vec![];\n\n 
r.read_to_end(&mut v)?;\n\n Ok(v)\n\n}\n\n\n\nimpl TestFixture {\n\n pub fn new() -> TestFixture {\n\n TestFixture {\n\n tempdir: tempfile::Builder::new()\n\n .prefix(\"lru-disk-cache-test\")\n\n .tempdir()\n\n .unwrap(),\n\n }\n\n }\n\n\n\n pub fn tmp(&self) -> &Path {\n\n self.tempdir.path()\n\n }\n\n\n\n pub fn create_file<T: AsRef<Path>>(&self, path: T, size: usize) -> PathBuf {\n\n create_file(self.tempdir.path(), path, |mut f| {\n\n f.write_all(&vec![0; size])\n\n })\n\n .unwrap()\n\n }\n\n}\n\n\n", "file_path": "common/cache/src/disk_cache_test.rs", "rank": 47, "score": 285210.4867351282 }, { "content": "pub fn merge_types(lhs_type: &DataType, rhs_type: &DataType) -> Result<DataType> {\n\n match (lhs_type, rhs_type) {\n\n (DataType::Null, _) => Ok(rhs_type.clone()),\n\n (_, DataType::Null) => Ok(lhs_type.clone()),\n\n (DataType::List(a), DataType::List(b)) => {\n\n if a.name() != b.name() {\n\n return Result::Err(ErrorCode::BadDataValueType(format!(\n\n \"Can't merge types from {} and {}\",\n\n lhs_type, rhs_type\n\n )));\n\n }\n\n let typ = merge_types(a.data_type(), b.data_type())?;\n\n Ok(DataType::List(Box::new(DataField::new(\n\n a.name(),\n\n typ,\n\n a.is_nullable() || b.is_nullable(),\n\n ))))\n\n }\n\n (DataType::Struct(a), DataType::Struct(b)) => {\n\n if a.len() != b.len() {\n", "file_path": "common/datavalues/src/types/data_type_coercion.rs", "rank": 48, "score": 281184.0130624003 }, { "content": "// coercion rules for equality operations. 
This is a superset of all numerical coercion rules.\n\npub fn equal_coercion(lhs_type: &DataType, rhs_type: &DataType) -> Result<DataType> {\n\n if lhs_type == rhs_type {\n\n // same type => equality is possible\n\n return Ok(lhs_type.clone());\n\n }\n\n\n\n numerical_coercion(lhs_type, rhs_type, true)\n\n}\n\n\n\n// aggregate_types aggregates data types for a multi-argument function.\n", "file_path": "common/datavalues/src/types/data_type_coercion.rs", "rank": 49, "score": 279816.1937720409 }, { "content": "fn get_database_from_url(url: &Url) -> Result<Option<&str>> {\n\n match url.path_segments() {\n\n None => Ok(None),\n\n Some(mut segments) => {\n\n let head = segments.next();\n\n\n\n if segments.next().is_some() {\n\n return Err(Error::Url(UrlError::Invalid));\n\n }\n\n\n\n match head {\n\n Some(database) if !database.is_empty() => Ok(Some(database)),\n\n _ => Ok(None),\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "common/clickhouse-srv/src/types/options.rs", "rank": 50, "score": 278668.1080013872 }, { "content": "#[test]\n\nfn use_database_test() -> Result<()> {\n\n expect_parse_ok(\n\n \"USe db1\",\n\n DfStatement::UseDatabase(DfUseDatabase {\n\n name: ObjectName(vec![Ident::new(\"db1\")]),\n\n }),\n\n )?;\n\n expect_parse_ok(\n\n \"use db1\",\n\n DfStatement::UseDatabase(DfUseDatabase {\n\n name: ObjectName(vec![Ident::new(\"db1\")]),\n\n }),\n\n )?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "query/src/sql/sql_parser_test.rs", "rank": 51, "score": 278454.45647278696 }, { "content": "#[test]\n\nfn test_fuse_commit_version() -> Result<()> {\n\n let v = &crate::configs::config::DATABEND_COMMIT_VERSION;\n\n assert!(v.len() > 0);\n\n Ok(())\n\n}\n", "file_path": "query/src/configs/config_test.rs", "rank": 52, "score": 278442.6064698473 }, { "content": "fn expect_parse_ok(sql: &str, expected: DfStatement) -> Result<()> {\n\n let (statements, _) = DfParser::parse_sql(sql)?;\n\n assert_eq!(\n\n statements.len(),\n\n 1,\n\n \"Expected to parse exactly one 
statement\"\n\n );\n\n assert_eq!(statements[0], expected);\n\n Ok(())\n\n}\n\n\n", "file_path": "query/src/sql/sql_parser_test.rs", "rank": 53, "score": 273821.7296906538 }, { "content": "pub fn next_port() -> u32 {\n\n 19000u32 + (*Seq::default() as u32)\n\n}\n\n\n\npub struct StoreTestContext {\n\n #[allow(dead_code)]\n\n meta_temp_dir: TempDir,\n\n\n\n #[allow(dead_code)]\n\n local_fs_tmp_dir: TempDir,\n\n\n\n // /// To hold a per-case logging guard\n\n // #[allow(dead_code)]\n\n // logging_guard: (WorkerGuard, DefaultGuard),\n\n pub config: configs::Config,\n\n\n\n pub meta_nodes: Vec<Arc<MetaNode>>,\n\n\n\n /// channel to send to stop StoreServer, and channel for waiting for shutdown to finish.\n\n pub channels: Option<(oneshot::Sender<()>, oneshot::Receiver<()>)>,\n\n}\n\n\n", "file_path": "store/src/tests/service.rs", "rank": 54, "score": 272229.2907567355 }, { "content": "fn parse_enum16(input: &str) -> Option<Vec<(String, i16)>> {\n\n parse_enum(EnumSize::Enum16, input)\n\n}\n\n\n", "file_path": "common/clickhouse-srv/src/types/column/factory.rs", "rank": 55, "score": 270522.58423190285 }, { "content": "fn parse_enum8(input: &str) -> Option<Vec<(String, i8)>> {\n\n match parse_enum(EnumSize::Enum8, input) {\n\n Some(result) => {\n\n let res: Vec<(String, i8)> = result\n\n .iter()\n\n .map(|(key, val)| (key.clone(), *val as i8))\n\n .collect();\n\n Some(res)\n\n }\n\n None => None,\n\n }\n\n}\n", "file_path": "common/clickhouse-srv/src/types/column/factory.rs", "rank": 56, "score": 270522.58423190285 }, { "content": "// test cases fro Cmd::IncrSeq:\n\n// case_name, txid, key, want\n\npub fn cases_incr_seq() -> Vec<(&'static str, Option<RaftTxId>, &'static str, u64)> {\n\n vec![\n\n (\"incr on none\", Some(RaftTxId::new(\"foo\", 1)), \"k1\", 1),\n\n (\"incr on existent\", Some(RaftTxId::new(\"foo\", 2)), \"k1\", 2),\n\n (\n\n \"dup: same serial, even with diff key, got the previous result\",\n\n Some(RaftTxId::new(\"foo\", 2)),\n\n \"k2\",\n\n 2,\n\n ),\n\n 
(\n\n \"diff client, same serial, not a dup request\",\n\n Some(RaftTxId::new(\"bar\", 2)),\n\n \"k2\",\n\n 1,\n\n ),\n\n (\"no txid, no de-dup\", None, \"k2\", 2),\n\n ]\n\n}\n\n\n", "file_path": "store/src/meta_service/raftmeta_test.rs", "rank": 57, "score": 268458.65559328045 }, { "content": "#[async_trait::async_trait]\n\npub trait Server {\n\n async fn shutdown(&mut self);\n\n\n\n async fn start(&mut self, listening: SocketAddr) -> Result<SocketAddr>;\n\n}\n\n\n\npub struct ShutdownHandle {\n\n shutdown: Arc<AtomicBool>,\n\n sessions: SessionManagerRef,\n\n services: Vec<Box<dyn Server>>,\n\n}\n\n\n\nimpl ShutdownHandle {\n\n pub fn create(sessions: SessionManagerRef) -> ShutdownHandle {\n\n ShutdownHandle {\n\n services: vec![],\n\n sessions,\n\n shutdown: Arc::new(AtomicBool::new(false)),\n\n }\n\n }\n", "file_path": "query/src/servers/server.rs", "rank": 58, "score": 263581.0727164885 }, { "content": "pub fn pretty_snapshot_iter(snap: impl Iterator<Item = sled::Result<(IVec, IVec)>>) -> Vec<String> {\n\n let mut res = vec![];\n\n for kv in snap {\n\n let (k, v) = kv.unwrap();\n\n let line = format!(\"{:?}:{}\", k, String::from_utf8(v.to_vec()).unwrap());\n\n res.push(line);\n\n }\n\n\n\n res\n\n}\n", "file_path": "store/src/meta_service/testing.rs", "rank": 59, "score": 262087.6240767758 }, { "content": "#[test]\n\nfn test_node_id_serde() -> anyhow::Result<()> {\n\n let id9: NodeId = 9;\n\n let id10: NodeId = 10;\n\n\n\n let got9 = id9.ser()?;\n\n let got10 = id10.ser()?;\n\n assert!(got9 < got10);\n\n\n\n let got9 = NodeId::de(got9)?;\n\n let got10 = NodeId::de(got10)?;\n\n assert_eq!(id9, got9);\n\n assert_eq!(id10, got10);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "store/src/meta_service/raft_types_test.rs", "rank": 60, "score": 262059.5392434163 }, { "content": "fn get_timezone(timezone: &Option<String>, tz: Tz) -> Result<Tz> {\n\n match timezone {\n\n None => Ok(tz),\n\n Some(t) => Ok(t.parse()?),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use 
super::*;\n\n\n\n #[test]\n\n fn test_parse_decimal() {\n\n assert_eq!(parse_decimal(\"Decimal(9, 4)\"), Some((9, 4, NoBits::N32)));\n\n assert_eq!(parse_decimal(\"Decimal(10, 4)\"), Some((10, 4, NoBits::N64)));\n\n assert_eq!(parse_decimal(\"Decimal(20, 4)\"), None);\n\n assert_eq!(parse_decimal(\"Decimal(2000, 4)\"), None);\n\n assert_eq!(parse_decimal(\"Decimal(3, 4)\"), None);\n\n assert_eq!(parse_decimal(\"Decimal(20, -4)\"), None);\n\n assert_eq!(parse_decimal(\"Decimal(0)\"), None);\n", "file_path": "common/clickhouse-srv/src/types/column/factory.rs", "rank": 61, "score": 261721.75515341866 }, { "content": "/// Rebuilds an `expr` as a projection on top of a collection of `Expression`'s.\n\n///\n\n/// For example, the Expressionession `a + b < 1` would require, as input, the 2\n\n/// individual columns, `a` and `b`. But, if the base exprs already\n\n/// contain the `a + b` result, then that may be used in lieu of the `a` and\n\n/// `b` columns.\n\n///\n\n/// This is useful in the context of a query like:\n\n///\n\n/// SELECT a + b < 1 ... 
GROUP BY a + b\n\n///\n\n/// where post-aggregation, `a + b` need not be a projection against the\n\n/// individual columns `a` and `b`, but rather it is a projection against the\n\n/// `a + b` found in the GROUP BY.\n\npub fn rebase_expr(expr: &Expression, base_exprs: &[Expression]) -> Result<Expression> {\n\n clone_with_replacement(expr, &|nest_exprs| {\n\n if base_exprs.contains(nest_exprs) {\n\n Ok(Some(expr_as_column_expr(nest_exprs)?))\n\n } else {\n\n Ok(None)\n\n }\n\n })\n\n}\n\n\n", "file_path": "common/planners/src/plan_expression_common.rs", "rank": 62, "score": 261411.23992129686 }, { "content": "#[test]\n\nfn test_node_id_range_serde() -> anyhow::Result<()> {\n\n let a: NodeId = 8;\n\n let b: NodeId = 11;\n\n let got = (a..b).ser()?;\n\n let want = (\n\n Bound::Included(sled::IVec::from(vec![0, 0, 0, 0, 0, 0, 0, 8])),\n\n Bound::Excluded(sled::IVec::from(vec![0, 0, 0, 0, 0, 0, 0, 11])),\n\n );\n\n assert_eq!(want, got);\n\n Ok(())\n\n}\n", "file_path": "store/src/meta_service/raft_types_test.rs", "rank": 63, "score": 258327.08708284807 }, { "content": "pub fn assert_unary_params<D: Display>(name: D, actual: usize) -> Result<()> {\n\n if actual != 1 {\n\n return Err(ErrorCode::NumberArgumentsNotMatch(format!(\n\n \"{} expect to have single parameters, but got {}\",\n\n name, actual\n\n )));\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "common/functions/src/aggregates/aggregator_common.rs", "rank": 64, "score": 253880.61842420656 }, { "content": "fn display_identifier_vec(f: &mut Formatter<'_>, name: &[Identifier]) -> std::fmt::Result {\n\n for i in 0..name.len() {\n\n write!(f, \"{}\", name[i])?;\n\n if i != name.len() - 1 {\n\n write!(f, \".\")?;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\nimpl Display for Identifier {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {\n\n if let Some(c) = self.quote {\n\n write!(f, \"{}\", c)?;\n\n write!(f, \"{}\", self.name)?;\n\n write!(f, \"{}\", c)\n\n } else {\n\n write!(f, \"{}\", self.name)\n\n }\n\n 
}\n\n}\n", "file_path": "query/src/sql/parser/ast/mod.rs", "rank": 65, "score": 252544.5688734848 }, { "content": "/// Creates a global tracing/logging subscriber which saves events in one log file.\n\npub fn init_global_tracing(app_name: &str, dir: &str) -> WorkerGuard {\n\n let (g, sub) = init_file_subscriber(app_name, dir);\n\n tracing::subscriber::set_global_default(sub).expect(\"error setting global tracing subscriber\");\n\n\n\n tracing::info!(\"initialized global tracing\");\n\n g\n\n}\n\n\n", "file_path": "common/tracing/src/logging.rs", "rank": 66, "score": 252304.4797033097 }, { "content": "fn parse_enum(size: EnumSize, input: &str) -> Option<Vec<(String, i16)>> {\n\n let size = match size {\n\n EnumSize::Enum8 => \"Enum8\",\n\n EnumSize::Enum16 => \"Enum16\",\n\n };\n\n\n\n let integer = optional(token('-'))\n\n .and(many1::<String, _, _>(digit()))\n\n .and_then(|(x, mut digits)| {\n\n if let Some(x) = x {\n\n digits.insert(0, x);\n\n }\n\n digits\n\n .parse::<i16>()\n\n .map_err(|_| StringStreamError::UnexpectedParse)\n\n });\n\n\n\n let word_syms = token('\\\\').with(any()).or(none_of(\"'\".chars()));\n\n let word = token('\\'').with(many(word_syms)).skip(token('\\''));\n\n\n", "file_path": "common/clickhouse-srv/src/types/column/factory.rs", "rank": 67, "score": 251610.28619894254 }, { "content": "pub fn init_sled_db(path: String) {\n\n let mut g = GLOBAL_SLED.as_ref().lock().unwrap();\n\n\n\n if g.is_some() {\n\n return;\n\n }\n\n\n\n *g = Some(GlobalSledDb {\n\n temp_dir: None,\n\n db: sled::open(path).expect(\"open global sled::Db\"),\n\n });\n\n}\n\n\n", "file_path": "store/src/meta_service/raft_db.rs", "rank": 68, "score": 251538.98350140697 }, { "content": "fn parse_fixed_string(source: &str) -> Option<usize> {\n\n if !source.starts_with(\"FixedString\") {\n\n return None;\n\n }\n\n\n\n let inner_size = &source[12..source.len() - 1];\n\n match inner_size.parse::<usize>() {\n\n Err(_) => None,\n\n Ok(value) => Some(value),\n\n }\n\n}\n\n\n", 
"file_path": "common/clickhouse-srv/src/types/column/factory.rs", "rank": 69, "score": 249745.86553228763 }, { "content": "pub fn col(name: &str) -> Expression {\n\n Expression::Column(name.to_string())\n\n}\n", "file_path": "common/planners/src/plan_expression_column.rs", "rank": 70, "score": 248089.25812874903 }, { "content": "pub fn construct_numeric_type(\n\n is_signed: bool,\n\n is_floating: bool,\n\n byte_size: usize,\n\n) -> Result<DataType> {\n\n match (is_signed, is_floating, byte_size) {\n\n (false, false, 1) => Ok(DataType::UInt8),\n\n (false, false, 2) => Ok(DataType::UInt16),\n\n (false, false, 4) => Ok(DataType::UInt32),\n\n (false, false, 8) => Ok(DataType::UInt64),\n\n (false, true, 4) => Ok(DataType::Float32),\n\n (false, true, 8) => Ok(DataType::Float64),\n\n (true, false, 1) => Ok(DataType::Int8),\n\n (true, false, 2) => Ok(DataType::Int16),\n\n (true, false, 4) => Ok(DataType::Int32),\n\n (true, false, 8) => Ok(DataType::Int64),\n\n (true, true, 1) => Ok(DataType::Float32),\n\n (true, true, 2) => Ok(DataType::Float32),\n\n (true, true, 4) => Ok(DataType::Float32),\n\n (true, true, 8) => Ok(DataType::Float64),\n", "file_path": "common/datavalues/src/types/data_type_coercion.rs", "rank": 71, "score": 247062.03758927056 }, { "content": "/// Write logs to file and rotation by HOUR.\n\npub fn init_tracing_with_file(app_name: &str, dir: &str, level: &str) -> Vec<WorkerGuard> {\n\n let mut guards = vec![];\n\n\n\n let (stdout_writer, stdout_guard) = tracing_appender::non_blocking(std::io::stdout());\n\n let stdout_logging_layer = Layer::new().with_writer(stdout_writer);\n\n guards.push(stdout_guard);\n\n\n\n let file_appender = RollingFileAppender::new(Rotation::HOURLY, dir, app_name);\n\n let (file_writer, file_guard) = tracing_appender::non_blocking(file_appender);\n\n let file_logging_layer = BunyanFormattingLayer::new(app_name.to_string(), file_writer);\n\n guards.push(file_guard);\n\n\n\n let subscriber = Registry::default()\n\n 
.with(EnvFilter::new(level))\n\n .with(stdout_logging_layer)\n\n .with(JsonStorageLayer)\n\n .with(file_logging_layer)\n\n .with(jaeger_layer());\n\n\n\n tracing::subscriber::set_global_default(subscriber)\n\n .expect(\"error setting global tracing subscriber\");\n\n\n\n guards\n\n}\n\n\n", "file_path": "common/tracing/src/logging.rs", "rank": 72, "score": 246789.50657550164 }, { "content": "fn concat_strings(l: &[u8], r: &[u8]) -> Vec<u8> {\n\n let mut s = Vec::with_capacity(l.len() + r.len());\n\n s.extend_from_slice(l);\n\n s.extend_from_slice(r);\n\n s\n\n}\n\n\n\nimpl Add for &DFStringArray {\n\n type Output = Result<DFStringArray>;\n\n\n\n fn add(self, rhs: Self) -> Self::Output {\n\n // broadcasting path\n\n if rhs.len() == 1 {\n\n let rhs = rhs.get(0);\n\n return match rhs {\n\n Some(rhs) => self.add(rhs),\n\n None => Ok(DFStringArray::full_null(self.len())),\n\n };\n\n }\n\n\n", "file_path": "common/datavalues/src/arrays/arithmetic.rs", "rank": 73, "score": 245755.94056277908 }, { "content": "#[test]\n\nfn show_queries() -> Result<()> {\n\n use sqlparser::dialect::GenericDialect;\n\n use sqlparser::parser::Parser;\n\n use sqlparser::tokenizer::Tokenizer;\n\n\n\n // positive case\n\n expect_parse_ok(\"SHOW TABLES\", DfStatement::ShowTables(DfShowTables::All))?;\n\n expect_parse_ok(\"SHOW TABLES;\", DfStatement::ShowTables(DfShowTables::All))?;\n\n expect_parse_ok(\"SHOW SETTINGS\", DfStatement::ShowSettings(DfShowSettings))?;\n\n expect_parse_ok(\n\n \"SHOW TABLES LIKE 'aaa'\",\n\n DfStatement::ShowTables(DfShowTables::Like(Ident::with_quote('\\'', \"aaa\"))),\n\n )?;\n\n\n\n expect_parse_ok(\n\n \"SHOW TABLES --comments should not in sql case1\",\n\n DfStatement::ShowTables(DfShowTables::All),\n\n )?;\n\n\n\n expect_parse_ok(\n", "file_path": "query/src/sql/sql_parser_test.rs", "rank": 74, "score": 245237.2859525914 }, { "content": "/// Creates a tracing/logging subscriber that is valid until the guards are dropped.\n\n/// The format layer logging 
span/event in plain text, without color, one event per line.\n\n/// This is useful in a unit test.\n\npub fn init_tracing(app_name: &str, dir: &str) -> (WorkerGuard, DefaultGuard) {\n\n let (g, sub) = init_file_subscriber(app_name, dir);\n\n\n\n let subs_guard = tracing::subscriber::set_default(sub);\n\n\n\n tracing::info!(\"initialized tracing\");\n\n (g, subs_guard)\n\n}\n\n\n", "file_path": "common/tracing/src/logging.rs", "rank": 75, "score": 245196.8808828583 }, { "content": "fn parse_array_type(source: &str) -> Option<&str> {\n\n if !source.starts_with(\"Array\") {\n\n return None;\n\n }\n\n\n\n let inner_type = &source[6..source.len() - 1];\n\n Some(inner_type)\n\n}\n\n\n", "file_path": "common/clickhouse-srv/src/types/column/factory.rs", "rank": 76, "score": 244862.42031172116 }, { "content": "fn parse_nullable_type(source: &str) -> Option<&str> {\n\n if !source.starts_with(\"Nullable\") {\n\n return None;\n\n }\n\n\n\n let inner_type = &source[9..source.len() - 1];\n\n\n\n if inner_type.starts_with(\"Nullable\") {\n\n return None;\n\n }\n\n\n\n Some(inner_type)\n\n}\n\n\n", "file_path": "common/clickhouse-srv/src/types/column/factory.rs", "rank": 77, "score": 244862.42031172116 }, { "content": "// Can works before expression,filter,having in PlanBuilder\n\npub fn validate_expression(expr: &Expression) -> Result<()> {\n\n let validator = ExpressionValidator::new(&|expr: &Expression| match expr {\n\n Expression::ScalarFunction { op, args } => {\n\n let func = FunctionFactory::get(op)?;\n\n validate_function_arg(func, args)\n\n }\n\n\n\n // Currently no need to check UnaryExpression and BinaryExpression\n\n // todo: AggregateFunction validation after generic AggregateFunctions\n\n _ => Ok(()),\n\n });\n\n\n\n let validator = expr.accept(validator)?;\n\n match validator.error {\n\n Some(err) => Err(err),\n\n None => Ok(()),\n\n }\n\n}\n", "file_path": "common/planners/src/plan_expression_validator.rs", "rank": 78, "score": 244290.15015605418 }, { "content": 
"/// Coercion rule for numerical types: The type that both lhs and rhs\n\n/// can be casted to for numerical calculation, while maintaining\n\n/// maximum precision\n\npub fn numerical_coercion(\n\n lhs_type: &DataType,\n\n rhs_type: &DataType,\n\n allow_overflow: bool,\n\n) -> Result<DataType> {\n\n let has_float = is_floating(lhs_type) || is_floating(rhs_type);\n\n let has_integer = is_integer(lhs_type) || is_integer(rhs_type);\n\n let has_signed = is_signed_numeric(lhs_type) || is_signed_numeric(rhs_type);\n\n\n\n let size_of_lhs = numeric_byte_size(lhs_type)?;\n\n let size_of_rhs = numeric_byte_size(rhs_type)?;\n\n\n\n let max_size_of_unsigned_integer = cmp::max(\n\n if is_signed_numeric(lhs_type) {\n\n 0\n\n } else {\n\n size_of_lhs\n\n },\n\n if is_signed_numeric(rhs_type) {\n\n 0\n", "file_path": "common/datavalues/src/types/data_type_coercion.rs", "rank": 79, "score": 243544.25105517026 }, { "content": "#[test]\n\nfn test_version_function() -> Result<()> {\n\n #[allow(dead_code)]\n\n struct Test {\n\n name: &'static str,\n\n display: &'static str,\n\n nullable: bool,\n\n columns: Vec<DataColumn>,\n\n expect: DataColumn,\n\n error: &'static str,\n\n func: Box<dyn Function>,\n\n }\n\n\n\n let tests = vec![Test {\n\n name: \"version-function-passed\",\n\n display: \"version\",\n\n nullable: false,\n\n func: VersionFunction::try_create(\"version\")?,\n\n columns: vec![Series::new(vec![\n\n \"DatabendQuery v-0.1.0-3afb26c(1.54.0-nightly-2021-06-09T07:56:09.461981495+00:00)\",\n\n ])\n", "file_path": "common/functions/src/scalars/udfs/version_test.rs", "rank": 80, "score": 241767.32040485763 }, { "content": "#[inline]\n\npub fn interval_arithmetic_coercion(\n\n op: &DataValueArithmeticOperator,\n\n lhs_type: &DataType,\n\n rhs_type: &DataType,\n\n) -> Result<DataType> {\n\n let e = Result::Err(ErrorCode::BadDataValueType(format!(\n\n \"DataValue Error: Unsupported date coercion ({:?}) {} ({:?})\",\n\n lhs_type, op, rhs_type\n\n )));\n\n\n\n // only allow 
date/datetime [+/-] interval\n\n if !(is_date_or_date_time(lhs_type) && is_interval(rhs_type)\n\n || is_date_or_date_time(rhs_type) && is_interval(lhs_type))\n\n {\n\n return e;\n\n }\n\n\n\n match op {\n\n DataValueArithmeticOperator::Plus | DataValueArithmeticOperator::Minus => {\n\n if is_date_or_date_time(lhs_type) {\n\n Ok(lhs_type.clone())\n\n } else {\n\n Ok(rhs_type.clone())\n\n }\n\n }\n\n _ => e,\n\n }\n\n}\n\n\n", "file_path": "common/datavalues/src/types/data_type_coercion.rs", "rank": 81, "score": 240554.22921777016 }, { "content": "#[inline]\n\npub fn numerical_arithmetic_coercion(\n\n op: &DataValueArithmeticOperator,\n\n lhs_type: &DataType,\n\n rhs_type: &DataType,\n\n) -> Result<DataType> {\n\n // error on any non-numeric type\n\n if !is_numeric(lhs_type) || !is_numeric(rhs_type) {\n\n return Result::Err(ErrorCode::BadDataValueType(format!(\n\n \"DataValue Error: Unsupported ({:?}) {} ({:?})\",\n\n lhs_type, op, rhs_type\n\n )));\n\n };\n\n\n\n let has_signed = is_signed_numeric(lhs_type) || is_signed_numeric(rhs_type);\n\n let has_float = is_floating(lhs_type) || is_floating(rhs_type);\n\n let max_size = cmp::max(numeric_byte_size(lhs_type)?, numeric_byte_size(rhs_type)?);\n\n\n\n match op {\n\n DataValueArithmeticOperator::Plus | DataValueArithmeticOperator::Mul => {\n\n construct_numeric_type(has_signed, has_float, next_size(max_size))\n", "file_path": "common/datavalues/src/types/data_type_coercion.rs", "rank": 82, "score": 240554.22921777016 }, { "content": "#[inline]\n\npub fn datetime_arithmetic_coercion(\n\n op: &DataValueArithmeticOperator,\n\n lhs_type: &DataType,\n\n rhs_type: &DataType,\n\n) -> Result<DataType> {\n\n let e = Result::Err(ErrorCode::BadDataValueType(format!(\n\n \"DataValue Error: Unsupported date coercion ({:?}) {} ({:?})\",\n\n lhs_type, op, rhs_type\n\n )));\n\n\n\n if !is_date_or_date_time(lhs_type) && !is_date_or_date_time(rhs_type) {\n\n return e;\n\n }\n\n\n\n let mut a = lhs_type.clone();\n\n let mut b = 
rhs_type.clone();\n\n if !is_date_or_date_time(&a) {\n\n a = rhs_type.clone();\n\n b = lhs_type.clone();\n\n }\n", "file_path": "common/datavalues/src/types/data_type_coercion.rs", "rank": 83, "score": 240554.22921777016 }, { "content": "fn get_u64_data(block: Block<Complex>) -> Result<u64> {\n\n match block.get(0, \"c\") {\n\n Ok(value) => Ok(value),\n\n Err(error) => Err(ErrorCode::UnknownException(format!(\n\n \"Cannot get data {:?}\",\n\n error\n\n ))),\n\n }\n\n}\n\n\n\nasync fn query(handler: &mut ClientHandle, query: &str) -> Result<Block<Complex>> {\n\n let query_result = handler.query(query);\n\n match query_result.fetch_all().await {\n\n Ok(block) => Ok(block),\n\n Err(error) => Err(ErrorCode::UnknownException(format!(\n\n \"Error query: {:?}\",\n\n error\n\n ))),\n\n }\n\n}\n", "file_path": "query/src/servers/clickhouse/clickhouse_handler_test.rs", "rank": 84, "score": 240324.99519839662 }, { "content": "pub trait Base {\n\n fn scale(self, scale: i64) -> i64;\n\n}\n\n\n", "file_path": "common/clickhouse-srv/src/types/decimal.rs", "rank": 85, "score": 239465.59829240767 }, { "content": "fn make_column_def(name: impl Into<String>, data_type: DataType) -> ColumnDef {\n\n ColumnDef {\n\n name: Ident {\n\n value: name.into(),\n\n quote_style: None,\n\n },\n\n data_type,\n\n collation: None,\n\n options: vec![],\n\n }\n\n}\n\n\n", "file_path": "query/src/sql/sql_parser_test.rs", "rank": 86, "score": 238622.1241141763 }, { "content": "/// Create a file based tracing/logging subscriber.\n\n/// A guard must be held during using the logging.\n\n/// The format layer logging span/event in plain text, without color, one event per line.\n\n/// Optionally it adds a layer to send to opentelemetry if env var `DATABEND_JAEGER` is present.\n\npub fn init_file_subscriber(app_name: &str, dir: &str) -> (WorkerGuard, impl Subscriber) {\n\n let path_str = dir.to_string() + \"/\" + app_name;\n\n let path: &Path = path_str.as_ref();\n\n\n\n // open log file\n\n\n\n let mut 
open_options = OpenOptions::new();\n\n open_options.append(true).create(true);\n\n\n\n let mut open_res = open_options.open(path);\n\n if open_res.is_err() {\n\n if let Some(parent) = path.parent() {\n\n std::fs::create_dir_all(parent).unwrap();\n\n open_res = open_options.open(path);\n\n }\n\n }\n\n\n\n let f = open_res.unwrap();\n\n\n\n // build subscriber\n", "file_path": "common/tracing/src/logging.rs", "rank": 87, "score": 238575.01938519103 }, { "content": "#[inline]\n\npub fn numerical_unary_arithmetic_coercion(\n\n op: &DataValueArithmeticOperator,\n\n val_type: &DataType,\n\n) -> Result<DataType> {\n\n // error on any non-numeric type\n\n if !is_numeric(val_type) {\n\n return Result::Err(ErrorCode::BadDataValueType(format!(\n\n \"DataValue Error: Unsupported ({:?})\",\n\n val_type\n\n )));\n\n };\n\n\n\n match op {\n\n DataValueArithmeticOperator::Plus => Ok(val_type.clone()),\n\n DataValueArithmeticOperator::Minus => {\n\n let has_float = is_floating(val_type);\n\n let has_signed = is_signed_numeric(val_type);\n\n let numeric_size = numeric_byte_size(val_type)?;\n\n let max_size = if has_signed {\n\n numeric_size\n", "file_path": "common/datavalues/src/types/data_type_coercion.rs", "rank": 88, "score": 237684.22236728354 }, { "content": "type StreamLenFuture = Pin<Box<dyn Future<Output = Result<i64, Error>> + Send>>;\n\n\n", "file_path": "query/src/datasources/dal/impls/aws_s3/s3_input_stream.rs", "rank": 89, "score": 235699.46057855655 }, { "content": "#[test]\n\nfn test_to_type_name_function() -> Result<()> {\n\n #[allow(dead_code)]\n\n struct Test {\n\n name: &'static str,\n\n display: &'static str,\n\n nullable: bool,\n\n arg_names: Vec<&'static str>,\n\n columns: Vec<DataColumn>,\n\n expect: DataColumn,\n\n error: &'static str,\n\n func: Box<dyn Function>,\n\n }\n\n\n\n let schema = DataSchemaRefExt::create(vec![DataField::new(\"a\", DataType::Boolean, false)]);\n\n\n\n let tests = vec![Test {\n\n name: \"to_type_name-example-passed\",\n\n display: 
\"toTypeName\",\n\n nullable: false,\n\n arg_names: vec![\"a\"],\n", "file_path": "common/functions/src/scalars/udfs/to_type_name_test.rs", "rank": 90, "score": 235330.783413973 }, { "content": "#[allow(dead_code)]\n\npub fn project_col_idx(schema: &DataSchemaRef, projection: &DataSchemaRef) -> Result<Vec<usize>> {\n\n let col_map = schema\n\n .fields()\n\n .iter()\n\n .enumerate()\n\n .fold(HashMap::new(), |mut v, (i, item)| {\n\n v.insert(item.name().to_string(), i);\n\n v\n\n });\n\n\n\n let mut proj_idx = vec![];\n\n\n\n for col in projection.fields() {\n\n let name = col.name();\n\n if let Some(idx) = col_map.get(col.name()) {\n\n proj_idx.push(*idx)\n\n } else {\n\n return Err(ErrorCode::IllegalSchema(format!(\n\n \"column [{}] specified in projection, but does not exist in schema\",\n\n name\n\n )));\n\n }\n\n }\n\n Ok(proj_idx)\n\n}\n", "file_path": "query/src/datasources/table/fuse/util/projection_helper.rs", "rank": 91, "score": 234749.85937325016 }, { "content": "#[inline]\n\npub fn get_bit(data: &[u8], i: usize) -> bool {\n\n (data[i >> 3] & BIT_MASK[i & 7]) != 0\n\n}\n\n\n\n/// Returns whether bit at position `i` in `data` is set or not.\n\n///\n\n/// # Safety\n\n///\n\n/// Note this doesn't do any bound checking, for performance reason. 
The caller is\n\n/// responsible to guarantee that `i` is within bounds.\n\n#[inline]\n\npub unsafe fn get_bit_raw(data: *const u8, i: usize) -> bool {\n\n (*data.add(i >> 3) & BIT_MASK[i & 7]) != 0\n\n}\n\n\n\n/// Sets bit at position `i` for `data`\n", "file_path": "common/datavalues/src/bit_util.rs", "rank": 92, "score": 234185.60586776547 }, { "content": "/// Assert with order sensitive.\n\n/// ['a', 'b'] not equals ['b', 'a']\n\npub fn assert_series_eq_with_name(test_name: &str, expect: Vec<&str>, series: &[Series]) {\n\n let expected_lines: Vec<String> = expect.iter().map(|&s| s.into()).collect();\n\n let formatted = pretty_format_series(series).unwrap();\n\n let actual_lines: Vec<&str> = formatted.trim().lines().collect();\n\n\n\n assert_eq!(\n\n expected_lines, actual_lines,\n\n \"{:#?}\\n\\nexpected:\\n\\n{:#?}\\nactual:\\n\\n{:#?}\\n\\n\",\n\n test_name, expected_lines, actual_lines\n\n );\n\n}\n\n\n", "file_path": "common/datavalues/src/series/series_debug.rs", "rank": 93, "score": 233999.07558001723 }, { "content": "/// Rebuilds an `expr` using the inner expr for expression\n\n/// `(a + b) as c` ---> `(a + b)`\n\npub fn unwrap_alias_exprs(expr: &Expression) -> Result<Expression> {\n\n clone_with_replacement(expr, &|nest_exprs| match nest_exprs {\n\n Expression::Alias(_, nested_expr) => Ok(Some(*nested_expr.clone())),\n\n _ => Ok(None),\n\n })\n\n}\n", "file_path": "common/planners/src/plan_expression_common.rs", "rank": 94, "score": 232806.0321166094 }, { "content": "/// Convert any `Expression` to an `Expression::Column`.\n\npub fn expr_as_column_expr(expr: &Expression) -> Result<Expression> {\n\n match expr {\n\n Expression::Column(_) => Ok(expr.clone()),\n\n _ => Ok(Expression::Column(expr.column_name())),\n\n }\n\n}\n\n\n", "file_path": "common/planners/src/plan_expression_common.rs", "rank": 95, "score": 232799.9469137725 }, { "content": "#[test]\n\nfn test_default_config() -> Result<()> {\n\n let expect = Config {\n\n log: 
LogConfig::default(),\n\n meta: MetaConfig::default(),\n\n store: StoreConfig::default(),\n\n query: QueryConfig::default(),\n\n config_file: \"\".to_string(),\n\n };\n\n let actual = Config::default();\n\n assert_eq!(actual, expect);\n\n Ok(())\n\n}\n\n\n\n// From env, defaulting.\n", "file_path": "query/src/configs/config_test.rs", "rank": 96, "score": 232303.0276457299 }, { "content": "#[test]\n\n#[ignore]\n\nfn test_args_config() -> Result<()> {\n\n let actual = Config::load_from_args();\n\n assert_eq!(\"INFO\", actual.log.log_level);\n\n Ok(())\n\n}\n\n\n\n// From file NotFound.\n", "file_path": "query/src/configs/config_test.rs", "rank": 97, "score": 232303.0276457299 }, { "content": "#[test]\n\n#[ignore]\n\nfn test_file_config() -> Result<()> {\n\n let toml_str = r#\"\n\n[log_config]\n\nlog_level = \"ERROR\"\n\nlog_dir = \"./_logs\"\n\n \"#;\n\n\n\n let actual = Config::load_from_toml_str(toml_str)?;\n\n assert_eq!(\"INFO\", actual.log.log_level);\n\n\n\n std::env::set_var(\"QUERY_LOG_LEVEL\", \"DEBUG\");\n\n let env = Config::load_from_env(&actual)?;\n\n assert_eq!(\"INFO\", env.log.log_level);\n\n std::env::remove_var(\"QUERY_LOG_LEVEL\");\n\n Ok(())\n\n}\n\n\n\n// From env, load config file and ignore the rest settings.\n", "file_path": "query/src/configs/config_test.rs", "rank": 98, "score": 232303.0276457299 }, { "content": "#[test]\n\nfn test_env_config() -> Result<()> {\n\n std::env::set_var(\"LOG_LEVEL\", \"DEBUG\");\n\n std::env::set_var(\"QUERY_TENANT\", \"tenant-1\");\n\n std::env::set_var(\"QUERY_NAMESPACE\", \"cluster-1\");\n\n std::env::set_var(\"QUERY_MYSQL_HANDLER_HOST\", \"0.0.0.0\");\n\n std::env::set_var(\"QUERY_MYSQL_HANDLER_PORT\", \"3306\");\n\n std::env::set_var(\"QUERY_MAX_ACTIVE_SESSIONS\", \"255\");\n\n std::env::set_var(\"QUERY_CLICKHOUSE_HANDLER_HOST\", \"1.2.3.4\");\n\n std::env::set_var(\"QUERY_CLICKHOUSE_HANDLER_PORT\", \"9000\");\n\n std::env::set_var(\"QUERY_FLIGHT_API_ADDRESS\", \"1.2.3.4:9091\");\n\n 
std::env::set_var(\"QUERY_HTTP_API_ADDRESS\", \"1.2.3.4:8081\");\n\n std::env::set_var(\"QUERY_METRIC_API_ADDRESS\", \"1.2.3.4:7071\");\n\n std::env::set_var(\"QUERY_DISABLE_LOCAL_DATABASE_ENGINE\", \"1\");\n\n std::env::set_var(\"STORE_ADDRESS\", \"1.2.3.4:1234\");\n\n std::env::set_var(\"STORE_USERNAME\", \"admin\");\n\n std::env::set_var(\"STORE_PASSWORD\", \"password!\");\n\n std::env::remove_var(\"CONFIG_FILE\");\n\n\n\n let default = Config::default();\n\n let configured = Config::load_from_env(&default)?;\n", "file_path": "query/src/configs/config_test.rs", "rank": 99, "score": 232303.0276457299 } ]
Rust
beacon_node/network/src/sync/range_sync/chain_collection.rs
protolambda/lighthouse
3acb3cc640c7a1fe4aab94ce35be1c300a4146d8
use super::chain::{ChainSyncingState, ProcessingResult, SyncingChain}; use crate::message_processor::PeerSyncInfo; use crate::sync::network_context::SyncNetworkContext; use beacon_chain::{BeaconChain, BeaconChainTypes}; use eth2_libp2p::PeerId; use slog::{debug, warn}; use std::sync::Weak; use types::EthSpec; use types::{Hash256, Slot}; pub enum SyncState { Finalized, Head, Idle, } pub struct ChainCollection<T: BeaconChainTypes> { finalized_chains: Vec<SyncingChain<T>>, head_chains: Vec<SyncingChain<T>>, sync_state: SyncState, } impl<T: BeaconChainTypes> ChainCollection<T> { pub fn new() -> Self { ChainCollection { sync_state: SyncState::Idle, finalized_chains: Vec::new(), head_chains: Vec::new(), } } pub fn sync_state(&self) -> &SyncState { &self.sync_state } pub fn fully_synced_peer_found(&mut self) { if let SyncState::Head = self.sync_state { if self.head_chains.is_empty() { self.sync_state = SyncState::Idle; } } } pub fn set_head_sync(&mut self) { if let SyncState::Idle = self.sync_state { self.sync_state = SyncState::Head; } } fn finalized_syncing_index(&self) -> Option<usize> { self.finalized_chains .iter() .enumerate() .find_map(|(index, chain)| { if chain.state == ChainSyncingState::Syncing { Some(index) } else { None } }) } pub fn purge_finalized(&mut self, local_finalized_slot: Slot) { self.finalized_chains .retain(|chain| chain.target_head_slot > local_finalized_slot); } pub fn purge_head(&mut self, head_slot: Slot) { self.head_chains .retain(|chain| chain.target_head_slot > head_slot); } fn get_chain<'a>( chain: &'a mut [SyncingChain<T>], target_head_root: Hash256, target_head_slot: Slot, ) -> Option<&'a mut SyncingChain<T>> { chain.iter_mut().find(|iter_chain| { iter_chain.target_head_root == target_head_root && iter_chain.target_head_slot == target_head_slot }) } pub fn get_finalized_mut( &mut self, target_head_root: Hash256, target_head_slot: Slot, ) -> Option<&mut SyncingChain<T>> { ChainCollection::get_chain( self.finalized_chains.as_mut(), 
target_head_root, target_head_slot, ) } pub fn get_head_mut( &mut self, target_head_root: Hash256, target_head_slot: Slot, ) -> Option<&mut SyncingChain<T>> { ChainCollection::get_chain( self.head_chains.as_mut(), target_head_root, target_head_slot, ) } pub fn update_finalized( &mut self, beacon_chain: Weak<BeaconChain<T>>, network: &mut SyncNetworkContext, log: &slog::Logger, ) { let local_info = match beacon_chain.upgrade() { Some(chain) => PeerSyncInfo::from(&chain), None => { warn!(log, "Beacon chain dropped. Chains not updated"); return; } }; let local_slot = local_info .finalized_epoch .start_slot(T::EthSpec::slots_per_epoch()); self.purge_finalized(local_slot); self.finalized_chains .retain(|chain| !chain.peer_pool.is_empty()); self.purge_head(local_info.head_slot); self.finalized_chains .retain(|chain| !chain.peer_pool.is_empty()); if let Some(index) = self.finalized_syncing_index() { let syncing_chain_peer_count = self.finalized_chains[index].peer_pool.len(); if let Some((new_index, chain)) = self.finalized_chains .iter_mut() .enumerate() .find(|(iter_index, chain)| { *iter_index != index && chain.peer_pool.len() > syncing_chain_peer_count }) { debug!(log, "Switching finalized chains to sync"; "new_target_root" => format!("{}", chain.target_head_root), "new_end_slot" => chain.target_head_slot, "new_start_slot"=> chain.start_slot); self.finalized_chains[index].stop_syncing(); self.finalized_chains[new_index].start_syncing(network, local_slot, log); self.sync_state = SyncState::Finalized; } } else if let Some(chain) = self .finalized_chains .iter_mut() .max_by_key(|chain| chain.peer_pool.len()) { debug!(log, "New finalized chain started syncing"; "new_target_root" => format!("{}", chain.target_head_root), "new_end_slot" => chain.target_head_slot, "new_start_slot"=> chain.start_slot); chain.start_syncing(network, local_slot, log); self.sync_state = SyncState::Finalized; } else { if self.head_chains.is_empty() { self.sync_state = SyncState::Idle; } else { 
self.sync_state = SyncState::Head; } } } pub fn new_finalized_chain( &mut self, local_finalized_slot: Slot, target_head: Hash256, target_slot: Slot, peer_id: PeerId, ) { self.finalized_chains.push(SyncingChain::new( local_finalized_slot, target_slot, target_head, peer_id, )); } pub fn new_head_chain( &mut self, network: &mut SyncNetworkContext, remote_finalized_slot: Slot, target_head: Hash256, target_slot: Slot, peer_id: PeerId, log: &slog::Logger, ) { self.head_chains.iter_mut().for_each(|chain| { chain.peer_pool.remove(&peer_id); }); self.head_chains.retain(|chain| !chain.peer_pool.is_empty()); let mut new_head_chain = SyncingChain::new(remote_finalized_slot, target_slot, target_head, peer_id); new_head_chain.start_syncing(network, remote_finalized_slot, log); self.head_chains.push(new_head_chain); } pub fn is_finalizing_sync(&self) -> bool { !self.finalized_chains.is_empty() } fn request_function<'a, F, I>(chain: I, mut func: F) -> Option<(usize, ProcessingResult)> where I: Iterator<Item = &'a mut SyncingChain<T>>, F: FnMut(&'a mut SyncingChain<T>) -> Option<ProcessingResult>, { chain .enumerate() .find_map(|(index, chain)| Some((index, func(chain)?))) } pub fn finalized_request<F>(&mut self, func: F) -> Option<(usize, ProcessingResult)> where F: FnMut(&mut SyncingChain<T>) -> Option<ProcessingResult>, { ChainCollection::request_function(self.finalized_chains.iter_mut(), func) } pub fn head_request<F>(&mut self, func: F) -> Option<(usize, ProcessingResult)> where F: FnMut(&mut SyncingChain<T>) -> Option<ProcessingResult>, { ChainCollection::request_function(self.head_chains.iter_mut(), func) } #[allow(dead_code)] pub fn head_finalized_request<F>(&mut self, func: F) -> Option<(usize, ProcessingResult)> where F: FnMut(&mut SyncingChain<T>) -> Option<ProcessingResult>, { ChainCollection::request_function( self.finalized_chains .iter_mut() .chain(self.head_chains.iter_mut()), func, ) } pub fn remove_finalized_chain(&mut self, index: usize) -> SyncingChain<T> { 
self.finalized_chains.swap_remove(index) } pub fn remove_head_chain(&mut self, index: usize) -> SyncingChain<T> { self.head_chains.swap_remove(index) } pub fn remove_chain(&mut self, index: usize) -> SyncingChain<T> { if index >= self.finalized_chains.len() { let index = index - self.finalized_chains.len(); self.head_chains.swap_remove(index) } else { self.finalized_chains.swap_remove(index) } } }
use super::chain::{ChainSyncingState, ProcessingResult, SyncingChain}; use crate::message_processor::PeerSyncInfo; use crate::sync::network_context::SyncNetworkContext; use beacon_chain::{BeaconChain, BeaconChainTypes}; use eth2_libp2p::PeerId; use slog::{debug, warn}; use std::sync::Weak; use types::EthSpec; use types::{Hash256, Slot}; pub enum SyncState { Finalized, Head, Idle, } pub struct ChainCollection<T: BeaconChainTypes> { finalized_chains: Vec<SyncingChain<T>>, head_chains: Vec<SyncingChain<T>>, sync_state: SyncState, } impl<T: BeaconChainTypes> ChainCollection<T> { pub fn new() -> Self { ChainCollection { sync_state: SyncState::Idle, finalized_chains: Vec::new(), head_chains: Vec::new(), } } pub fn sync_state(&self) -> &SyncState { &self.sync_state } pub fn fully_synced_peer_found(&mut self) { if let SyncState::Head = self.sync_state { if self.head_chains.is_empty() { self.sync_state = SyncState::Idle; } } } pub fn set_head_sync(&mut self) { if let SyncState::Idle = self.sync_state { self.sync_state = SyncState::Head; } } fn finalized_syncing_index(&self) -> Option<usize> { self.finalized_chains .iter() .enumerate() .find_map(|(index, chain)| { if chain.state == ChainSyncingState::Syncing { Some(index) } else { None } }) } pub fn purge_finalized(&mut self, local_finalized_slot: Slot) { self.finalized_chains .retain(|chain| chain.target_head_slot > local_finalized_slot); } pub fn purge_head(&mut self, head_slot: Slot) { self.head_chains .retain(|chain| chain.target_head_slot > head_slot); } fn get_chain<'a>( chain: &'a mut [SyncingChain<T>], target_head_root: Hash256, target_head_slot: Slot, ) -> Option<&'a mut SyncingChain<T>> { chain.iter_mut().find(|iter_chain| { iter_chain.target_head_root == target_head_root && iter_chain.target_head_slot == target_head_slot }) } pub fn get_finalized_mut( &mut self, target_head_root: Hash256, target_head_slot: Slot, ) -> Option<&mut SyncingChain<T>> { ChainCollection::get_chain( self.finalized_chains.as_mut(), 
target_head_root, target_head_slot, ) }
pub fn update_finalized( &mut self, beacon_chain: Weak<BeaconChain<T>>, network: &mut SyncNetworkContext, log: &slog::Logger, ) { let local_info = match beacon_chain.upgrade() { Some(chain) => PeerSyncInfo::from(&chain), None => { warn!(log, "Beacon chain dropped. Chains not updated"); return; } }; let local_slot = local_info .finalized_epoch .start_slot(T::EthSpec::slots_per_epoch()); self.purge_finalized(local_slot); self.finalized_chains .retain(|chain| !chain.peer_pool.is_empty()); self.purge_head(local_info.head_slot); self.finalized_chains .retain(|chain| !chain.peer_pool.is_empty()); if let Some(index) = self.finalized_syncing_index() { let syncing_chain_peer_count = self.finalized_chains[index].peer_pool.len(); if let Some((new_index, chain)) = self.finalized_chains .iter_mut() .enumerate() .find(|(iter_index, chain)| { *iter_index != index && chain.peer_pool.len() > syncing_chain_peer_count }) { debug!(log, "Switching finalized chains to sync"; "new_target_root" => format!("{}", chain.target_head_root), "new_end_slot" => chain.target_head_slot, "new_start_slot"=> chain.start_slot); self.finalized_chains[index].stop_syncing(); self.finalized_chains[new_index].start_syncing(network, local_slot, log); self.sync_state = SyncState::Finalized; } } else if let Some(chain) = self .finalized_chains .iter_mut() .max_by_key(|chain| chain.peer_pool.len()) { debug!(log, "New finalized chain started syncing"; "new_target_root" => format!("{}", chain.target_head_root), "new_end_slot" => chain.target_head_slot, "new_start_slot"=> chain.start_slot); chain.start_syncing(network, local_slot, log); self.sync_state = SyncState::Finalized; } else { if self.head_chains.is_empty() { self.sync_state = SyncState::Idle; } else { self.sync_state = SyncState::Head; } } } pub fn new_finalized_chain( &mut self, local_finalized_slot: Slot, target_head: Hash256, target_slot: Slot, peer_id: PeerId, ) { self.finalized_chains.push(SyncingChain::new( local_finalized_slot, target_slot, 
target_head, peer_id, )); } pub fn new_head_chain( &mut self, network: &mut SyncNetworkContext, remote_finalized_slot: Slot, target_head: Hash256, target_slot: Slot, peer_id: PeerId, log: &slog::Logger, ) { self.head_chains.iter_mut().for_each(|chain| { chain.peer_pool.remove(&peer_id); }); self.head_chains.retain(|chain| !chain.peer_pool.is_empty()); let mut new_head_chain = SyncingChain::new(remote_finalized_slot, target_slot, target_head, peer_id); new_head_chain.start_syncing(network, remote_finalized_slot, log); self.head_chains.push(new_head_chain); } pub fn is_finalizing_sync(&self) -> bool { !self.finalized_chains.is_empty() } fn request_function<'a, F, I>(chain: I, mut func: F) -> Option<(usize, ProcessingResult)> where I: Iterator<Item = &'a mut SyncingChain<T>>, F: FnMut(&'a mut SyncingChain<T>) -> Option<ProcessingResult>, { chain .enumerate() .find_map(|(index, chain)| Some((index, func(chain)?))) } pub fn finalized_request<F>(&mut self, func: F) -> Option<(usize, ProcessingResult)> where F: FnMut(&mut SyncingChain<T>) -> Option<ProcessingResult>, { ChainCollection::request_function(self.finalized_chains.iter_mut(), func) } pub fn head_request<F>(&mut self, func: F) -> Option<(usize, ProcessingResult)> where F: FnMut(&mut SyncingChain<T>) -> Option<ProcessingResult>, { ChainCollection::request_function(self.head_chains.iter_mut(), func) } #[allow(dead_code)] pub fn head_finalized_request<F>(&mut self, func: F) -> Option<(usize, ProcessingResult)> where F: FnMut(&mut SyncingChain<T>) -> Option<ProcessingResult>, { ChainCollection::request_function( self.finalized_chains .iter_mut() .chain(self.head_chains.iter_mut()), func, ) } pub fn remove_finalized_chain(&mut self, index: usize) -> SyncingChain<T> { self.finalized_chains.swap_remove(index) } pub fn remove_head_chain(&mut self, index: usize) -> SyncingChain<T> { self.head_chains.swap_remove(index) } pub fn remove_chain(&mut self, index: usize) -> SyncingChain<T> { if index >= 
self.finalized_chains.len() { let index = index - self.finalized_chains.len(); self.head_chains.swap_remove(index) } else { self.finalized_chains.swap_remove(index) } } }
pub fn get_head_mut( &mut self, target_head_root: Hash256, target_head_slot: Slot, ) -> Option<&mut SyncingChain<T>> { ChainCollection::get_chain( self.head_chains.as_mut(), target_head_root, target_head_slot, ) }
function_block-full_function
[ { "content": "/// Ensures that the finalized root can be set to all values in `roots`.\n\nfn test_update_finalized_root(roots: &[(Hash256, Slot)]) {\n\n let harness = &FORKED_HARNESS;\n\n\n\n let lmd = harness.new_fork_choice();\n\n\n\n for (root, _slot) in roots.iter().rev() {\n\n let block = harness\n\n .store_clone()\n\n .get::<BeaconBlock<TestEthSpec>>(root)\n\n .expect(\"block should exist\")\n\n .expect(\"db should not error\");\n\n lmd.update_finalized_root(&block, *root)\n\n .expect(\"finalized root should update for faulty fork\");\n\n\n\n assert_eq!(\n\n lmd.verify_integrity(),\n\n Ok(()),\n\n \"Tree integrity should be maintained after updating the finalized root\"\n\n );\n\n }\n\n}\n\n\n\n/// Iterates from low-to-high slot through the faulty roots, updating the finalized root.\n", "file_path": "eth2/lmd_ghost/tests/test.rs", "rank": 0, "score": 295880.6356986947 }, { "content": "/// Modfies the specification to better suit present-capacity testnets.\n\npub fn lighthouse_testnet_spec(mut spec: ChainSpec) -> ChainSpec {\n\n spec.min_deposit_amount = 100;\n\n spec.max_effective_balance = 3_200_000_000;\n\n spec.ejection_balance = 1_600_000_000;\n\n spec.effective_balance_increment = 100_000_000;\n\n\n\n spec.eth1_follow_distance = 16;\n\n\n\n // This value must be at least 2x the `ETH1_FOLLOW_DISTANCE` otherwise `all_eth1_data` can\n\n // become a subset of `new_eth1_data` which may result in an Exception in the spec\n\n // implementation.\n\n //\n\n // This value determines the delay between the eth1 block that triggers genesis and the first\n\n // slot of that new chain.\n\n //\n\n // With a follow distance of 16, this is 40mins.\n\n spec.seconds_per_day = SECONDS_PER_ETH1_BLOCK * spec.eth1_follow_distance * 2 * 5;\n\n\n\n spec\n\n}\n\n\n", "file_path": "lcli/src/deploy_deposit_contract.rs", "rank": 1, "score": 286691.1385014585 }, { "content": "/// Helper: returns the slot for some block_root.\n\nfn get_slot_for_block_root(harness: 
&BeaconChainHarness, block_root: Hash256) -> Slot {\n\n harness\n\n .chain\n\n .store\n\n .get::<BeaconBlock<TestEthSpec>>(&block_root)\n\n .expect(\"head block should exist\")\n\n .expect(\"DB should not error\")\n\n .slot\n\n}\n\n\n\nconst RANDOM_ITERATIONS: usize = 50;\n\nconst RANDOM_ACTIONS_PER_ITERATION: usize = 100;\n\n\n\n/// Create a single LMD instance and have one validator vote in reverse (highest to lowest slot)\n\n/// down the chain.\n", "file_path": "eth2/lmd_ghost/tests/test.rs", "rank": 2, "score": 284245.74611521454 }, { "content": "fn get_finalized_slot(mut url: Url, slots_per_epoch: u64) -> Result<Slot, Error> {\n\n url.path_segments_mut()\n\n .map(|mut url| {\n\n url.push(\"beacon\").push(\"latest_finalized_checkpoint\");\n\n })\n\n .map_err(|_| Error::InvalidUrl)?;\n\n\n\n let checkpoint: Checkpoint = reqwest::get(url)?.error_for_status()?.json()?;\n\n\n\n Ok(checkpoint.epoch.start_slot(slots_per_epoch))\n\n}\n\n\n\n#[derive(Deserialize)]\n\n#[serde(bound = \"T: EthSpec\")]\n\npub struct StateResponse<T: EthSpec> {\n\n pub root: Hash256,\n\n pub beacon_state: BeaconState<T>,\n\n}\n\n\n", "file_path": "eth2/utils/lighthouse_bootstrap/src/lib.rs", "rank": 3, "score": 275442.03638890194 }, { "content": "pub fn hash256_iter<'a>(\n\n values: &'a [Hash256],\n\n) -> impl Iterator<Item = [u8; BYTES_PER_CHUNK]> + ExactSizeIterator + 'a {\n\n values.iter().copied().map(Hash256::to_fixed_bytes)\n\n}\n\n\n", "file_path": "eth2/utils/cached_tree_hash/src/impls.rs", "rank": 4, "score": 263270.7398589322 }, { "content": "/// Activate genesis validators, if their balance is acceptable.\n\n///\n\n/// Spec v0.8.0\n\npub fn process_activations<T: EthSpec>(state: &mut BeaconState<T>, spec: &ChainSpec) {\n\n for (index, validator) in state.validators.iter_mut().enumerate() {\n\n let balance = state.balances[index];\n\n validator.effective_balance = std::cmp::min(\n\n balance - balance % spec.effective_balance_increment,\n\n spec.max_effective_balance,\n\n );\n\n 
if validator.effective_balance == spec.max_effective_balance {\n\n validator.activation_eligibility_epoch = T::genesis_epoch();\n\n validator.activation_epoch = T::genesis_epoch();\n\n }\n\n }\n\n}\n", "file_path": "eth2/state_processing/src/genesis.rs", "rank": 5, "score": 249401.49099486507 }, { "content": "/// Checks that the chain has made the first possible finalization.\n\n///\n\n/// Intended to be run as soon as chain starts.\n\npub fn verify_first_finalization<E: EthSpec>(\n\n network: LocalNetwork<E>,\n\n slot_duration: Duration,\n\n) -> impl Future<Item = (), Error = String> {\n\n epoch_delay(Epoch::new(4), slot_duration, E::slots_per_epoch())\n\n .and_then(|()| verify_all_finalized_at(network, Epoch::new(2)))\n\n}\n\n\n", "file_path": "tests/beacon_chain_sim/src/checks.rs", "rank": 6, "score": 249351.7904256277 }, { "content": "/// HTTP handler to return a list of head BeaconBlocks.\n\npub fn get_heads<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n beacon_chain: Arc<BeaconChain<T>>,\n\n) -> ApiResult {\n\n let heads = beacon_chain\n\n .heads()\n\n .into_iter()\n\n .map(|(beacon_block_root, beacon_block_slot)| HeadBeaconBlock {\n\n beacon_block_root,\n\n beacon_block_slot,\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n ResponseBuilder::new(&req)?.body(&heads)\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Encode, Decode)]\n\n#[serde(bound = \"T: EthSpec\")]\n\npub struct BlockResponse<T: EthSpec> {\n\n pub root: Hash256,\n\n pub beacon_block: BeaconBlock<T>,\n\n}\n\n\n", "file_path": "beacon_node/rest_api/src/beacon.rs", "rank": 7, "score": 249350.58104306474 }, { "content": "/// HTTP handler to return a `BeaconBlock` at a given `root` or `slot`.\n\npub fn get_head<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n beacon_chain: Arc<BeaconChain<T>>,\n\n) -> ApiResult {\n\n let chain_head = beacon_chain.head();\n\n\n\n let head = CanonicalHeadResponse {\n\n slot: chain_head.beacon_state.slot,\n\n block_root: 
chain_head.beacon_block_root,\n\n state_root: chain_head.beacon_state_root,\n\n finalized_slot: chain_head\n\n .beacon_state\n\n .finalized_checkpoint\n\n .epoch\n\n .start_slot(T::EthSpec::slots_per_epoch()),\n\n finalized_block_root: chain_head.beacon_state.finalized_checkpoint.root,\n\n justified_slot: chain_head\n\n .beacon_state\n\n .current_justified_checkpoint\n\n .epoch\n", "file_path": "beacon_node/rest_api/src/beacon.rs", "rank": 8, "score": 249350.3949403226 }, { "content": "/// Returns a `BeaconState` and it's root in the canonical chain of `beacon_chain` at the given\n\n/// `slot`, if possible.\n\n///\n\n/// Will not return a state if the request slot is in the future. Will return states higher than\n\n/// the current head by skipping slots.\n\npub fn state_at_slot<T: BeaconChainTypes>(\n\n beacon_chain: &BeaconChain<T>,\n\n slot: Slot,\n\n) -> Result<(Hash256, BeaconState<T::EthSpec>), ApiError> {\n\n let head_state = &beacon_chain.head().beacon_state;\n\n\n\n if head_state.slot == slot {\n\n // The request slot is the same as the best block (head) slot.\n\n\n\n // I'm not sure if this `.clone()` will be optimized out. 
If not, it seems unnecessary.\n\n Ok((\n\n beacon_chain.head().beacon_state_root,\n\n beacon_chain.head().beacon_state.clone(),\n\n ))\n\n } else {\n\n let root = state_root_at_slot(beacon_chain, slot)?;\n\n\n\n let state: BeaconState<T::EthSpec> = beacon_chain\n\n .store\n\n .get_state(&root, Some(slot))?\n\n .ok_or_else(|| ApiError::NotFound(format!(\"Unable to find state at root {}\", root)))?;\n\n\n\n Ok((root, state))\n\n }\n\n}\n\n\n", "file_path": "beacon_node/rest_api/src/helpers.rs", "rank": 9, "score": 249191.7397463923 }, { "content": "/// HTTP Handler to produce a new Attestation from the current state, ready to be signed by a validator.\n\npub fn get_new_attestation<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n beacon_chain: Arc<BeaconChain<T>>,\n\n) -> ApiResult {\n\n let query = UrlQuery::from_request(&req)?;\n\n\n\n let slot = query.slot()?;\n\n let index = query.committee_index()?;\n\n\n\n let attestation = beacon_chain\n\n .produce_attestation(slot, index)\n\n .map_err(|e| ApiError::BadRequest(format!(\"Unable to produce attestation: {:?}\", e)))?;\n\n\n\n ResponseBuilder::new(&req)?.body(&attestation)\n\n}\n\n\n", "file_path": "beacon_node/rest_api/src/validator.rs", "rank": 10, "score": 246269.19947218575 }, { "content": "/// Returns the root of the `BeaconState` in the canonical chain of `beacon_chain` at the given\n\n/// `slot`, if possible.\n\n///\n\n/// Will not return a state root if the request slot is in the future. Will return state roots\n\n/// higher than the current head by skipping slots.\n\npub fn state_root_at_slot<T: BeaconChainTypes>(\n\n beacon_chain: &BeaconChain<T>,\n\n slot: Slot,\n\n) -> Result<Hash256, ApiError> {\n\n let head_state = &beacon_chain.head().beacon_state;\n\n let current_slot = beacon_chain\n\n .slot()\n\n .map_err(|_| ApiError::ServerError(\"Unable to read slot clock\".to_string()))?;\n\n\n\n // There are four scenarios when obtaining a state for a given slot:\n\n //\n\n // 1. 
The request slot is in the future.\n\n // 2. The request slot is the same as the best block (head) slot.\n\n // 3. The request slot is prior to the head slot.\n\n // 4. The request slot is later than the head slot.\n\n if current_slot < slot {\n\n // 1. The request slot is in the future. Reject the request.\n\n //\n\n // We could actually speculate about future state roots by skipping slots, however that's\n\n // likely to cause confusion for API users.\n", "file_path": "beacon_node/rest_api/src/helpers.rs", "rank": 11, "score": 246108.24988939133 }, { "content": "/// Returns the root of the `BeaconBlock` in the canonical chain of `beacon_chain` at the given\n\n/// `slot`, if possible.\n\n///\n\n/// May return a root for a previous slot, in the case of skip slots.\n\npub fn block_root_at_slot<T: BeaconChainTypes>(\n\n beacon_chain: &BeaconChain<T>,\n\n target: Slot,\n\n) -> Option<Hash256> {\n\n beacon_chain\n\n .rev_iter_block_roots()\n\n .take_while(|(_root, slot)| *slot >= target)\n\n .find(|(_root, slot)| *slot == target)\n\n .map(|(root, _slot)| root)\n\n}\n\n\n", "file_path": "beacon_node/rest_api/src/helpers.rs", "rank": 12, "score": 246103.57459308137 }, { "content": "/// HTTP Handler to produce a new BeaconBlock from the current state, ready to be signed by a validator.\n\npub fn get_new_beacon_block<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n beacon_chain: Arc<BeaconChain<T>>,\n\n log: Logger,\n\n) -> ApiResult {\n\n let query = UrlQuery::from_request(&req)?;\n\n\n\n let slot = query.slot()?;\n\n let randao_reveal = query.randao_reveal()?;\n\n\n\n let (new_block, _state) = beacon_chain\n\n .produce_block(randao_reveal, slot)\n\n .map_err(|e| {\n\n error!(\n\n log,\n\n \"Error whilst producing block\";\n\n \"error\" => format!(\"{:?}\", e)\n\n );\n\n\n\n ApiError::ServerError(format!(\n\n \"Beacon node is not able to produce a block: {:?}\",\n\n e\n\n ))\n\n })?;\n\n\n\n ResponseBuilder::new(&req)?.body(&new_block)\n\n}\n\n\n", "file_path": 
"beacon_node/rest_api/src/validator.rs", "rank": 13, "score": 243285.3915726234 }, { "content": "fn get_slots_per_epoch(mut url: Url) -> Result<Slot, Error> {\n\n url.path_segments_mut()\n\n .map(|mut url| {\n\n url.push(\"spec\").push(\"slots_per_epoch\");\n\n })\n\n .map_err(|_| Error::InvalidUrl)?;\n\n\n\n reqwest::get(url)?\n\n .error_for_status()?\n\n .json()\n\n .map_err(Into::into)\n\n}\n\n\n", "file_path": "eth2/utils/lighthouse_bootstrap/src/lib.rs", "rank": 14, "score": 239699.46742041066 }, { "content": "/// Parse a slot.\n\n///\n\n/// E.g., `\"1234\"`\n\npub fn parse_slot(string: &str) -> Result<Slot, ApiError> {\n\n string\n\n .parse::<u64>()\n\n .map(Slot::from)\n\n .map_err(|e| ApiError::BadRequest(format!(\"Unable to parse slot: {:?}\", e)))\n\n}\n\n\n", "file_path": "beacon_node/rest_api/src/helpers.rs", "rank": 15, "score": 239220.01516278036 }, { "content": "/// Scrape the given `state` assuming it's the head state, updating the `DEFAULT_REGISTRY`.\n\nfn scrape_head_state<T: BeaconChainTypes>(state: &BeaconState<T::EthSpec>, state_root: Hash256) {\n\n set_gauge_by_slot(&HEAD_STATE_SLOT, state.slot);\n\n set_gauge_by_hash(&HEAD_STATE_ROOT, state_root);\n\n set_gauge_by_slot(\n\n &HEAD_STATE_LATEST_BLOCK_SLOT,\n\n state.latest_block_header.slot,\n\n );\n\n set_gauge_by_hash(\n\n &HEAD_STATE_CURRENT_JUSTIFIED_ROOT,\n\n state.current_justified_checkpoint.root,\n\n );\n\n set_gauge_by_epoch(\n\n &HEAD_STATE_CURRENT_JUSTIFIED_EPOCH,\n\n state.current_justified_checkpoint.epoch,\n\n );\n\n set_gauge_by_hash(\n\n &HEAD_STATE_PREVIOUS_JUSTIFIED_ROOT,\n\n state.previous_justified_checkpoint.root,\n\n );\n\n set_gauge_by_epoch(\n", "file_path": "beacon_node/beacon_chain/src/metrics.rs", "rank": 16, "score": 228605.87013466784 }, { "content": "/// Compute the slot of the last guaranteed restore point in the freezer database.\n\nfn slot_of_prev_restore_point<E: EthSpec>(current_slot: Slot) -> Slot {\n\n let slots_per_historical_root = 
E::SlotsPerHistoricalRoot::to_u64();\n\n (current_slot - 1) / slots_per_historical_root * slots_per_historical_root\n\n}\n\n\n\npub type ReverseBlockRootIterator<'a, E, S> =\n\n ReverseHashAndSlotIterator<BlockRootsIterator<'a, E, S>>;\n\npub type ReverseStateRootIterator<'a, E, S> =\n\n ReverseHashAndSlotIterator<StateRootsIterator<'a, E, S>>;\n\n\n\npub type ReverseHashAndSlotIterator<I> = ReverseChainIterator<(Hash256, Slot), I>;\n\n\n\n/// Provides a wrapper for an iterator that returns a given `T` before it starts returning results of\n\n/// the `Iterator`.\n\npub struct ReverseChainIterator<T, I> {\n\n first_value_used: bool,\n\n first_value: T,\n\n iter: I,\n\n}\n\n\n", "file_path": "beacon_node/store/src/iter.rs", "rank": 17, "score": 227802.05050533335 }, { "content": "/// Parse a root from a `0x` preixed string.\n\n///\n\n/// E.g., `\"0x0000000000000000000000000000000000000000000000000000000000000000\"`\n\npub fn parse_root(string: &str) -> Result<Hash256, ApiError> {\n\n const PREFIX: &str = \"0x\";\n\n\n\n if string.starts_with(PREFIX) {\n\n let trimmed = string.trim_start_matches(PREFIX);\n\n trimmed\n\n .parse()\n\n .map_err(|e| ApiError::BadRequest(format!(\"Unable to parse root: {:?}\", e)))\n\n } else {\n\n Err(ApiError::BadRequest(\n\n \"Root must have a 0x prefix\".to_string(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "beacon_node/rest_api/src/helpers.rs", "rank": 18, "score": 226521.76757885393 }, { "content": "fn get_block<T: EthSpec>(mut url: Url, slot: Slot) -> Result<BlockResponse<T>, Error> {\n\n url.path_segments_mut()\n\n .map(|mut url| {\n\n url.push(\"beacon\").push(\"block\");\n\n })\n\n .map_err(|_| Error::InvalidUrl)?;\n\n\n\n url.query_pairs_mut()\n\n .append_pair(\"slot\", &format!(\"{}\", slot.as_u64()));\n\n\n\n reqwest::get(url)?\n\n .error_for_status()?\n\n .json()\n\n .map_err(Into::into)\n\n}\n\n\n", "file_path": "eth2/utils/lighthouse_bootstrap/src/lib.rs", "rank": 19, "score": 225898.37378094826 }, { "content": "fn 
get_state<T: EthSpec>(mut url: Url, slot: Slot) -> Result<StateResponse<T>, Error> {\n\n url.path_segments_mut()\n\n .map(|mut url| {\n\n url.push(\"beacon\").push(\"state\");\n\n })\n\n .map_err(|_| Error::InvalidUrl)?;\n\n\n\n url.query_pairs_mut()\n\n .append_pair(\"slot\", &format!(\"{}\", slot.as_u64()));\n\n\n\n reqwest::get(url)?\n\n .error_for_status()?\n\n .json()\n\n .map_err(Into::into)\n\n}\n\n\n\n#[derive(Deserialize)]\n\n#[serde(bound = \"T: EthSpec\")]\n\npub struct BlockResponse<T: EthSpec> {\n\n pub root: Hash256,\n\n pub beacon_block: BeaconBlock<T>,\n\n}\n\n\n", "file_path": "eth2/utils/lighthouse_bootstrap/src/lib.rs", "rank": 20, "score": 225898.37378094826 }, { "content": "#[cfg(not(feature = \"fake_crypto\"))]\n\npub fn verify_signature_sets<'a>(iter: impl Iterator<Item = SignatureSet<'a>>) -> bool {\n\n let rng = &mut rand::thread_rng();\n\n RawAggregateSignature::verify_multiple_signatures(rng, iter.map(Into::into))\n\n}\n\n\n", "file_path": "eth2/utils/bls/src/signature_set.rs", "rank": 21, "score": 225586.1854074336 }, { "content": "/// HTTP handler to return the full spec object.\n\npub fn get_slots_per_epoch<T: BeaconChainTypes>(req: Request<Body>) -> ApiResult {\n\n ResponseBuilder::new(&req)?.body(&T::EthSpec::slots_per_epoch())\n\n}\n", "file_path": "beacon_node/rest_api/src/spec.rs", "rank": 22, "score": 219558.36610025057 }, { "content": "pub fn u64_iter<'a>(\n\n values: &'a [u64],\n\n) -> impl Iterator<Item = [u8; BYTES_PER_CHUNK]> + ExactSizeIterator + 'a {\n\n let type_size = size_of::<u64>();\n\n let vals_per_chunk = BYTES_PER_CHUNK / type_size;\n\n values.chunks(vals_per_chunk).map(move |xs| {\n\n xs.iter().map(|x| x.to_le_bytes()).enumerate().fold(\n\n [0; BYTES_PER_CHUNK],\n\n |mut chunk, (i, x_bytes)| {\n\n chunk[i * type_size..(i + 1) * type_size].copy_from_slice(&x_bytes);\n\n chunk\n\n },\n\n )\n\n })\n\n}\n\n\n\nimpl<N: Unsigned> CachedTreeHash<TreeHashCache> for FixedVector<Hash256, N> {\n\n fn new_tree_hash_cache() -> 
TreeHashCache {\n\n TreeHashCache::new(int_log(N::to_usize()))\n\n }\n", "file_path": "eth2/utils/cached_tree_hash/src/impls.rs", "rank": 23, "score": 214457.06784943782 }, { "content": "/// Run the account manager, logging an error if the operation did not succeed.\n\npub fn run<T: EthSpec>(matches: &ArgMatches, mut env: Environment<T>) {\n\n let log = env.core_context().log.clone();\n\n match run_account_manager(matches, env) {\n\n Ok(()) => (),\n\n Err(e) => crit!(log, \"Account manager failed\"; \"error\" => e),\n\n }\n\n}\n\n\n", "file_path": "account_manager/src/lib.rs", "rank": 24, "score": 214363.31426047412 }, { "content": "/// Advances a state forward by one slot, performing per-epoch processing if required.\n\n///\n\n/// Spec v0.9.1\n\npub fn per_slot_processing<T: EthSpec>(\n\n state: &mut BeaconState<T>,\n\n spec: &ChainSpec,\n\n) -> Result<(), Error> {\n\n cache_state(state)?;\n\n\n\n if state.slot > spec.genesis_slot && (state.slot + 1) % T::slots_per_epoch() == 0 {\n\n per_epoch_processing(state, spec)?;\n\n }\n\n\n\n state.slot += 1;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "eth2/state_processing/src/per_slot_processing.rs", "rank": 25, "score": 214224.89466678113 }, { "content": "fn set_gauge_by_slot(gauge: &Result<IntGauge>, value: Slot) {\n\n set_gauge(gauge, value.as_u64() as i64);\n\n}\n\n\n", "file_path": "beacon_node/beacon_chain/src/metrics.rs", "rank": 26, "score": 208588.68408744174 }, { "content": "/// Scrape the `beacon_chain` for metrics that are not constantly updated (e.g., the present slot,\n\n/// head state info, etc) and update the Prometheus `DEFAULT_REGISTRY`.\n\npub fn scrape_for_metrics<T: BeaconChainTypes>(beacon_chain: &BeaconChain<T>) {\n\n scrape_head_state::<T>(\n\n &beacon_chain.head().beacon_state,\n\n beacon_chain.head().beacon_state_root,\n\n );\n\n}\n\n\n", "file_path": "beacon_node/beacon_chain/src/metrics.rs", "rank": 27, "score": 207526.7891263494 }, { "content": "pub fn new_env() -> Environment<MinimalEthSpec> 
{\n\n EnvironmentBuilder::minimal()\n\n // Use a single thread, so that when all tests are run in parallel they don't have so many\n\n // threads.\n\n .single_thread_tokio_runtime()\n\n .expect(\"should start tokio runtime\")\n\n .null_logger()\n\n .expect(\"should start null logger\")\n\n .build()\n\n .expect(\"should build env\")\n\n}\n\n\n", "file_path": "beacon_node/eth1/tests/test.rs", "rank": 28, "score": 206370.37804353712 }, { "content": "pub fn new_env() -> Environment<MinimalEthSpec> {\n\n EnvironmentBuilder::minimal()\n\n .single_thread_tokio_runtime()\n\n .expect(\"should start tokio runtime\")\n\n .null_logger()\n\n .expect(\"should start null logger\")\n\n .build()\n\n .expect(\"should build env\")\n\n}\n\n\n", "file_path": "beacon_node/genesis/tests/tests.rs", "rank": 29, "score": 206370.37804353712 }, { "content": "/// Spawns a new `SyncManager` thread which has a weak reference to underlying beacon\n\n/// chain. This allows the chain to be\n\n/// dropped during the syncing process which will gracefully end the `SyncManager`.\n\npub fn spawn<T: BeaconChainTypes>(\n\n executor: &tokio::runtime::TaskExecutor,\n\n beacon_chain: Weak<BeaconChain<T>>,\n\n network_send: mpsc::UnboundedSender<NetworkMessage>,\n\n log: slog::Logger,\n\n) -> (\n\n mpsc::UnboundedSender<SyncMessage<T::EthSpec>>,\n\n oneshot::Sender<()>,\n\n) {\n\n // generate the exit channel\n\n let (sync_exit, exit_rx) = tokio::sync::oneshot::channel();\n\n // generate the message channel\n\n let (sync_send, sync_recv) = mpsc::unbounded_channel::<SyncMessage<T::EthSpec>>();\n\n\n\n // create an instance of the SyncManager\n\n let sync_manager = SyncManager {\n\n chain: beacon_chain.clone(),\n\n state: ManagerState::Stalled,\n\n input_channel: sync_recv,\n\n network: SyncNetworkContext::new(network_send, log.clone()),\n", "file_path": "beacon_node/network/src/sync/manager.rs", "rank": 30, "score": 205438.3017013305 }, { "content": "pub fn route<T: BeaconChainTypes>(\n\n req: 
Request<Body>,\n\n beacon_chain: Arc<BeaconChain<T>>,\n\n network_service: Arc<NetworkService<T>>,\n\n network_channel: NetworkChannel,\n\n eth2_config: Arc<Eth2Config>,\n\n local_log: slog::Logger,\n\n db_path: PathBuf,\n\n freezer_db_path: PathBuf,\n\n) -> impl Future<Item = Response<Body>, Error = Error> {\n\n metrics::inc_counter(&metrics::REQUEST_COUNT);\n\n let timer = metrics::start_timer(&metrics::REQUEST_RESPONSE_TIME);\n\n\n\n let path = req.uri().path().to_string();\n\n\n\n let log = local_log.clone();\n\n let request_result: Box<dyn Future<Item = Response<_>, Error = _> + Send> =\n\n match (req.method(), path.as_ref()) {\n\n // Methods for Client\n\n (&Method::GET, \"/node/version\") => into_boxfut(node::get_version(req)),\n", "file_path": "beacon_node/rest_api/src/router.rs", "rank": 31, "score": 205426.86780564275 }, { "content": "/// Spawns a notifier service which periodically logs information about the node.\n\npub fn spawn_notifier<T: BeaconChainTypes>(\n\n context: RuntimeContext<T::EthSpec>,\n\n beacon_chain: Arc<BeaconChain<T>>,\n\n network: Arc<NetworkService<T>>,\n\n milliseconds_per_slot: u64,\n\n) -> Result<Signal, String> {\n\n let log_1 = context.log.clone();\n\n let log_2 = context.log.clone();\n\n\n\n let slot_duration = Duration::from_millis(milliseconds_per_slot);\n\n let duration_to_next_slot = beacon_chain\n\n .slot_clock\n\n .duration_to_next_slot()\n\n .ok_or_else(|| \"slot_notifier unable to determine time to next slot\")?;\n\n\n\n // Run this half way through each slot.\n\n let start_instant = Instant::now() + duration_to_next_slot + (slot_duration / 2);\n\n\n\n // Run this each slot.\n\n let interval_duration = slot_duration;\n", "file_path": "beacon_node/client/src/notifier.rs", "rank": 32, "score": 205426.86780564275 }, { "content": "/// Check that the chain has finalized under best-case assumptions, and check the head slot.\n\nfn check_finalization(harness: &TestHarness, expected_slot: u64) {\n\n let state = 
&harness.chain.head().beacon_state;\n\n\n\n check_slot(harness, expected_slot);\n\n\n\n assert_eq!(\n\n state.current_justified_checkpoint.epoch,\n\n state.current_epoch() - 1,\n\n \"the head should be justified one behind the current epoch\"\n\n );\n\n assert_eq!(\n\n state.finalized_checkpoint.epoch,\n\n state.current_epoch() - 2,\n\n \"the head should be finalized two behind the current epoch\"\n\n );\n\n}\n\n\n", "file_path": "beacon_node/beacon_chain/tests/store_tests.rs", "rank": 33, "score": 205321.32612732155 }, { "content": "pub fn run<T: EthSpec>(mut env: Environment<T>, matches: &ArgMatches) -> Result<(), String> {\n\n let endpoint = matches\n\n .value_of(\"eth1-endpoint\")\n\n .ok_or_else(|| \"eth1-endpoint not specified\")?;\n\n\n\n let testnet_dir = matches\n\n .value_of(\"testnet-dir\")\n\n .ok_or_else(|| ())\n\n .and_then(|dir| dir.parse::<PathBuf>().map_err(|_| ()))\n\n .unwrap_or_else(|_| {\n\n dirs::home_dir()\n\n .map(|home| home.join(\".lighthouse\").join(\"testnet\"))\n\n .expect(\"should locate home directory\")\n\n });\n\n\n\n let mut eth2_testnet_config: Eth2TestnetConfig<T> =\n\n Eth2TestnetConfig::load(testnet_dir.clone())?;\n\n\n\n let spec = eth2_testnet_config\n\n .yaml_config\n", "file_path": "lcli/src/eth1_genesis.rs", "rank": 34, "score": 204755.7696832556 }, { "content": "/// Delays for `slots`, plus half a slot extra.\n\nfn slot_delay(slots: Slot, slot_duration: Duration) -> impl Future<Item = (), Error = String> {\n\n let duration = slot_duration * slots.as_u64() as u32 + slot_duration / 2;\n\n\n\n Delay::new(Instant::now() + duration).map_err(|e| format!(\"Epoch delay failed: {:?}\", e))\n\n}\n\n\n", "file_path": "tests/beacon_chain_sim/src/checks.rs", "rank": 35, "score": 204009.37431142537 }, { "content": "// Increase the size of an array to 48 bytes\n\nfn pad_to_48(array: &mut Vec<u8>) {\n\n while array.len() < 48 {\n\n array.insert(0, 0);\n\n }\n\n}\n", "file_path": "tests/ef_tests/src/cases/bls_priv_to_pub.rs", "rank": 36, 
"score": 203407.34873943933 }, { "content": "/// HTTP handler to which accepts a `ValidatorRequest` and returns a `ValidatorResponse` for\n\n/// each of the given `pubkeys`. When `state_root` is `None`, the canonical head is used.\n\n///\n\n/// This method allows for a basically unbounded list of `pubkeys`, where as the `get_validators`\n\n/// request is limited by the max number of pubkeys you can fit in a URL.\n\npub fn post_validators<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n beacon_chain: Arc<BeaconChain<T>>,\n\n) -> BoxFut {\n\n let response_builder = ResponseBuilder::new(&req);\n\n\n\n let future = req\n\n .into_body()\n\n .concat2()\n\n .map_err(|e| ApiError::ServerError(format!(\"Unable to get request body: {:?}\", e)))\n\n .and_then(|chunks| {\n\n serde_json::from_slice::<ValidatorRequest>(&chunks).map_err(|e| {\n\n ApiError::BadRequest(format!(\n\n \"Unable to parse JSON into ValidatorRequest: {:?}\",\n\n e\n\n ))\n\n })\n\n })\n\n .and_then(|bulk_request| {\n\n validator_responses_by_pubkey(\n\n beacon_chain,\n\n bulk_request.state_root,\n\n bulk_request.pubkeys,\n\n )\n\n })\n\n .and_then(|validators| response_builder?.body(&validators));\n\n\n\n Box::new(future)\n\n}\n\n\n", "file_path": "beacon_node/rest_api/src/beacon.rs", "rank": 37, "score": 203056.77214493003 }, { "content": "/// HTTP handler to return a `BeaconState` at a given `root` or `slot`.\n\n///\n\n/// Will not return a state if the request slot is in the future. 
Will return states higher than\n\n/// the current head by skipping slots.\n\npub fn get_state<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n beacon_chain: Arc<BeaconChain<T>>,\n\n) -> ApiResult {\n\n let head_state = beacon_chain.head().beacon_state;\n\n\n\n let (key, value) = match UrlQuery::from_request(&req) {\n\n Ok(query) => {\n\n // We have *some* parameters, just check them.\n\n let query_params = [\"root\", \"slot\"];\n\n query.first_of(&query_params)?\n\n }\n\n Err(ApiError::BadRequest(_)) => {\n\n // No parameters provided at all, use current slot.\n\n (String::from(\"slot\"), head_state.slot.to_string())\n\n }\n\n Err(e) => {\n\n return Err(e);\n\n }\n\n };\n", "file_path": "beacon_node/rest_api/src/beacon.rs", "rank": 38, "score": 203054.06704242018 }, { "content": "/// HTTP handler to return the `Fork` of the current head.\n\npub fn get_fork<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n beacon_chain: Arc<BeaconChain<T>>,\n\n) -> ApiResult {\n\n ResponseBuilder::new(&req)?.body(&beacon_chain.head().beacon_state.fork)\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Serialize, Deserialize, Encode, Decode)]\n\npub struct ValidatorResponse {\n\n pub pubkey: PublicKeyBytes,\n\n pub validator_index: Option<usize>,\n\n pub balance: Option<u64>,\n\n pub validator: Option<Validator>,\n\n}\n\n\n", "file_path": "beacon_node/rest_api/src/beacon.rs", "rank": 39, "score": 203048.10556813187 }, { "content": "/// HTTP handler to return a `BeaconBlock` at a given `root` or `slot`.\n\npub fn get_block<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n beacon_chain: Arc<BeaconChain<T>>,\n\n) -> ApiResult {\n\n let query_params = [\"root\", \"slot\"];\n\n let (key, value) = UrlQuery::from_request(&req)?.first_of(&query_params)?;\n\n\n\n let block_root = match (key.as_ref(), value) {\n\n (\"slot\", value) => {\n\n let target = parse_slot(&value)?;\n\n\n\n block_root_at_slot(&beacon_chain, target).ok_or_else(|| {\n\n ApiError::NotFound(format!(\"Unable to find 
BeaconBlock for slot {:?}\", target))\n\n })?\n\n }\n\n (\"root\", value) => parse_root(&value)?,\n\n _ => return Err(ApiError::ServerError(\"Unexpected query parameter\".into())),\n\n };\n\n\n\n let block = beacon_chain\n", "file_path": "beacon_node/rest_api/src/beacon.rs", "rank": 40, "score": 203047.87512131355 }, { "content": "/// HTTP handler to return all validators, each as a `ValidatorResponse`.\n\npub fn get_all_validators<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n beacon_chain: Arc<BeaconChain<T>>,\n\n) -> ApiResult {\n\n let query = UrlQuery::from_request(&req)?;\n\n\n\n let state_root_opt = if let Some((_key, value)) = query.first_of_opt(&[\"state_root\"]) {\n\n Some(parse_root(&value)?)\n\n } else {\n\n None\n\n };\n\n\n\n let mut state = get_state_from_root_opt(&beacon_chain, state_root_opt)?;\n\n state.update_pubkey_cache()?;\n\n\n\n let validators = state\n\n .validators\n\n .iter()\n\n .map(|validator| validator_response_by_pubkey(&state, validator.pubkey.clone()))\n\n .collect::<Result<Vec<_>, _>>()?;\n\n\n\n ResponseBuilder::new(&req)?.body(&validators)\n\n}\n\n\n", "file_path": "beacon_node/rest_api/src/beacon.rs", "rank": 41, "score": 203042.2637742838 }, { "content": "/// Returns the full set of Prometheus metrics for the Beacon Node application.\n\n///\n\n/// # Note\n\n///\n\n/// This is a HTTP handler method.\n\npub fn get_prometheus<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n beacon_chain: Arc<BeaconChain<T>>,\n\n db_path: PathBuf,\n\n freezer_db_path: PathBuf,\n\n) -> ApiResult {\n\n let mut buffer = vec![];\n\n let encoder = TextEncoder::new();\n\n\n\n // There are two categories of metrics:\n\n //\n\n // - Dynamically updated: things like histograms and event counters that are updated on the\n\n // fly.\n\n // - Statically updated: things which are only updated at the time of the scrape (used where we\n\n // can avoid cluttering up code with metrics calls).\n\n //\n\n // The `lighthouse_metrics` crate has a 
`DEFAULT_REGISTRY` global singleton (via `lazy_static`)\n\n // which keeps the state of all the metrics. Dynamically updated things will already be\n\n // up-to-date in the registry (because they update themselves) however statically updated\n\n // things need to be \"scraped\".\n", "file_path": "beacon_node/rest_api/src/metrics.rs", "rank": 42, "score": 203042.2637742838 }, { "content": "/// HTTP handler to return the full spec object.\n\npub fn get_spec<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n beacon_chain: Arc<BeaconChain<T>>,\n\n) -> ApiResult {\n\n ResponseBuilder::new(&req)?.body_no_ssz(&beacon_chain.spec)\n\n}\n\n\n", "file_path": "beacon_node/rest_api/src/spec.rs", "rank": 43, "score": 203042.2637742838 }, { "content": "/// HTTP handler\n\npub fn get_committees<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n beacon_chain: Arc<BeaconChain<T>>,\n\n) -> ApiResult {\n\n let query = UrlQuery::from_request(&req)?;\n\n\n\n let epoch = query.epoch()?;\n\n\n\n let mut state = get_state_for_epoch(&beacon_chain, epoch)?;\n\n\n\n let relative_epoch = RelativeEpoch::from_epoch(state.current_epoch(), epoch).map_err(|e| {\n\n ApiError::ServerError(format!(\"Failed to get state suitable for epoch: {:?}\", e))\n\n })?;\n\n\n\n state\n\n .build_committee_cache(relative_epoch, &beacon_chain.spec)\n\n .map_err(|e| ApiError::ServerError(format!(\"Unable to build committee cache: {:?}\", e)))?;\n\n\n\n let committees = state\n\n .get_beacon_committees_at_epoch(relative_epoch)\n", "file_path": "beacon_node/rest_api/src/beacon.rs", "rank": 44, "score": 203042.2637742838 }, { "content": "pub fn start_server<T: BeaconChainTypes>(\n\n config: &Config,\n\n executor: &TaskExecutor,\n\n beacon_chain: Arc<BeaconChain<T>>,\n\n network_info: NetworkInfo<T>,\n\n db_path: PathBuf,\n\n freezer_db_path: PathBuf,\n\n eth2_config: Eth2Config,\n\n log: slog::Logger,\n\n) -> Result<(exit_future::Signal, SocketAddr), hyper::Error> {\n\n let inner_log = log.clone();\n\n let eth2_config = 
Arc::new(eth2_config);\n\n\n\n // Define the function that will build the request handler.\n\n let make_service = make_service_fn(move |_socket: &AddrStream| {\n\n let beacon_chain = beacon_chain.clone();\n\n let log = inner_log.clone();\n\n let eth2_config = eth2_config.clone();\n\n let network_service = network_info.network_service.clone();\n\n let network_channel = Arc::new(RwLock::new(network_info.network_chan.clone()));\n", "file_path": "beacon_node/rest_api/src/lib.rs", "rank": 45, "score": 203042.2637742838 }, { "content": "/// HTTP Handler to publish an Attestation, which has been signed by a validator.\n\npub fn publish_attestation<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n beacon_chain: Arc<BeaconChain<T>>,\n\n network_chan: NetworkChannel,\n\n log: Logger,\n\n) -> BoxFut {\n\n try_future!(check_content_type_for_json(&req));\n\n let response_builder = ResponseBuilder::new(&req);\n\n\n\n Box::new(\n\n req.into_body()\n\n .concat2()\n\n .map_err(|e| ApiError::ServerError(format!(\"Unable to get request body: {:?}\", e)))\n\n .map(|chunk| chunk.iter().cloned().collect::<Vec<u8>>())\n\n .and_then(|chunks| {\n\n serde_json::from_slice(&chunks.as_slice()).map_err(|e| {\n\n ApiError::BadRequest(format!(\n\n \"Unable to deserialize JSON into a BeaconBlock: {:?}\",\n\n e\n\n ))\n", "file_path": "beacon_node/rest_api/src/validator.rs", "rank": 46, "score": 203042.2637742838 }, { "content": "/// HTTP handler to return the Discv5 ENR from the client's libp2p service.\n\n///\n\n/// ENR is encoded as base64 string.\n\npub fn get_enr<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n network: Arc<NetworkService<T>>,\n\n) -> ApiResult {\n\n ResponseBuilder::new(&req)?.body_no_ssz(&network.local_enr().to_base64())\n\n}\n\n\n", "file_path": "beacon_node/rest_api/src/network.rs", "rank": 47, "score": 203042.2637742838 }, { "content": "/// HTTP handler to which accepts a query string of a list of validator pubkeys and maps it to a\n\n/// 
`ValidatorResponse`.\n\n///\n\n/// This method is limited to as many `pubkeys` that can fit in a URL. See `post_validators` for\n\n/// doing bulk requests.\n\npub fn get_validators<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n beacon_chain: Arc<BeaconChain<T>>,\n\n) -> ApiResult {\n\n let query = UrlQuery::from_request(&req)?;\n\n\n\n let validator_pubkeys = query\n\n .all_of(\"validator_pubkeys\")?\n\n .iter()\n\n .map(|validator_pubkey_str| parse_pubkey_bytes(validator_pubkey_str))\n\n .collect::<Result<Vec<_>, _>>()?;\n\n\n\n let state_root_opt = if let Some((_key, value)) = query.first_of_opt(&[\"state_root\"]) {\n\n Some(parse_root(&value)?)\n\n } else {\n\n None\n\n };\n\n\n\n let validators =\n\n validator_responses_by_pubkey(beacon_chain, state_root_opt, validator_pubkeys)?;\n\n\n\n ResponseBuilder::new(&req)?.body(&validators)\n\n}\n\n\n", "file_path": "beacon_node/rest_api/src/beacon.rs", "rank": 48, "score": 203042.2637742838 }, { "content": "/// Checks that all of the validators have on-boarded by the start of the second eth1 voting\n\n/// period.\n\npub fn verify_validator_onboarding<E: EthSpec>(\n\n network: LocalNetwork<E>,\n\n slot_duration: Duration,\n\n expected_validator_count: usize,\n\n) -> impl Future<Item = (), Error = String> {\n\n slot_delay(\n\n Slot::new(E::SlotsPerEth1VotingPeriod::to_u64()),\n\n slot_duration,\n\n )\n\n .and_then(move |()| verify_validator_count(network, expected_validator_count))\n\n}\n\n\n", "file_path": "tests/beacon_chain_sim/src/checks.rs", "rank": 49, "score": 203042.2637742838 }, { "content": "pub fn run<T: EthSpec>(mut env: Environment<T>, matches: &ArgMatches) -> Result<(), String> {\n\n let endpoint = matches\n\n .value_of(\"eth1-endpoint\")\n\n .ok_or_else(|| \"eth1-endpoint not specified\")?;\n\n\n\n let account_index = matches\n\n .value_of(\"account-index\")\n\n .ok_or_else(|| \"No account-index\".to_string())?\n\n .parse::<usize>()\n\n .map_err(|e| format!(\"Unable to parse account-index: {}\", 
e))?;\n\n\n\n let password_opt = parse_password(matches)?;\n\n\n\n let testnet_dir = matches\n\n .value_of(\"testnet-dir\")\n\n .ok_or_else(|| ())\n\n .and_then(|dir| dir.parse::<PathBuf>().map_err(|_| ()))\n\n .unwrap_or_else(|_| {\n\n dirs::home_dir()\n\n .map(|home| home.join(\".lighthouse\").join(\"testnet\"))\n", "file_path": "lcli/src/refund_deposit_contract.rs", "rank": 50, "score": 202786.51849510445 }, { "content": "pub fn run<T: EthSpec>(mut env: Environment<T>, matches: &ArgMatches) -> Result<(), String> {\n\n let min_genesis_time = matches\n\n .value_of(\"min-genesis-time\")\n\n .ok_or_else(|| \"min_genesis_time not specified\")?\n\n .parse::<u64>()\n\n .map_err(|e| format!(\"Failed to parse min_genesis_time: {}\", e))?;\n\n\n\n let min_genesis_active_validator_count = matches\n\n .value_of(\"min-genesis-active-validator-count\")\n\n .ok_or_else(|| \"min-genesis-active-validator-count not specified\")?\n\n .parse::<u64>()\n\n .map_err(|e| format!(\"Failed to parse min-genesis-active-validator-count: {}\", e))?;\n\n\n\n let confirmations = matches\n\n .value_of(\"confirmations\")\n\n .ok_or_else(|| \"Confirmations not specified\")?\n\n .parse::<usize>()\n\n .map_err(|e| format!(\"Failed to parse confirmations: {}\", e))?;\n\n\n\n let output_dir = matches\n", "file_path": "lcli/src/deploy_deposit_contract.rs", "rank": 51, "score": 202786.51849510445 }, { "content": "fn new_state<T: EthSpec>(validator_count: usize, slot: Slot) -> BeaconState<T> {\n\n let spec = &T::default_spec();\n\n\n\n let mut builder =\n\n TestingBeaconStateBuilder::from_single_keypair(validator_count, &Keypair::random(), spec);\n\n\n\n builder.teleport_to_slot(slot);\n\n\n\n let (state, _keypairs) = builder.build();\n\n\n\n state\n\n}\n\n\n", "file_path": "eth2/types/src/beacon_state/committee_cache/tests.rs", "rank": 52, "score": 202158.60630744527 }, { "content": "#[allow(clippy::if_same_then_else)] // For readability and consistency with spec.\n\npub fn 
process_justification_and_finalization<T: EthSpec>(\n\n state: &mut BeaconState<T>,\n\n total_balances: &TotalBalances,\n\n) -> Result<(), Error> {\n\n if state.current_epoch() <= T::genesis_epoch() + 1 {\n\n return Ok(());\n\n }\n\n\n\n let previous_epoch = state.previous_epoch();\n\n let current_epoch = state.current_epoch();\n\n\n\n let old_previous_justified_checkpoint = state.previous_justified_checkpoint.clone();\n\n let old_current_justified_checkpoint = state.current_justified_checkpoint.clone();\n\n\n\n // Process justifications\n\n state.previous_justified_checkpoint = state.current_justified_checkpoint.clone();\n\n state.justification_bits.shift_up(1)?;\n\n\n\n if total_balances.previous_epoch_target_attesters * 3 >= total_balances.current_epoch * 2 {\n\n state.current_justified_checkpoint = Checkpoint {\n", "file_path": "eth2/state_processing/src/per_epoch_processing.rs", "rank": 53, "score": 201639.70326592054 }, { "content": "/// Finish up an epoch update.\n\n///\n\n/// Spec v0.9.1\n\npub fn process_final_updates<T: EthSpec>(\n\n state: &mut BeaconState<T>,\n\n spec: &ChainSpec,\n\n) -> Result<(), Error> {\n\n let current_epoch = state.current_epoch();\n\n let next_epoch = state.next_epoch();\n\n\n\n // Reset eth1 data votes.\n\n if (state.slot + 1) % T::SlotsPerEth1VotingPeriod::to_u64() == 0 {\n\n state.eth1_data_votes = VariableList::empty();\n\n }\n\n\n\n // Update effective balances with hysteresis (lag).\n\n for (index, validator) in state.validators.iter_mut().enumerate() {\n\n let balance = state.balances[index];\n\n let half_increment = spec.effective_balance_increment / 2;\n\n if balance < validator.effective_balance\n\n || validator.effective_balance + 3 * half_increment < balance\n\n {\n\n validator.effective_balance = std::cmp::min(\n", "file_path": "eth2/state_processing/src/per_epoch_processing.rs", "rank": 54, "score": 201639.70326592054 }, { "content": "/// HTTP handler to return a `BeaconState` root at a given `slot`.\n\n///\n\n/// 
Will not return a state if the request slot is in the future. Will return states higher than\n\n/// the current head by skipping slots.\n\npub fn get_state_root<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n beacon_chain: Arc<BeaconChain<T>>,\n\n) -> ApiResult {\n\n let slot_string = UrlQuery::from_request(&req)?.only_one(\"slot\")?;\n\n let slot = parse_slot(&slot_string)?;\n\n\n\n let root = state_root_at_slot(&beacon_chain, slot)?;\n\n\n\n ResponseBuilder::new(&req)?.body(&root)\n\n}\n\n\n", "file_path": "beacon_node/rest_api/src/beacon.rs", "rank": 55, "score": 200750.02859679537 }, { "content": "/// HTTP handler to return a `BeaconState` at the genesis block.\n\n///\n\n/// This is an undocumented convenience method used during testing. For production, simply do a\n\n/// state request at slot 0.\n\npub fn get_genesis_state<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n beacon_chain: Arc<BeaconChain<T>>,\n\n) -> ApiResult {\n\n let (_root, state) = state_at_slot(&beacon_chain, Slot::new(0))?;\n\n\n\n ResponseBuilder::new(&req)?.body(&state)\n\n}\n\n\n", "file_path": "beacon_node/rest_api/src/beacon.rs", "rank": 56, "score": 200748.75443254283 }, { "content": "/// Read the genesis time from the current beacon chain state.\n\npub fn get_genesis_time<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n beacon_chain: Arc<BeaconChain<T>>,\n\n) -> ApiResult {\n\n ResponseBuilder::new(&req)?.body(&beacon_chain.head().beacon_state.genesis_time)\n\n}\n", "file_path": "beacon_node/rest_api/src/beacon.rs", "rank": 57, "score": 200743.94385829137 }, { "content": "/// HTTP handler to return a `BeaconBlock` root at a given `slot`.\n\npub fn get_block_root<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n beacon_chain: Arc<BeaconChain<T>>,\n\n) -> ApiResult {\n\n let slot_string = UrlQuery::from_request(&req)?.only_one(\"slot\")?;\n\n let target = parse_slot(&slot_string)?;\n\n\n\n let root = block_root_at_slot(&beacon_chain, target).ok_or_else(|| {\n\n 
ApiError::NotFound(format!(\"Unable to find BeaconBlock for slot {:?}\", target))\n\n })?;\n\n\n\n ResponseBuilder::new(&req)?.body(&root)\n\n}\n\n\n", "file_path": "beacon_node/rest_api/src/beacon.rs", "rank": 58, "score": 200743.83667568874 }, { "content": "/// Helper function to return the state that can be used to determine the duties for some `epoch`.\n\npub fn get_state_for_epoch<T: BeaconChainTypes>(\n\n beacon_chain: &BeaconChain<T>,\n\n epoch: Epoch,\n\n) -> Result<BeaconState<T::EthSpec>, ApiError> {\n\n let slots_per_epoch = T::EthSpec::slots_per_epoch();\n\n let head_epoch = beacon_chain.head().beacon_state.current_epoch();\n\n\n\n if RelativeEpoch::from_epoch(head_epoch, epoch).is_ok() {\n\n Ok(beacon_chain.head().beacon_state)\n\n } else {\n\n let slot = if epoch > head_epoch {\n\n // Move to the first slot of the epoch prior to the request.\n\n //\n\n // Taking advantage of saturating epoch subtraction.\n\n (epoch - 1).start_slot(slots_per_epoch)\n\n } else {\n\n // Move to the end of the epoch following the target.\n\n //\n\n // Taking advantage of saturating epoch subtraction.\n\n (epoch + 2).start_slot(slots_per_epoch) - 1\n\n };\n\n\n\n beacon_chain.state_at_slot(slot).map_err(|e| {\n\n ApiError::ServerError(format!(\"Unable to load state for epoch {}: {:?}\", epoch, e))\n\n })\n\n }\n\n}\n\n\n", "file_path": "beacon_node/rest_api/src/validator.rs", "rank": 59, "score": 200743.814878645 }, { "content": "/// HTTP Handler to retrieve all validator duties for the given epoch.\n\npub fn get_all_validator_duties<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n beacon_chain: Arc<BeaconChain<T>>,\n\n) -> ApiResult {\n\n let query = UrlQuery::from_request(&req)?;\n\n\n\n let epoch = query.epoch()?;\n\n\n\n let state = get_state_for_epoch(&beacon_chain, epoch)?;\n\n\n\n let validator_pubkeys = state\n\n .validators\n\n .iter()\n\n .map(|validator| validator.pubkey.clone())\n\n .collect();\n\n\n\n let duties = return_validator_duties(beacon_chain, epoch, 
validator_pubkeys)?;\n\n\n\n ResponseBuilder::new(&req)?.body_no_ssz(&duties)\n\n}\n\n\n", "file_path": "beacon_node/rest_api/src/validator.rs", "rank": 60, "score": 200738.225328659 }, { "content": "/// HTTP handler to return all active validators, each as a `ValidatorResponse`.\n\npub fn get_active_validators<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n beacon_chain: Arc<BeaconChain<T>>,\n\n) -> ApiResult {\n\n let query = UrlQuery::from_request(&req)?;\n\n\n\n let state_root_opt = if let Some((_key, value)) = query.first_of_opt(&[\"state_root\"]) {\n\n Some(parse_root(&value)?)\n\n } else {\n\n None\n\n };\n\n\n\n let mut state = get_state_from_root_opt(&beacon_chain, state_root_opt)?;\n\n state.update_pubkey_cache()?;\n\n\n\n let validators = state\n\n .validators\n\n .iter()\n\n .filter(|validator| validator.is_active_at(state.current_epoch()))\n\n .map(|validator| validator_response_by_pubkey(&state, validator.pubkey.clone()))\n", "file_path": "beacon_node/rest_api/src/beacon.rs", "rank": 61, "score": 200738.225328659 }, { "content": "/// Checks that all of the validators have on-boarded by the start of the second eth1 voting\n\n/// period.\n\npub fn verify_initial_validator_count<E: EthSpec>(\n\n network: LocalNetwork<E>,\n\n slot_duration: Duration,\n\n initial_validator_count: usize,\n\n) -> impl Future<Item = (), Error = String> {\n\n slot_delay(Slot::new(1), slot_duration)\n\n .and_then(move |()| verify_validator_count(network, initial_validator_count))\n\n}\n\n\n", "file_path": "tests/beacon_chain_sim/src/checks.rs", "rank": 62, "score": 200738.225328659 }, { "content": "pub fn post_individual_votes<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n beacon_chain: Arc<BeaconChain<T>>,\n\n) -> BoxFut {\n\n let response_builder = ResponseBuilder::new(&req);\n\n\n\n let future = req\n\n .into_body()\n\n .concat2()\n\n .map_err(|e| ApiError::ServerError(format!(\"Unable to get request body: {:?}\", e)))\n\n .and_then(|chunks| {\n\n 
serde_json::from_slice::<IndividualVotesRequest>(&chunks).map_err(|e| {\n\n ApiError::BadRequest(format!(\n\n \"Unable to parse JSON into ValidatorDutiesRequest: {:?}\",\n\n e\n\n ))\n\n })\n\n })\n\n .and_then(move |body| {\n\n let epoch = body.epoch;\n", "file_path": "beacon_node/rest_api/src/consensus.rs", "rank": 63, "score": 200738.225328659 }, { "content": "/// HTTP handler to return the list of peers connected to the client's libp2p service.\n\n///\n\n/// Peers are presented as a list of `PeerId::to_string()`.\n\npub fn get_peer_list<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n network: Arc<NetworkService<T>>,\n\n) -> ApiResult {\n\n let connected_peers: Vec<String> = network\n\n .connected_peer_set()\n\n .iter()\n\n .map(PeerId::to_string)\n\n .collect();\n\n ResponseBuilder::new(&req)?.body_no_ssz(&connected_peers)\n\n}\n", "file_path": "beacon_node/rest_api/src/network.rs", "rank": 64, "score": 200738.225328659 }, { "content": "/// HTTP Handler to retrieve a the duties for a set of validators during a particular epoch. 
This\n\n/// method allows for collecting bulk sets of validator duties without risking exceeding the max\n\n/// URL length with query pairs.\n\npub fn post_validator_duties<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n beacon_chain: Arc<BeaconChain<T>>,\n\n) -> BoxFut {\n\n let response_builder = ResponseBuilder::new(&req);\n\n\n\n let future = req\n\n .into_body()\n\n .concat2()\n\n .map_err(|e| ApiError::ServerError(format!(\"Unable to get request body: {:?}\", e)))\n\n .and_then(|chunks| {\n\n serde_json::from_slice::<ValidatorDutiesRequest>(&chunks).map_err(|e| {\n\n ApiError::BadRequest(format!(\n\n \"Unable to parse JSON into ValidatorDutiesRequest: {:?}\",\n\n e\n\n ))\n\n })\n\n })\n\n .and_then(|bulk_request| {\n\n return_validator_duties(\n\n beacon_chain,\n\n bulk_request.epoch,\n\n bulk_request.pubkeys.into_iter().map(Into::into).collect(),\n\n )\n\n })\n\n .and_then(|duties| response_builder?.body_no_ssz(&duties));\n\n\n\n Box::new(future)\n\n}\n\n\n", "file_path": "beacon_node/rest_api/src/validator.rs", "rank": 65, "score": 200738.225328659 }, { "content": "/// HTTP handler to return the list of libp2p multiaddr the client is listening on.\n\n///\n\n/// Returns a list of `Multiaddr`, serialized according to their `serde` impl.\n\npub fn get_listen_addresses<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n network: Arc<NetworkService<T>>,\n\n) -> ApiResult {\n\n let multiaddresses: Vec<Multiaddr> = network.listen_multiaddrs();\n\n ResponseBuilder::new(&req)?.body_no_ssz(&multiaddresses)\n\n}\n\n\n", "file_path": "beacon_node/rest_api/src/network.rs", "rank": 66, "score": 200738.225328659 }, { "content": "/// HTTP handler to return the `PeerId` from the client's libp2p service.\n\n///\n\n/// PeerId is encoded as base58 string.\n\npub fn get_peer_id<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n network: Arc<NetworkService<T>>,\n\n) -> ApiResult {\n\n ResponseBuilder::new(&req)?.body_no_ssz(&network.local_peer_id().to_base58())\n\n}\n\n\n", 
"file_path": "beacon_node/rest_api/src/network.rs", "rank": 67, "score": 200738.225328659 }, { "content": "/// HTTP handler to return the full Eth2Config object.\n\npub fn get_eth2_config<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n eth2_config: Arc<Eth2Config>,\n\n) -> ApiResult {\n\n ResponseBuilder::new(&req)?.body_no_ssz(eth2_config.as_ref())\n\n}\n\n\n", "file_path": "beacon_node/rest_api/src/spec.rs", "rank": 68, "score": 200738.225328659 }, { "content": "/// HTTP Handler to publish a BeaconBlock, which has been signed by a validator.\n\npub fn publish_beacon_block<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n beacon_chain: Arc<BeaconChain<T>>,\n\n network_chan: NetworkChannel,\n\n log: Logger,\n\n) -> BoxFut {\n\n try_future!(check_content_type_for_json(&req));\n\n let response_builder = ResponseBuilder::new(&req);\n\n\n\n let body = req.into_body();\n\n Box::new(\n\n body.concat2()\n\n .map_err(|e| ApiError::ServerError(format!(\"Unable to get request body: {:?}\", e)))\n\n .and_then(|chunks| {\n\n serde_json::from_slice(&chunks).map_err(|e| {\n\n ApiError::BadRequest(format!(\"Unable to parse JSON into BeaconBlock: {:?}\", e))\n\n })\n\n })\n\n .and_then(move |block: BeaconBlock<T::EthSpec>| {\n\n let slot = block.slot;\n", "file_path": "beacon_node/rest_api/src/validator.rs", "rank": 69, "score": 200738.225328659 }, { "content": "/// HTTP handler return a `VoteCount` for some given `Epoch`.\n\npub fn get_vote_count<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n beacon_chain: Arc<BeaconChain<T>>,\n\n) -> ApiResult {\n\n let query = UrlQuery::from_request(&req)?;\n\n\n\n let epoch = query.epoch()?;\n\n // This is the last slot of the given epoch (one prior to the first slot of the next epoch).\n\n let target_slot = (epoch + 1).start_slot(T::EthSpec::slots_per_epoch()) - 1;\n\n\n\n let (_root, state) = state_at_slot(&beacon_chain, target_slot)?;\n\n let spec = &beacon_chain.spec;\n\n\n\n let mut validator_statuses = ValidatorStatuses::new(&state, 
spec)?;\n\n validator_statuses.process_attestations(&state, spec)?;\n\n\n\n let report: VoteCount = validator_statuses.total_balances.into();\n\n\n\n ResponseBuilder::new(&req)?.body(&report)\n\n}\n", "file_path": "beacon_node/rest_api/src/consensus.rs", "rank": 70, "score": 200738.225328659 }, { "content": "/// HTTP handler to return the network port the client is listening on.\n\n///\n\n/// Returns the TCP port number in its plain form (which is also valid JSON serialization)\n\npub fn get_listen_port<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n network: Arc<NetworkService<T>>,\n\n) -> ApiResult {\n\n ResponseBuilder::new(&req)?.body(&network.listen_port())\n\n}\n\n\n", "file_path": "beacon_node/rest_api/src/network.rs", "rank": 71, "score": 200738.225328659 }, { "content": "/// HTTP handler to return the number of peers connected in the client's libp2p service.\n\npub fn get_peer_count<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n network: Arc<NetworkService<T>>,\n\n) -> ApiResult {\n\n ResponseBuilder::new(&req)?.body(&network.connected_peers())\n\n}\n\n\n", "file_path": "beacon_node/rest_api/src/network.rs", "rank": 72, "score": 200738.225328659 }, { "content": "pub fn get_block_at_preceeding_slot<T: Store<E>, E: EthSpec>(\n\n store: &T,\n\n slot: Slot,\n\n start_root: Hash256,\n\n) -> Result<Option<(Hash256, BeaconBlock<E>)>, Error> {\n\n Ok(\n\n match get_at_preceeding_slot::<_, E>(store, slot, start_root)? 
{\n\n Some((hash, bytes)) => Some((hash, BeaconBlock::<E>::from_ssz_bytes(&bytes)?)),\n\n None => None,\n\n },\n\n )\n\n}\n\n\n", "file_path": "beacon_node/store/src/block_at_slot.rs", "rank": 73, "score": 200270.05735235603 }, { "content": "/// Returns `Some(eth1_data)` if adding the given `eth1_data` to `state.eth1_data_votes` would\n\n/// result in a change to `state.eth1_data`.\n\n///\n\n/// Spec v0.9.1\n\npub fn get_new_eth1_data<T: EthSpec>(\n\n state: &BeaconState<T>,\n\n eth1_data: &Eth1Data,\n\n) -> Option<Eth1Data> {\n\n let num_votes = state\n\n .eth1_data_votes\n\n .iter()\n\n .filter(|vote| *vote == eth1_data)\n\n .count();\n\n\n\n // The +1 is to account for the `eth1_data` supplied to the function.\n\n if 2 * (num_votes + 1) > T::SlotsPerEth1VotingPeriod::to_usize() {\n\n Some(eth1_data.clone())\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "eth2/state_processing/src/per_block_processing.rs", "rank": 74, "score": 199407.22613802215 }, { "content": "/// HTTP Handler to retrieve all active validator duties for the given epoch.\n\npub fn get_active_validator_duties<T: BeaconChainTypes>(\n\n req: Request<Body>,\n\n beacon_chain: Arc<BeaconChain<T>>,\n\n) -> ApiResult {\n\n let query = UrlQuery::from_request(&req)?;\n\n\n\n let epoch = query.epoch()?;\n\n\n\n let state = get_state_for_epoch(&beacon_chain, epoch)?;\n\n\n\n let validator_pubkeys = state\n\n .validators\n\n .iter()\n\n .filter(|validator| validator.is_active_at(state.current_epoch()))\n\n .map(|validator| validator.pubkey.clone())\n\n .collect();\n\n\n\n let duties = return_validator_duties(beacon_chain, epoch, validator_pubkeys)?;\n\n\n\n ResponseBuilder::new(&req)?.body_no_ssz(&duties)\n\n}\n\n\n", "file_path": "beacon_node/rest_api/src/validator.rs", "rank": 75, "score": 198510.73734513047 }, { "content": "/// Update the global metrics `DEFAULT_REGISTRY` with info from the slot clock.\n\npub fn scrape_for_metrics<T: EthSpec, U: SlotClock>(clock: &U) {\n\n let present_slot = match 
clock.now() {\n\n Some(slot) => slot,\n\n _ => Slot::new(0),\n\n };\n\n\n\n set_gauge(&PRESENT_SLOT, present_slot.as_u64() as i64);\n\n set_gauge(\n\n &PRESENT_EPOCH,\n\n present_slot.epoch(T::slots_per_epoch()).as_u64() as i64,\n\n );\n\n set_gauge(&SLOTS_PER_EPOCH, T::slots_per_epoch() as i64);\n\n set_gauge(\n\n &MILLISECONDS_PER_SLOT,\n\n clock.slot_duration().as_millis() as i64,\n\n );\n\n}\n", "file_path": "eth2/utils/slot_clock/src/metrics.rs", "rank": 76, "score": 198064.4155845361 }, { "content": "fn set_gauge_by_hash(gauge: &Result<IntGauge>, value: Hash256) {\n\n set_gauge(gauge, value.to_low_u64_le() as i64);\n\n}\n\n\n", "file_path": "beacon_node/beacon_chain/src/metrics.rs", "rank": 77, "score": 195897.14516434568 }, { "content": "pub fn publish_attestation_to_network<T: BeaconChainTypes + 'static>(\n\n chan: Arc<RwLock<mpsc::UnboundedSender<NetworkMessage>>>,\n\n attestation: Attestation<T::EthSpec>,\n\n) -> Result<(), ApiError> {\n\n // create the network topic to send on\n\n let topic = GossipTopic::BeaconAttestation;\n\n let message = PubsubMessage::Attestation(attestation.as_ssz_bytes());\n\n\n\n // Publish the attestation to the p2p network via gossipsub.\n\n if let Err(e) = chan.write().try_send(NetworkMessage::Publish {\n\n topics: vec![topic.into()],\n\n message,\n\n }) {\n\n return Err(ApiError::ServerError(format!(\n\n \"Unable to send new attestation to network: {:?}\",\n\n e\n\n )));\n\n }\n\n\n\n Ok(())\n", "file_path": "beacon_node/rest_api/src/helpers.rs", "rank": 78, "score": 195687.86084782513 }, { "content": "#[cfg(feature = \"fake_crypto\")]\n\npub fn verify_signature_sets<'a>(_iter: impl Iterator<Item = SignatureSet<'a>>) -> bool {\n\n true\n\n}\n\n\n", "file_path": "eth2/utils/bls/src/signature_set.rs", "rank": 79, "score": 194245.21594202024 }, { "content": "type RootAndSlot = (Hash256, Slot);\n\n\n\nlazy_static! 
{\n\n /// A lazy-static instance of a `BeaconChainHarness` that contains two forks.\n\n ///\n\n /// Reduces test setup time by providing a common harness.\n\n static ref FORKED_HARNESS: ForkedHarness = ForkedHarness::new();\n\n}\n\n\n", "file_path": "eth2/lmd_ghost/tests/test.rs", "rank": 80, "score": 193901.098425814 }, { "content": "pub fn publish_beacon_block_to_network<T: BeaconChainTypes + 'static>(\n\n chan: Arc<RwLock<mpsc::UnboundedSender<NetworkMessage>>>,\n\n block: BeaconBlock<T::EthSpec>,\n\n) -> Result<(), ApiError> {\n\n // create the network topic to send on\n\n let topic = GossipTopic::BeaconBlock;\n\n let message = PubsubMessage::Block(block.as_ssz_bytes());\n\n\n\n // Publish the block to the p2p network via gossipsub.\n\n if let Err(e) = chan.write().try_send(NetworkMessage::Publish {\n\n topics: vec![topic.into()],\n\n message,\n\n }) {\n\n return Err(ApiError::ServerError(format!(\n\n \"Unable to send new block to network: {:?}\",\n\n e\n\n )));\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "beacon_node/rest_api/src/helpers.rs", "rank": 81, "score": 193533.17064375337 }, { "content": "fn read_parent_root_from_block_bytes(bytes: &[u8]) -> Result<Hash256, DecodeError> {\n\n let previous_bytes = Slot::ssz_fixed_len();\n\n let slice = bytes\n\n .get(previous_bytes..previous_bytes + Hash256::ssz_fixed_len())\n\n .ok_or_else(|| DecodeError::BytesInvalid(\"Not enough bytes.\".to_string()))?;\n\n\n\n Hash256::from_ssz_bytes(slice)\n\n}\n\n\n", "file_path": "beacon_node/store/src/block_at_slot.rs", "rank": 82, "score": 192345.69998776002 }, { "content": "fn write_block<T: EthSpec>(block: &BeaconBlock<T>, root: Hash256, log: &Logger) {\n\n if WRITE_BLOCK_PROCESSING_SSZ {\n\n let filename = format!(\"block_slot_{}_root{}.ssz\", block.slot, root);\n\n let mut path = std::env::temp_dir().join(\"lighthouse\");\n\n let _ = fs::create_dir_all(path.clone());\n\n path = path.join(filename);\n\n\n\n match fs::File::create(path.clone()) {\n\n Ok(mut file) => 
{\n\n let _ = file.write_all(&block.as_ssz_bytes());\n\n }\n\n Err(e) => error!(\n\n log,\n\n \"Failed to log block\";\n\n \"path\" => format!(\"{:?}\", path),\n\n \"error\" => format!(\"{:?}\", e)\n\n ),\n\n }\n\n }\n\n}\n", "file_path": "beacon_node/beacon_chain/src/beacon_chain.rs", "rank": 83, "score": 190017.04194779924 }, { "content": "/// Writes the configs in `self` to `self.data_dir`.\n\n///\n\n/// Returns an error if `self.data_dir` already exists.\n\npub fn create_new_datadir(client_config: &ClientConfig, eth2_config: &Eth2Config) -> Result<()> {\n\n if client_config.data_dir.exists() {\n\n return Err(format!(\n\n \"Data dir already exists at {:?}\",\n\n client_config.data_dir\n\n ))?;\n\n }\n\n\n\n // Create `datadir` and any non-existing parent directories.\n\n fs::create_dir_all(&client_config.data_dir)\n\n .map_err(|e| format!(\"Failed to create data dir: {}\", e))?;\n\n\n\n macro_rules! write_to_file {\n\n ($file: ident, $variable: ident) => {\n\n let file = client_config.data_dir.join($file);\n\n if file.exists() {\n\n return Err(format!(\"Datadir is not clean, {} exists.\", $file));\n\n } else {\n\n // Write the onfig to a TOML file in the datadir.\n\n write_to_file(client_config.data_dir.join($file), $variable)\n", "file_path": "beacon_node/src/config.rs", "rank": 84, "score": 187337.88447169834 }, { "content": "#[test]\n\nfn iterators() {\n\n let num_blocks_produced = MinimalEthSpec::slots_per_epoch() * 2 - 1;\n\n\n\n let harness = get_harness(VALIDATOR_COUNT);\n\n\n\n harness.extend_chain(\n\n num_blocks_produced as usize,\n\n BlockStrategy::OnCanonicalHead,\n\n // No need to produce attestations for this test.\n\n AttestationStrategy::SomeValidators(vec![]),\n\n );\n\n\n\n let block_roots: Vec<(Hash256, Slot)> = harness.chain.rev_iter_block_roots().collect();\n\n let state_roots: Vec<(Hash256, Slot)> = harness.chain.rev_iter_state_roots().collect();\n\n\n\n assert_eq!(\n\n block_roots.len(),\n\n state_roots.len(),\n\n \"should be an equal 
amount of block and state roots\"\n\n );\n", "file_path": "beacon_node/beacon_chain/tests/tests.rs", "rank": 85, "score": 186717.16362247284 }, { "content": "fn cache_state<T: EthSpec>(state: &mut BeaconState<T>) -> Result<(), Error> {\n\n let previous_state_root = state.update_tree_hash_cache()?;\n\n\n\n // Note: increment the state slot here to allow use of our `state_root` and `block_root`\n\n // getter/setter functions.\n\n //\n\n // This is a bit hacky, however it gets the job safely without lots of code.\n\n let previous_slot = state.slot;\n\n state.slot += 1;\n\n\n\n // Store the previous slot's post state transition root.\n\n state.set_state_root(previous_slot, previous_state_root)?;\n\n\n\n // Cache latest block header state root\n\n if state.latest_block_header.state_root == Hash256::zero() {\n\n state.latest_block_header.state_root = previous_state_root;\n\n }\n\n\n\n // Cache block root\n\n let latest_block_root = state.latest_block_header.canonical_root();\n", "file_path": "eth2/state_processing/src/per_slot_processing.rs", "rank": 86, "score": 181265.1487707312 }, { "content": "#[test]\n\nfn does_not_finalize_without_attestation() {\n\n let num_blocks_produced = MinimalEthSpec::slots_per_epoch() * 5;\n\n\n\n let harness = get_harness(VALIDATOR_COUNT);\n\n\n\n harness.extend_chain(\n\n num_blocks_produced as usize,\n\n BlockStrategy::OnCanonicalHead,\n\n AttestationStrategy::SomeValidators(vec![]),\n\n );\n\n\n\n let state = &harness.chain.head().beacon_state;\n\n\n\n assert_eq!(\n\n state.slot, num_blocks_produced,\n\n \"head should be at the current slot\"\n\n );\n\n assert_eq!(\n\n state.current_epoch(),\n\n num_blocks_produced / MinimalEthSpec::slots_per_epoch(),\n", "file_path": "beacon_node/beacon_chain/tests/tests.rs", "rank": 87, "score": 181138.37713612133 }, { "content": "#[test]\n\nfn finalizes_with_full_participation() {\n\n let num_blocks_produced = MinimalEthSpec::slots_per_epoch() * 5;\n\n\n\n let harness = 
get_harness(VALIDATOR_COUNT);\n\n\n\n harness.extend_chain(\n\n num_blocks_produced as usize,\n\n BlockStrategy::OnCanonicalHead,\n\n AttestationStrategy::AllValidators,\n\n );\n\n\n\n let state = &harness.chain.head().beacon_state;\n\n\n\n assert_eq!(\n\n state.slot, num_blocks_produced,\n\n \"head should be at the current slot\"\n\n );\n\n assert_eq!(\n\n state.current_epoch(),\n\n num_blocks_produced / MinimalEthSpec::slots_per_epoch(),\n", "file_path": "beacon_node/beacon_chain/tests/tests.rs", "rank": 88, "score": 181138.37713612133 }, { "content": "#[test]\n\nfn attestations_with_increasing_slots() {\n\n let num_blocks_produced = MinimalEthSpec::slots_per_epoch() * 5;\n\n\n\n let harness = get_harness(VALIDATOR_COUNT);\n\n\n\n let mut attestations = vec![];\n\n\n\n for _ in 0..num_blocks_produced {\n\n harness.extend_chain(\n\n 2,\n\n BlockStrategy::OnCanonicalHead,\n\n // Don't produce & include any attestations (we'll collect them later).\n\n AttestationStrategy::SomeValidators(vec![]),\n\n );\n\n\n\n attestations.append(&mut harness.get_free_attestations(\n\n &AttestationStrategy::AllValidators,\n\n &harness.chain.head().beacon_state,\n\n harness.chain.head().beacon_block_root,\n\n harness.chain.head().beacon_block.slot,\n", "file_path": "beacon_node/beacon_chain/tests/tests.rs", "rank": 89, "score": 180960.49746178975 }, { "content": "fn run_skip_slot_test(skip_slots: u64) {\n\n let num_validators = 8;\n\n let harness_a = get_harness(num_validators);\n\n let harness_b = get_harness(num_validators);\n\n\n\n for _ in 0..skip_slots {\n\n harness_a.advance_slot();\n\n harness_b.advance_slot();\n\n }\n\n\n\n harness_a.extend_chain(\n\n 1,\n\n BlockStrategy::OnCanonicalHead,\n\n // No attestation required for test.\n\n AttestationStrategy::SomeValidators(vec![]),\n\n );\n\n\n\n assert_eq!(\n\n harness_a.chain.head().beacon_block.slot,\n\n Slot::new(skip_slots + 1)\n", "file_path": "beacon_node/beacon_chain/tests/tests.rs", "rank": 90, "score": 
180307.32151694357 }, { "content": "/// Returns the unix-epoch seconds at the start of the given `slot`.\n\nfn slot_start_seconds<T: EthSpec>(\n\n genesis_unix_seconds: u64,\n\n milliseconds_per_slot: u64,\n\n slot: Slot,\n\n) -> u64 {\n\n genesis_unix_seconds + slot.as_u64() * milliseconds_per_slot / 1_000\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use environment::null_logger;\n\n use types::{test_utils::DepositTestTask, MinimalEthSpec};\n\n\n\n type E = MinimalEthSpec;\n\n\n\n fn get_eth1_data(i: u64) -> Eth1Data {\n\n Eth1Data {\n\n block_hash: Hash256::from_low_u64_be(i),\n\n deposit_root: Hash256::from_low_u64_be(u64::max_value() - i),\n", "file_path": "beacon_node/beacon_chain/src/eth1_chain.rs", "rank": 91, "score": 180107.78858001073 }, { "content": "/// Compute an approximate maximum cover using a greedy algorithm.\n\n///\n\n/// * Time complexity: `O(limit * items_iter.len())`\n\n/// * Space complexity: `O(item_iter.len())`\n\npub fn maximum_cover<I, T>(items_iter: I, limit: usize) -> Vec<T::Object>\n\nwhere\n\n I: IntoIterator<Item = T>,\n\n T: MaxCover,\n\n{\n\n // Construct an initial vec of all items, marked available.\n\n let mut all_items: Vec<_> = items_iter\n\n .into_iter()\n\n .map(MaxCoverItem::new)\n\n .filter(|x| x.item.score() != 0)\n\n .collect();\n\n\n\n let mut result = vec![];\n\n\n\n for _ in 0..limit {\n\n // Select the item with the maximum score.\n\n let (best_item, best_cover) = match all_items\n\n .iter_mut()\n\n .filter(|x| x.available && x.item.score() != 0)\n\n .max_by_key(|x| x.item.score())\n", "file_path": "eth2/operation_pool/src/max_cover.rs", "rank": 92, "score": 179340.32129951936 }, { "content": "/// Verify `Deposit.pubkey` signed `Deposit.signature`.\n\n///\n\n/// Spec v0.9.1\n\npub fn verify_deposit_signature(deposit_data: &DepositData, spec: &ChainSpec) -> Result<()> {\n\n let deposit_signature_message = deposit_pubkey_signature_message(&deposit_data)\n\n .ok_or_else(|| 
error(DepositInvalid::BadBlsBytes))?;\n\n\n\n verify!(\n\n deposit_signature_set(&deposit_signature_message, spec).is_valid(),\n\n DepositInvalid::BadSignature\n\n );\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "eth2/state_processing/src/per_block_processing/verify_deposit.rs", "rank": 93, "score": 178751.78918032034 }, { "content": "#[test]\n\nfn finalizes_with_two_thirds_participation() {\n\n let num_blocks_produced = MinimalEthSpec::slots_per_epoch() * 5;\n\n\n\n let harness = get_harness(VALIDATOR_COUNT);\n\n\n\n let two_thirds = (VALIDATOR_COUNT / 3) * 2;\n\n let attesters = (0..two_thirds).collect();\n\n\n\n harness.extend_chain(\n\n num_blocks_produced as usize,\n\n BlockStrategy::OnCanonicalHead,\n\n AttestationStrategy::SomeValidators(attesters),\n\n );\n\n\n\n let state = &harness.chain.head().beacon_state;\n\n\n\n assert_eq!(\n\n state.slot, num_blocks_produced,\n\n \"head should be at the current slot\"\n\n );\n", "file_path": "beacon_node/beacon_chain/tests/tests.rs", "rank": 94, "score": 178476.01719442348 }, { "content": "#[test]\n\nfn finalizes_after_resuming_from_db() {\n\n let validator_count = 16;\n\n let num_blocks_produced = MinimalEthSpec::slots_per_epoch() * 8;\n\n let first_half = num_blocks_produced / 2;\n\n\n\n let db_path = tempdir().unwrap();\n\n let store = get_store(&db_path);\n\n\n\n let harness = BeaconChainHarness::new_with_disk_store(\n\n MinimalEthSpec,\n\n store.clone(),\n\n KEYPAIRS[0..validator_count].to_vec(),\n\n );\n\n\n\n harness.advance_slot();\n\n\n\n harness.extend_chain(\n\n first_half as usize,\n\n BlockStrategy::OnCanonicalHead,\n\n AttestationStrategy::AllValidators,\n", "file_path": "beacon_node/beacon_chain/tests/persistence_tests.rs", "rank": 95, "score": 178476.01719442348 }, { "content": "#[test]\n\nfn split_slot_restore() {\n\n let db_path = tempdir().unwrap();\n\n\n\n let split_slot = {\n\n let store = get_store(&db_path);\n\n let harness = get_harness(store.clone(), VALIDATOR_COUNT);\n\n\n\n let num_blocks = 4 * 
E::slots_per_epoch();\n\n\n\n harness.extend_chain(\n\n num_blocks as usize,\n\n BlockStrategy::OnCanonicalHead,\n\n AttestationStrategy::AllValidators,\n\n );\n\n\n\n store.get_split_slot()\n\n };\n\n assert_ne!(split_slot, Slot::new(0));\n\n\n\n // Re-open the store\n\n let store = get_store(&db_path);\n\n\n\n assert_eq!(store.get_split_slot(), split_slot);\n\n}\n\n\n", "file_path": "beacon_node/beacon_chain/tests/store_tests.rs", "rank": 96, "score": 178301.360176957 }, { "content": "#[test]\n\nfn does_not_finalize_with_less_than_two_thirds_participation() {\n\n let num_blocks_produced = MinimalEthSpec::slots_per_epoch() * 5;\n\n\n\n let harness = get_harness(VALIDATOR_COUNT);\n\n\n\n let two_thirds = (VALIDATOR_COUNT / 3) * 2;\n\n let less_than_two_thirds = two_thirds - 1;\n\n let attesters = (0..less_than_two_thirds).collect();\n\n\n\n harness.extend_chain(\n\n num_blocks_produced as usize,\n\n BlockStrategy::OnCanonicalHead,\n\n AttestationStrategy::SomeValidators(attesters),\n\n );\n\n\n\n let state = &harness.chain.head().beacon_state;\n\n\n\n assert_eq!(\n\n state.slot, num_blocks_produced,\n\n \"head should be at the current slot\"\n", "file_path": "beacon_node/beacon_chain/tests/tests.rs", "rank": 97, "score": 175908.4089048647 }, { "content": "#[test]\n\nfn produces_and_processes_with_genesis_skip_slots() {\n\n for i in 0..MinimalEthSpec::slots_per_epoch() * 4 {\n\n run_skip_slot_test(i)\n\n }\n\n}\n", "file_path": "beacon_node/beacon_chain/tests/tests.rs", "rank": 98, "score": 175736.8598520095 }, { "content": "/// Determine whether a candidate genesis state is suitable for starting the chain.\n\n///\n\n/// Spec v0.9.1\n\npub fn is_valid_genesis_state<T: EthSpec>(state: &BeaconState<T>, spec: &ChainSpec) -> bool {\n\n state.genesis_time >= spec.min_genesis_time\n\n && state.get_active_validator_indices(T::genesis_epoch()).len() as u64\n\n >= spec.min_genesis_active_validator_count\n\n}\n\n\n", "file_path": "eth2/state_processing/src/genesis.rs", 
"rank": 99, "score": 174426.51802589122 } ]
Rust
truck-meshalgo/src/analyzers/collision.rs
leomcelroy/truck
081a6938f479b37f3516c3b380ce69e403f64d42
use super::*; pub trait Collision { fn collide_with(&self, other: &PolygonMesh) -> Option<(Point3, Point3)>; fn extract_interference(&self, other: &PolygonMesh) -> Vec<(Point3, Point3)>; } impl Collision for PolygonMesh { #[inline(always)] fn collide_with(&self, other: &PolygonMesh) -> Option<(Point3, Point3)> { are_colliding(self, other) } #[inline(always)] fn extract_interference(&self, other: &PolygonMesh) -> Vec<(Point3, Point3)> { collision(self, other) } } #[repr(u8)] #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] enum EndPointType { Front, Back, } #[derive(Clone, Copy, Debug, PartialEq, PartialOrd)] struct EndPoint { entity: f64, r#type: EndPointType, segnum: usize, index: usize, } impl EndPoint { #[inline(always)] fn new(entity: f64, r#type: EndPointType, segnum: usize, index: usize) -> EndPoint { EndPoint { entity, r#type, segnum, index, } } #[inline(always)] fn from_seg(seg: (f64, f64), segnum: usize, index: usize) -> Vec<EndPoint> { vec![ EndPoint::new(seg.0, EndPointType::Front, segnum, index), EndPoint::new(seg.1, EndPointType::Back, segnum, index), ] } } fn take_one_unit() -> Vector3 { loop { let normal = Vector3::new( 2.0 * rand::random::<f64>() - 1.0, 2.0 * rand::random::<f64>() - 1.0, 2.0 * rand::random::<f64>() - 1.0, ); if !normal.so_small() { return normal.normalize(); } } } fn tri_to_seg(tri: [Point3; 3], unit: Vector3) -> (f64, f64) { let a = tri[0].to_vec().dot(unit); let b = tri[1].to_vec().dot(unit); let c = tri[2].to_vec().dot(unit); (f64::min(f64::min(a, b), c), f64::max(f64::max(a, b), c)) } fn sorted_endpoints<I, J>(iter0: I, iter1: J) -> Vec<EndPoint> where I: IntoIterator<Item = [Point3; 3]>, J: IntoIterator<Item = [Point3; 3]>, { let unit = take_one_unit(); let mut res: Vec<EndPoint> = iter0 .into_iter() .enumerate() .filter(|(_, tri)| !(tri[1] - tri[0]).cross(tri[2] - tri[0]).so_small()) .flat_map(|(i, tri)| EndPoint::from_seg(tri_to_seg(tri, unit), 0, i)) .chain( iter1 .into_iter() .enumerate() .filter(|(_, tri)| 
!(tri[1] - tri[0]).cross(tri[2] - tri[0]).so_small()) .flat_map(|(i, tri)| EndPoint::from_seg(tri_to_seg(tri, unit), 1, i)), ) .collect(); res.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Greater)); res } fn colliding_segment_pairs(sort_endpoints: Vec<EndPoint>) -> impl Iterator<Item = (usize, usize)> { let mut current = [Vec::<usize>::new(), Vec::<usize>::new()]; sort_endpoints .into_iter() .filter_map( move |EndPoint { r#type, segnum, index, .. }| match r#type { EndPointType::Front => { current[segnum].push(index); Some(current[1 - segnum].clone().into_iter().map(move |i| { if segnum == 0 { (index, i) } else { (i, index) } })) } EndPointType::Back => { let i = current[segnum] .iter() .enumerate() .find(|(_, idx)| **idx == index) .unwrap() .0; current[segnum].swap_remove(i); None } }, ) .flatten() } fn disjoint_bdbs(tri0: [Point3; 3], tri1: [Point3; 3]) -> bool { let bdb0: BoundingBox<Point3> = tri0.iter().collect(); let bdb1: BoundingBox<Point3> = tri1.iter().collect(); bdb0.max()[0] < bdb1.min()[0] || bdb1.max()[0] < bdb0.min()[0] || bdb0.max()[1] < bdb1.min()[1] || bdb1.max()[1] < bdb0.min()[1] || bdb0.max()[2] < bdb1.min()[2] || bdb1.max()[2] < bdb0.min()[2] } fn collide_seg_triangle(seg: [Point3; 2], tri: [Point3; 3]) -> Option<Point3> { let ab = tri[1] - tri[0]; let bc = tri[2] - tri[1]; let ca = tri[0] - tri[2]; let nor = ab.cross(ca); if nor.so_small() { return None; } let ap = seg[0] - tri[0]; let aq = seg[1] - tri[0]; let dotapnor = ap.dot(nor); let dotaqnor = aq.dot(nor); if dotapnor * dotaqnor > 0.0 { return None; } let h = seg[0] + dotapnor / (dotapnor - dotaqnor) * (seg[1] - seg[0]); if f64::signum(ab.cross(nor).dot(h - tri[0]) + TOLERANCE2) + f64::signum(bc.cross(nor).dot(h - tri[1]) + TOLERANCE2) + f64::signum(ca.cross(nor).dot(h - tri[2]) + TOLERANCE2) >= 2.0 { Some(h) } else { None } } fn collide_triangles(tri0: [Point3; 3], tri1: [Point3; 3]) -> Option<(Point3, Point3)> { let mut tuple = (None, None); [ 
collide_seg_triangle([tri0[0], tri0[1]], tri1), collide_seg_triangle([tri0[1], tri0[2]], tri1), collide_seg_triangle([tri0[2], tri0[0]], tri1), collide_seg_triangle([tri1[0], tri1[1]], tri0), collide_seg_triangle([tri1[1], tri1[2]], tri0), collide_seg_triangle([tri1[2], tri1[0]], tri0), ] .iter() .for_each(|pt| match tuple { (None, _) => tuple.0 = *pt, (Some(_), None) => tuple.1 = *pt, (Some(ref mut p), Some(ref mut q)) => { if let Some(pt) = pt { let dist0 = pt.distance2(*p); let dist1 = pt.distance2(*q); let dist2 = p.distance2(*q); if dist2 < dist0 { *q = *pt; } else if dist2 < dist1 { *p = *pt; } } } }); match tuple { (Some(a), Some(b)) => Some((a, b)), _ => None, } } fn collision(poly0: &PolygonMesh, poly1: &PolygonMesh) -> Vec<(Point3, Point3)> { let tris0 = poly0.faces().triangle_iter().collect::<Vec<_>>(); let tris1 = poly1.faces().triangle_iter().collect::<Vec<_>>(); let iter0 = tris0.iter().map(|face| { [ poly0.positions()[face[0].pos], poly0.positions()[face[1].pos], poly0.positions()[face[2].pos], ] }); let iter1 = tris1.iter().map(|face| { [ poly1.positions()[face[0].pos], poly1.positions()[face[1].pos], poly1.positions()[face[2].pos], ] }); colliding_segment_pairs(sorted_endpoints(iter0, iter1)) .filter_map(|(idx0, idx1)| { let face0 = tris0[idx0]; let tri0 = [ poly0.positions()[face0[0].pos], poly0.positions()[face0[1].pos], poly0.positions()[face0[2].pos], ]; let face1 = tris1[idx1]; let tri1 = [ poly1.positions()[face1[0].pos], poly1.positions()[face1[1].pos], poly1.positions()[face1[2].pos], ]; if disjoint_bdbs(tri0, tri1) { None } else { collide_triangles(tri0, tri1) } }) .collect() } fn are_colliding(poly0: &PolygonMesh, poly1: &PolygonMesh) -> Option<(Point3, Point3)> { let tris0 = poly0.faces().triangle_iter().collect::<Vec<_>>(); let tris1 = poly1.faces().triangle_iter().collect::<Vec<_>>(); let iter0 = tris0.iter().map(|face| { [ poly0.positions()[face[0].pos], poly0.positions()[face[1].pos], poly0.positions()[face[2].pos], ] }); let iter1 = 
tris1.iter().map(|face| { [ poly1.positions()[face[0].pos], poly1.positions()[face[1].pos], poly1.positions()[face[2].pos], ] }); colliding_segment_pairs(sorted_endpoints(iter0, iter1)).find_map(|(idx0, idx1)| { let face0 = tris0[idx0]; let tri0 = [ poly0.positions()[face0[0].pos], poly0.positions()[face0[1].pos], poly0.positions()[face0[2].pos], ]; let face1 = tris1[idx1]; let tri1 = [ poly1.positions()[face1[0].pos], poly1.positions()[face1[1].pos], poly1.positions()[face1[2].pos], ]; if disjoint_bdbs(tri0, tri1) { None } else { collide_triangles(tri0, tri1) } }) } #[test] fn collide_triangles_test() { let tri0 = [ Point3::origin(), Point3::new(1.0, 0.0, 0.0), Point3::new(0.0, 1.0, 0.0), ]; let tri1 = [ Point3::new(0.0, 0.0, -1.0), Point3::new(-1.0, -1.0, 1.0), Point3::new(1.0, 1.0, 1.0), ]; assert!(collide_triangles(tri0, tri1).is_some()); let tri0 = [ Point3::new(0.0, 0.0, 0.0), Point3::new(1.0, 0.0, 0.0), Point3::new(0.0, 1.0, 0.0), ]; let tri1 = [ Point3::new(0.0, 0.0, 0.5), Point3::new(1.0, 0.0, 1.0), Point3::new(1.0, 1.0, 1.0), ]; assert!(collide_triangles(tri0, tri1).is_none()); }
use super::*; pub trait Collision { fn collide_with(&self, other: &PolygonMesh) -> Option<(Point3, Point3)>; fn extract_interference(&self, other: &PolygonMesh) -> Vec<(Point3, Point3)>; } impl Collision for PolygonMesh { #[inline(always)] fn collide_with(&self, other: &PolygonMesh) -> Option<(Point3, Point3)> { are_colliding(self, other) } #[inline(always)] fn extract_interference(&self, other: &PolygonMesh) -> Vec<(Point3, Point3)> { collision(self, other) } } #[repr(u8)] #[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] enum EndPointType { Front, Back, } #[derive(Clone, Copy, Debug, PartialEq, PartialOrd)] struct EndPoint { entity: f64, r#type: EndPointType, segnum: usize, index: usize, } impl EndPoint { #[inline(always)] fn new(entity: f64, r#type: EndPointType, segnum: usize, index: usize) -> EndPoint { EndPoint { entity, r#type, segnum, index, } } #[inline(always)] fn from_seg(seg: (f64, f64), segnum: usize, index: usize) -> Vec<EndPoint> { vec![ EndPoint::new(seg.0, EndPointType::Front, segnum, index), EndPoint::new(seg.1, EndPointType::Back, segnum, index), ] } } fn take_one_unit() -> Vector3 { loop { let normal = Vector3::new( 2.0 * rand::random::<f64>() - 1.0, 2.0 * rand::random::<f64>() - 1.0, 2.0 * rand::random::<f64>() - 1.0, ); if !normal.so_small() { return normal.normalize(); } } } fn tri_to_seg(tri: [Point3; 3], unit: Vector3) -> (f64, f64) { let a = tri[0].to_vec().dot(unit); let b = tri[1].to_vec().dot(unit); let c = tri[2].to_vec().dot(unit); (f64::min(f64::min(a, b), c), f64::max(f64::max(a, b), c)) } fn sorted_endpoints<I, J>(iter0: I, iter1: J) -> Vec<EndPoint> where I: IntoIterator<Item = [Point3; 3]>, J: IntoIterator<Item = [Point3; 3]>, { let unit = take_one_unit(); let mut res: Vec<EndPoint> = iter0 .into_iter() .enumerate() .filter(|(_, tri)| !(tri[1] - tri[0]).cross(tri[2] - tri[0]).so_small()) .flat_map(|(i, tri)| EndPoint::from_seg(tri_to_seg(tri, unit), 0, i)) .chain( iter1 .into_iter() .enumerate() .filter(|(_, tri)| 
!(tri[1] - tri[0]).cross(tri[2] - tri[0]).so_small()) .flat_map(|(i, tri)| EndPoint::from_seg(tri_to_seg(tri, unit), 1, i)), ) .collect(); res.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Greater)); res } fn colliding_segment_pairs(sort_endpoints: Vec<EndPoint>) -> impl Iterator<Item = (usize, usize)> { let mut current = [Vec::<usize>::new(), Vec::<usize>::new()]; sort_endpoints .into_iter() .filter_map( move |EndPoint { r#type, segnum, index, .. }| match r#type { EndPointType::Front => { current[segnum].push(index); Some(current[1 - segnum].clone().into_iter().map(move |i| { if segnum == 0 { (index, i) } else { (i, index) } })) } EndPointType::Back => { let i = current[segnum] .iter() .enumerate() .find(|(_, idx)| **idx == index) .unwrap() .0; current[segnum].swap_remove(i); None } }, ) .flatten() } fn disjoint_bdbs(tri0: [Point3; 3], tri1: [Point3; 3]) -> bool { let bdb0: BoundingBox<Point3> = tri0.iter().collect(); let bdb1: BoundingBox<Point3> = tri1.iter().collect(); bdb0.max()[0] < bdb1.min()[0] || bdb1.max()[0] < bdb0.min()[0] || bdb0.max()[1] < bdb1.min()[1] || bdb1.max()[1] < bdb0.min()[1] || bdb0.max()[2] < bdb1.min()[2] || bdb1.max()[2] < bdb0.min()[2] } fn collide_seg_triangle(seg: [Point3; 2], tri: [Point3; 3]) -> Option<Point3> { let ab = tri[1] - tri[0]; let bc = tri[2] - tri[1]; let ca = tri[0] - tri[2]; let nor = ab.cross(ca); if nor.so_small() { return None; } let ap = seg[0] - tri[0]; let aq = seg[1] - tri[0]; let dotapnor = ap.dot(nor); let dotaqnor = aq.dot(nor); if dotapnor * dotaqnor > 0.0 { return None; } let h = seg[0] + dotapnor / (dotapnor - dotaqnor) * (seg[1] - seg[0]); if f64::signum(ab.cross(nor).dot(h - tri[0]) + TOLERANCE2) + f64::signum(bc.cross(nor).dot(h - tri[1]) + TOLERANCE2) + f64::signum(ca.cross(nor).dot(h - tri[2]) + TOLERANCE2) >= 2.0 { Some(h) } else { None } } fn collide_triangles(tri0: [Point3; 3], tri1: [Point3; 3]) -> Option<(Point3, Point3)> { let mut tuple = (None, None); [ 
collide_seg_triangle([tri0[0], tri0[1]], tri1), collide_seg_triangle([tri0[1], tri0[2]], tri1), collide_seg_triangle([tri0[2], tri0[0]], tri1),
each(|pt| match tuple { (None, _) => tuple.0 = *pt, (Some(_), None) => tuple.1 = *pt, (Some(ref mut p), Some(ref mut q)) => { if let Some(pt) = pt { let dist0 = pt.distance2(*p); let dist1 = pt.distance2(*q); let dist2 = p.distance2(*q); if dist2 < dist0 { *q = *pt; } else if dist2 < dist1 { *p = *pt; } } } }); match tuple { (Some(a), Some(b)) => Some((a, b)), _ => None, } } fn collision(poly0: &PolygonMesh, poly1: &PolygonMesh) -> Vec<(Point3, Point3)> { let tris0 = poly0.faces().triangle_iter().collect::<Vec<_>>(); let tris1 = poly1.faces().triangle_iter().collect::<Vec<_>>(); let iter0 = tris0.iter().map(|face| { [ poly0.positions()[face[0].pos], poly0.positions()[face[1].pos], poly0.positions()[face[2].pos], ] }); let iter1 = tris1.iter().map(|face| { [ poly1.positions()[face[0].pos], poly1.positions()[face[1].pos], poly1.positions()[face[2].pos], ] }); colliding_segment_pairs(sorted_endpoints(iter0, iter1)) .filter_map(|(idx0, idx1)| { let face0 = tris0[idx0]; let tri0 = [ poly0.positions()[face0[0].pos], poly0.positions()[face0[1].pos], poly0.positions()[face0[2].pos], ]; let face1 = tris1[idx1]; let tri1 = [ poly1.positions()[face1[0].pos], poly1.positions()[face1[1].pos], poly1.positions()[face1[2].pos], ]; if disjoint_bdbs(tri0, tri1) { None } else { collide_triangles(tri0, tri1) } }) .collect() } fn are_colliding(poly0: &PolygonMesh, poly1: &PolygonMesh) -> Option<(Point3, Point3)> { let tris0 = poly0.faces().triangle_iter().collect::<Vec<_>>(); let tris1 = poly1.faces().triangle_iter().collect::<Vec<_>>(); let iter0 = tris0.iter().map(|face| { [ poly0.positions()[face[0].pos], poly0.positions()[face[1].pos], poly0.positions()[face[2].pos], ] }); let iter1 = tris1.iter().map(|face| { [ poly1.positions()[face[0].pos], poly1.positions()[face[1].pos], poly1.positions()[face[2].pos], ] }); colliding_segment_pairs(sorted_endpoints(iter0, iter1)).find_map(|(idx0, idx1)| { let face0 = tris0[idx0]; let tri0 = [ poly0.positions()[face0[0].pos], 
poly0.positions()[face0[1].pos], poly0.positions()[face0[2].pos], ]; let face1 = tris1[idx1]; let tri1 = [ poly1.positions()[face1[0].pos], poly1.positions()[face1[1].pos], poly1.positions()[face1[2].pos], ]; if disjoint_bdbs(tri0, tri1) { None } else { collide_triangles(tri0, tri1) } }) } #[test] fn collide_triangles_test() { let tri0 = [ Point3::origin(), Point3::new(1.0, 0.0, 0.0), Point3::new(0.0, 1.0, 0.0), ]; let tri1 = [ Point3::new(0.0, 0.0, -1.0), Point3::new(-1.0, -1.0, 1.0), Point3::new(1.0, 1.0, 1.0), ]; assert!(collide_triangles(tri0, tri1).is_some()); let tri0 = [ Point3::new(0.0, 0.0, 0.0), Point3::new(1.0, 0.0, 0.0), Point3::new(0.0, 1.0, 0.0), ]; let tri1 = [ Point3::new(0.0, 0.0, 0.5), Point3::new(1.0, 0.0, 1.0), Point3::new(1.0, 1.0, 1.0), ]; assert!(collide_triangles(tri0, tri1).is_none()); }
collide_seg_triangle([tri1[0], tri1[1]], tri0), collide_seg_triangle([tri1[1], tri1[2]], tri0), collide_seg_triangle([tri1[2], tri1[0]], tri0), ] .iter() .for_
function_block-random_span
[ { "content": "fn tri_to_seg(tri: [Point3; 3], unit: Vector3, tol: f64) -> (f64, f64) {\n\n let a = tri[0].to_vec().dot(unit);\n\n let b = tri[1].to_vec().dot(unit);\n\n let c = tri[2].to_vec().dot(unit);\n\n (\n\n f64::min(f64::min(a, b), c) - tol,\n\n f64::max(f64::max(a, b), c) + tol,\n\n )\n\n}\n\n\n", "file_path": "truck-meshalgo/src/analyzers/point_cloud/sort_end_points.rs", "rank": 1, "score": 412358.87283872196 }, { "content": "fn sorted_endpoints<'a, I, J>(iter0: I, iter1: J, tol: f64) -> Vec<EndPoint>\n\nwhere\n\n I: IntoIterator<Item = [Point3; 3]>,\n\n J: IntoIterator<Item = &'a Point3>, {\n\n let unit = take_one_unit();\n\n let mut res: Vec<EndPoint> = iter0\n\n .into_iter()\n\n .enumerate()\n\n .filter(|(_, tri)| !(tri[1] - tri[0]).cross(tri[2] - tri[0]).so_small())\n\n .flat_map(|(i, tri)| EndPoint::from_seg(tri_to_seg(tri, unit, tol), i))\n\n .chain(\n\n iter1\n\n .into_iter()\n\n .enumerate()\n\n .map(|(i, point)| EndPoint::new(point.to_vec().dot(unit), EndPointType::Middle, i)),\n\n )\n\n .collect();\n\n res.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Greater));\n\n res\n\n}\n\n\n", "file_path": "truck-meshalgo/src/analyzers/point_cloud/sort_end_points.rs", "rank": 2, "score": 402039.7572518372 }, { "content": "/// Creates the curve division\n\n/// \n\n/// # Panics\n\n/// \n\n/// `tol` must be more than `TOLERANCE`.\n\npub fn parameter_division<C>(curve: &C, range: (f64, f64), tol: f64) -> (Vec<f64>, Vec<C::Point>)\n\nwhere\n\n C: ParametricCurve,\n\n C::Point: EuclideanSpace<Scalar = f64> + MetricSpace<Metric = f64>, {\n\n nonpositive_tolerance!(tol);\n\n sub_parameter_division(\n\n curve,\n\n range,\n\n (curve.subs(range.0), curve.subs(range.1)),\n\n tol,\n\n 100,\n\n )\n\n}\n\n\n", "file_path": "truck-geotrait/src/algo/curve.rs", "rank": 5, "score": 357038.39569826296 }, { "content": "/// Defines a tolerance in the whole package\n\npub trait Tolerance: AbsDiffEq<Epsilon = f64> + Debug {\n\n /// The \"distance\" is less than 
`TOLERANCE`.\n\n fn near(&self, other: &Self) -> bool { self.abs_diff_eq(other, TOLERANCE) }\n\n\n\n /// The \"distance\" is less than `TOLERANCR2`.\n\n fn near2(&self, other: &Self) -> bool { self.abs_diff_eq(other, TOLERANCE2) }\n\n}\n\n\n\nimpl<T: AbsDiffEq<Epsilon = f64> + Debug> Tolerance for T {}\n\n\n\n/// assert near\n\n#[macro_export]\n\nmacro_rules! assert_near {\n\n ($left: expr, $right: expr $(,)?) => {\n\n assert!($left.near(&$right), \"assertion failed: `left` is near `right`\n\nleft: {:?},\n\nright: {:?}\", $left, $right)\n\n };\n\n ($left: expr, $right: expr, $($arg: tt)+) => {\n\n assert!($left.near(&$right), \"assertion failed: `left` is near `right`\n\nleft: {:?},\n\nright: {:?}: {}\", $left, $right, format_args!($($arg)+))\n\n };\n\n}\n\n\n", "file_path": "truck-base/src/tolerance.rs", "rank": 6, "score": 348772.12717919715 }, { "content": "fn is_in_the_plane(positions: &[Point3], normals: &[Vector3], face: &[Vertex], tol2: f64) -> bool {\n\n let n = FaceNormal::new(positions, face, 0).normal;\n\n for v in face {\n\n if let Some(nor) = v.nor {\n\n if n.distance2(normals[nor]) < tol2 {\n\n return true;\n\n }\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "truck-meshalgo/src/analyzers/splitting.rs", "rank": 7, "score": 332887.1817374001 }, { "content": "/// Divides the domain into equal parts, examines all the values, and returns `t` such that `curve.subs(t)` is closest to `point`.\n\n/// This method is useful to get an efficient hint of `search_nearest_parameter`.\n\npub fn presearch<C>(curve: &C, point: C::Point, range: (f64, f64), division: usize) -> f64\n\nwhere\n\n C: ParametricCurve,\n\n C::Point: MetricSpace<Metric = f64> + Copy, {\n\n let (t0, t1) = range;\n\n let mut res = t0;\n\n let mut min = std::f64::INFINITY;\n\n for i in 0..=division {\n\n let p = i as f64 / division as f64;\n\n let t = t0 * (1.0 - p) + t1 * p;\n\n let dist = curve.subs(t).distance2(point);\n\n if dist < min {\n\n min = dist;\n\n res = t;\n\n }\n\n }\n\n 
res\n\n}\n\n\n", "file_path": "truck-geotrait/src/algo/curve.rs", "rank": 9, "score": 315385.60323388746 }, { "content": "/// Searches the parameter by Newton's method.\n\npub fn search_parameter<C>(curve: &C, point: C::Point, hint: f64, trials: usize) -> Option<f64>\n\nwhere\n\n C: ParametricCurve,\n\n C::Point: EuclideanSpace<Scalar = f64, Diff = C::Vector>,\n\n C::Vector: InnerSpace<Scalar = f64> + Tolerance, {\n\n search_nearest_parameter(curve, point, hint, trials).and_then(|t| {\n\n match point.to_vec().near(&curve.subs(t).to_vec()) {\n\n true => Some(t),\n\n false => None,\n\n }\n\n })\n\n}\n\n\n", "file_path": "truck-geotrait/src/algo/curve.rs", "rank": 11, "score": 299118.7753687851 }, { "content": "pub fn sphere(center: Point3, radius: f64, udiv: usize, vdiv: usize) -> PolygonMesh {\n\n let positions = (0..udiv)\n\n .flat_map(move |i| {\n\n (0..vdiv).map(move |j| {\n\n let u = 2.0 * PI * i as f64 / udiv as f64;\n\n let v = PI * j as f64 / (vdiv - 1) as f64;\n\n center + radius * Vector3::new(u.cos() * v.sin(), u.sin() * v.sin(), v.cos())\n\n })\n\n })\n\n .collect::<Vec<_>>();\n\n let faces = Faces::from_iter((0..udiv).flat_map(move |i| {\n\n (0..vdiv - 1).map(move |j| {\n\n [\n\n i * vdiv + j,\n\n i * vdiv + (j + 1) % vdiv,\n\n (i + 1) % udiv * vdiv + (j + 1) % vdiv,\n\n (i + 1) % udiv * vdiv + j,\n\n ]\n\n })\n\n }));\n\n PolygonMesh::new(\n\n StandardAttributes {\n\n positions,\n\n ..Default::default()\n\n },\n\n faces,\n\n )\n\n}\n", "file_path": "truck-meshalgo/tests/common/shapes.rs", "rank": 12, "score": 295389.52528110944 }, { "content": "fn sub_parameter_division<S>(surface: &S, (udiv, vdiv): (&mut Vec<f64>, &mut Vec<f64>), tol: f64)\n\nwhere\n\n S: ParametricSurface,\n\n S::Point: EuclideanSpace<Scalar = f64> + MetricSpace<Metric = f64>, {\n\n let mut divide_flag0 = vec![false; udiv.len() - 1];\n\n let mut divide_flag1 = vec![false; vdiv.len() - 1];\n\n\n\n for (u, ub) in udiv.windows(2).zip(&mut divide_flag0) {\n\n for (v, vb) in 
vdiv.windows(2).zip(&mut divide_flag1) {\n\n let p = 0.5 + (0.2 * rand::random::<f64>() - 0.1);\n\n let q = 0.5 + (0.2 * rand::random::<f64>() - 0.1);\n\n let pt00 = surface.subs(u[0], v[0]);\n\n let pt01 = surface.subs(u[0], v[1]);\n\n let pt10 = surface.subs(u[1], v[0]);\n\n let pt11 = surface.subs(u[1], v[1]);\n\n let pt = S::Point::from_vec(\n\n pt00.to_vec() * (1.0 - p) * (1.0 - q)\n\n + pt01.to_vec() * (1.0 - p) * q\n\n + pt10.to_vec() * p * (1.0 - q)\n\n + pt11.to_vec() * p * q,\n", "file_path": "truck-geotrait/src/algo/surface.rs", "rank": 13, "score": 287888.22590360534 }, { "content": "fn all_nor_mut(faces: &mut Faces) -> impl Iterator<Item = &mut usize> {\n\n faces\n\n .face_iter_mut()\n\n .flatten()\n\n .filter_map(move |v| v.nor.as_mut())\n\n}\n\n\n\nimpl OptimizingFilter for PolygonMesh {\n\n fn remove_unused_attrs(&mut self) -> &mut Self {\n\n let mut mesh = self.debug_editor();\n\n let PolygonMeshEditor {\n\n attributes:\n\n StandardAttributes {\n\n positions,\n\n uv_coords,\n\n normals,\n\n },\n\n faces,\n\n ..\n\n } = &mut mesh;\n", "file_path": "truck-meshalgo/src/filters/optimizing.rs", "rank": 14, "score": 286423.7198568573 }, { "content": "#[inline(always)]\n\npub fn bezier(vertex0: &Vertex, vertex1: &Vertex, mut inter_points: Vec<Point3>) -> Edge {\n\n let pt0 = vertex0.get_point();\n\n let pt1 = vertex1.get_point();\n\n let mut pre_ctrl_pts = vec![pt0];\n\n pre_ctrl_pts.append(&mut inter_points);\n\n pre_ctrl_pts.push(pt1);\n\n let ctrl_pts: Vec<_> = pre_ctrl_pts\n\n .into_iter()\n\n .map(|pt| pt.to_homogeneous())\n\n .collect();\n\n let knot_vec = KnotVec::bezier_knot(ctrl_pts.len() - 1);\n\n let curve = BSplineCurve::new(knot_vec, ctrl_pts);\n\n Edge::new(vertex0, vertex1, Curve::NURBSCurve(NURBSCurve::new(curve)))\n\n}\n\n\n\n/// Returns a homotopic face from `edge0` to `edge1`.\n\n/// # Examples\n\n/// ```\n\n/// use truck_modeling::*;\n\n///\n", "file_path": "truck-modeling/src/builder.rs", "rank": 15, "score": 286121.0796866962 }, { 
"content": "/// positive test implementation for `Cut` by random transformation\n\npub fn cut_random_test<C>(curve: &C, trials: usize)\n\nwhere\n\n C: Cut,\n\n C::Point: Debug + Tolerance,\n\n C::Vector: Debug + Tolerance, {\n\n (0..trials).for_each(move |_| exec_cut_random_test(curve))\n\n}\n\n\n", "file_path": "truck-geotrait/src/traits/curve.rs", "rank": 16, "score": 285862.165407158 }, { "content": "fn all_uv_mut(faces: &mut Faces) -> impl Iterator<Item = &mut usize> {\n\n faces\n\n .face_iter_mut()\n\n .flatten()\n\n .filter_map(move |v| v.uv.as_mut())\n\n}\n\n\n", "file_path": "truck-meshalgo/src/filters/optimizing.rs", "rank": 17, "score": 282539.295272664 }, { "content": "fn all_pos_mut(faces: &mut Faces) -> impl Iterator<Item = &mut usize> {\n\n faces.face_iter_mut().flatten().map(move |v| &mut v.pos)\n\n}\n\n\n", "file_path": "truck-meshalgo/src/filters/optimizing.rs", "rank": 18, "score": 282539.29527266405 }, { "content": "/// positive test implementation for `ParameterTransform` by random transformation\n\npub fn parameter_transform_random_test<C>(curve: &C, trials: usize)\n\nwhere\n\n C: ParameterTransform,\n\n C::Point: Debug + Tolerance,\n\n C::Vector: Debug + Tolerance + std::ops::Mul<f64, Output = C::Vector>, {\n\n (0..trials).for_each(move |_| exec_parameter_transform_random_test(curve))\n\n}\n\n\n", "file_path": "truck-geotrait/src/traits/curve.rs", "rank": 19, "score": 281878.1003223986 }, { "content": "pub fn construct_polylines(lines: &[(Point3, Point3)]) -> Vec<PolylineCurve<Point3>> {\n\n\tlet mut graph: Graph = lines.iter().collect();\n\n\tlet mut res = Vec::new();\n\n\twhile !graph.is_empty() {\n\n\t\tlet (mut idx, node) = graph.get_one();\n\n\t\tlet mut wire: VecDeque<_> = vec![node.coord].into();\n\n\t\twhile let Some((idx0, pt)) = graph.get_a_next_node(idx) {\n\n\t\t\tidx = idx0;\n\n\t\t\twire.push_back(pt);\n\n\t\t}\n\n\t\tlet mut idx = PointIndex::from(wire[0]);\n\n\t\twhile let Some((idx0, pt)) = graph.get_a_next_node(idx) 
{\n\n\t\t\tidx = idx0;\n\n\t\t\twire.push_front(pt);\n\n\t\t}\n\n\t\tres.push(PolylineCurve(wire.into()));\n\n\t}\n\n\tres\n\n}\n\n\n", "file_path": "truck-shapeops/src/polyline_construction/mod.rs", "rank": 20, "score": 276728.3029462459 }, { "content": "/// 3D parametric curve\n\npub trait ParametricCurve3D: ParametricCurve<Point = Point3, Vector = Vector3> {}\n\nimpl<C: ParametricCurve<Point = Point3, Vector = Vector3>> ParametricCurve3D for C {}\n\n\n", "file_path": "truck-geotrait/src/traits/curve.rs", "rank": 22, "score": 266619.7930132751 }, { "content": "/// 3D parametric surface\n\npub trait ParametricSurface3D: ParametricSurface<Point = Point3, Vector = Vector3> {\n\n /// Returns the normal vector at `(u, v)`.\n\n fn normal(&self, u: f64, v: f64) -> Vector3 {\n\n self.uder(u, v).cross(self.vder(u, v)).normalize()\n\n }\n\n}\n\n\n\nimpl<'a, S: ParametricSurface3D> ParametricSurface3D for &'a S {\n\n fn normal(&self, u: f64, v: f64) -> Vector3 { (*self).normal(u, v) }\n\n}\n\n\n", "file_path": "truck-geotrait/src/traits/surface.rs", "rank": 23, "score": 266619.7930132751 }, { "content": "pub fn random_texture(scene: &mut Scene) -> Vec<u8> {\n\n let plane = new_plane!(\"shaders/plane.wgsl\", \"vs_main\", \"random_texture\");\n\n render_one(scene, &plane)\n\n}\n\n\n", "file_path": "truck-rendimpl/tests/common.rs", "rank": 24, "score": 259709.26451303373 }, { "content": "pub fn gradation_texture(scene: &mut Scene) -> Vec<u8> {\n\n let plane = new_plane!(\"shaders/plane.wgsl\", \"vs_main\", \"gradation_texture\");\n\n render_one(scene, &plane)\n\n}\n\n\n", "file_path": "truck-rendimpl/tests/common.rs", "rank": 25, "score": 259709.26451303373 }, { "content": "pub fn create_loops_stores<C, S>(\n\n\tgeom_shell0: &Shell<Point3, C, S>,\n\n\tpoly_shell0: &Shell<Point3, PolylineCurve, PolygonMesh>,\n\n\tgeom_shell1: &Shell<Point3, C, S>,\n\n\tpoly_shell1: &Shell<Point3, PolylineCurve, PolygonMesh>,\n\n\ttol: f64,\n\n) -> Option<LoopsStoreQuadruple<C>>\n\nwhere\n\n\tC: 
SearchNearestParameter<Point = Point3, Parameter = f64>\n\n\t\t+ SearchParameter<Point = Point3, Parameter = f64>\n\n\t\t+ Cut<Point = Point3, Vector = Vector3>\n\n\t\t+ From<IntersectionCurve<PolylineCurve, S>>,\n\n\tS: ParametricSurface3D + SearchNearestParameter<Point = Point3, Parameter = (f64, f64)>,\n\n{\n\n\tlet mut geom_loops_store0: LoopsStore<_, _> = geom_shell0.face_iter().collect();\n\n\tlet mut poly_loops_store0: LoopsStore<_, _> = poly_shell0.face_iter().collect();\n\n\tlet mut geom_loops_store1: LoopsStore<_, _> = geom_shell1.face_iter().collect();\n\n\tlet mut poly_loops_store1: LoopsStore<_, _> = poly_shell1.face_iter().collect();\n\n\tlet store0_len = geom_loops_store0.len();\n\n\tlet store1_len = geom_loops_store1.len();\n", "file_path": "truck-shapeops/src/loops_store/mod.rs", "rank": 26, "score": 259404.94277360005 }, { "content": "pub fn nontex_answer_texture(scene: &mut Scene) -> Vec<u8> {\n\n let plane = new_plane!(\"shaders/plane.wgsl\", \"vs_main\", \"unicolor\");\n\n render_one(scene, &plane)\n\n}\n\n\n", "file_path": "truck-rendimpl/tests/common.rs", "rank": 27, "score": 256154.6911966709 }, { "content": "fn closed_polyline_orientation(pts: &[Point3]) -> bool {\n\n pts.windows(2).fold(0.0, |sum, pt| {\n\n sum + (pt[1][0] + pt[0][0]) * (pt[1][1] - pt[0][1])\n\n }) >= 0.0\n\n}\n\n\n\npub(super) fn attach_plane(mut pts: Vec<Point3>) -> Option<Plane> {\n\n let center = pts.iter().fold(Point3::origin(), |sum, pt| sum + pt.to_vec()) / pts.len() as f64;\n\n let normal = pts.windows(2).fold(Vector3::zero(), |sum, pt| {\n\n sum + (pt[0] - center).cross(pt[1] - center)\n\n });\n\n let n = match normal.so_small() {\n\n true => return None,\n\n false => normal.normalize(),\n\n };\n\n let a = match (n[2].abs() - 1.0).so_small() {\n\n true => Vector3::new(0.0, n[2], -n[1]).normalize(),\n\n false => Vector3::new(n[1], -n[0], 0.0).normalize(),\n\n };\n\n let mat: Matrix4 = Matrix3::from_cols(a, n.cross(a), n).into();\n", "file_path": 
"truck-modeling/src/geom_impls.rs", "rank": 28, "score": 252369.3243197179 }, { "content": "#[wasm_bindgen]\n\npub fn vertex(x: f64, y: f64, z: f64) -> Vertex { builder::vertex(Point3::new(x, y, z)).into() }\n\n/// Returns a line from `vertex0` to `vertex1`.\n", "file_path": "truck-js/src/builder.rs", "rank": 29, "score": 251339.94444090946 }, { "content": "fn add_weights(weights: &mut [f64], positions: &[Point3], face: &[Vertex]) {\n\n let area = (2..face.len()).fold(0.0, |sum, i| {\n\n let vec0 = positions[face[i - 1].pos] - positions[face[0].pos];\n\n let vec1 = positions[face[i].pos] - positions[face[0].pos];\n\n sum + (vec0.cross(vec1)).magnitude() / 2.0\n\n }) / (face.len() as f64);\n\n for v in face {\n\n weights[v.pos] += area;\n\n }\n\n}\n\n\n", "file_path": "truck-meshalgo/src/analyzers/splitting.rs", "rank": 30, "score": 248493.33038518523 }, { "content": "fn sphere(center: Point3, radius: f64, udiv: usize, vdiv: usize) -> PolygonMesh {\n\n let positions = (0..udiv)\n\n .flat_map(move |i| {\n\n (0..vdiv).map(move |j| {\n\n let u = 2.0 * PI * i as f64 / udiv as f64;\n\n let v = PI * j as f64 / (vdiv - 1) as f64;\n\n center + radius * Vector3::new(u.cos() * v.sin(), u.sin() * v.sin(), v.cos())\n\n })\n\n })\n\n .collect::<Vec<_>>();\n\n let normals = (0..udiv)\n\n .flat_map(move |i| {\n\n (0..vdiv).map(move |j| {\n\n let u = 2.0 * PI * i as f64 / udiv as f64;\n\n let v = PI * j as f64 / (vdiv - 1) as f64;\n\n Vector3::new(u.cos() * v.sin(), u.sin() * v.sin(), v.cos())\n\n })\n\n })\n\n .collect::<Vec<_>>();\n\n let faces = (0..udiv)\n", "file_path": "truck-rendimpl/examples/collision-sphere.rs", "rank": 31, "score": 245839.6253320934 }, { "content": "fn degenerate_triangle(tri: [Vertex; 3]) -> bool {\n\n tri[0].pos == tri[1].pos || tri[1].pos == tri[2].pos || tri[2].pos == tri[0].pos\n\n}\n\n\n", "file_path": "truck-meshalgo/src/filters/optimizing.rs", "rank": 33, "score": 241041.1738275726 }, { "content": "fn check_connectivity<T>(adjacency: &mut 
HashMap<T, Vec<T>>) -> bool\n\nwhere T: Eq + Clone + Hash {\n\n create_one_component(adjacency);\n\n adjacency.is_empty()\n\n}\n\n\n", "file_path": "truck-topology/src/shell.rs", "rank": 34, "score": 240700.06555599655 }, { "content": "/// Gathered the traits used in tessellation.\n\npub trait MeshableSurface: ParametricSurface3D + Invertible + ParameterDivision2D + SearchParameter<Point = Point3, Parameter = (f64, f64)> {}\n\n#[rustfmt::skip]\n\nimpl<S: ParametricSurface3D + Invertible + ParameterDivision2D + SearchParameter<Point = Point3, Parameter = (f64, f64)>> MeshableSurface for S {}\n\n\n", "file_path": "truck-meshalgo/src/tessellation/mod.rs", "rank": 35, "score": 238757.23883845223 }, { "content": "// https://iquilezles.org/www/articles/distfunctions/distfunctions.htm\n\nfn distance2_point_triangle(point: Point3, triangle: [Point3; 3]) -> f64 {\n\n let ab = triangle[1] - triangle[0];\n\n let ap = point - triangle[0];\n\n let bc = triangle[2] - triangle[1];\n\n let bp = point - triangle[1];\n\n let ca = triangle[0] - triangle[2];\n\n let cp = point - triangle[2];\n\n let nor = ab.cross(ca);\n\n\n\n let coef = f64::signum(ab.cross(nor).dot(ap))\n\n + f64::signum(bc.cross(nor).dot(bp))\n\n + f64::signum(ca.cross(nor).dot(cp));\n\n if coef < 2.0 || nor.magnitude().so_small() {\n\n let a = (ap - ab * f64::clamp(ab.dot(ap) / ab.dot(ab), 0.0, 1.0)).magnitude2();\n\n let b = (bp - bc * f64::clamp(bc.dot(bp) / bc.dot(bc), 0.0, 1.0)).magnitude2();\n\n let c = (cp - ca * f64::clamp(ca.dot(cp) / ca.dot(ca), 0.0, 1.0)).magnitude2();\n\n f64::min(f64::min(a, b), c)\n\n } else {\n\n nor.dot(ap) * nor.dot(ap) / nor.magnitude2()\n\n }\n\n}\n", "file_path": "truck-meshalgo/src/analyzers/point_cloud/mod.rs", "rank": 36, "score": 237575.75927732117 }, { "content": "fn create_independent_loop<P, C, D>(mut poly_curve0: C) -> Wire<P, D>\n\nwhere\n\n\tC: Cut<Point = P>,\n\n\tD: From<C>,\n\n{\n\n\tlet (t0, t1) = poly_curve0.parameter_range();\n\n\tlet t = (t0 + t1) / 
2.0;\n\n\tlet poly_curve1 = poly_curve0.cut(t);\n\n\tlet v0 = Vertex::new(poly_curve0.front());\n\n\tlet v1 = Vertex::new(poly_curve1.front());\n\n\tlet edge0 = Edge::new(&v0, &v1, poly_curve0.into());\n\n\tlet edge1 = Edge::new(&v1, &v0, poly_curve1.into());\n\n\tvec![edge0, edge1].into()\n\n}\n\n\n\npub struct LoopsStoreQuadruple<C> {\n\n\tpub geom_loops_store0: LoopsStore<Point3, C>,\n\n\tpub poly_loops_store0: LoopsStore<Point3, PolylineCurve>,\n\n\tpub geom_loops_store1: LoopsStore<Point3, C>,\n\n\tpub poly_loops_store1: LoopsStore<Point3, PolylineCurve>,\n\n}\n\n\n", "file_path": "truck-shapeops/src/loops_store/mod.rs", "rank": 37, "score": 237211.2695679004 }, { "content": "#[inline(always)]\n\npub fn scaled<T: Mapped<Point3, Curve, Surface>>(elem: &T, origin: Point3, scalars: Vector3) -> T {\n\n let mat0 = Matrix4::from_translation(-origin.to_vec());\n\n let mat1 = Matrix4::from_nonuniform_scale(scalars[0], scalars[1], scalars[2]);\n\n let mat2 = Matrix4::from_translation(origin.to_vec());\n\n transformed(elem, mat2 * mat1 * mat0)\n\n}\n\n\n", "file_path": "truck-modeling/src/builder.rs", "rank": 38, "score": 235262.61574592796 }, { "content": "fn parabola_surfaces() -> (BSplineSurface<Point3>, BSplineSurface<Point3>) {\n\n\t// define surfaces\n\n\t#[rustfmt::skip]\n\n\tlet ctrl0 = vec![\n\n\t\tvec![Point3::new(-1.0, -1.0, 3.0), Point3::new(-1.0, 0.0, -1.0), Point3::new(-1.0, 1.0, 3.0)],\n\n\t\tvec![Point3::new(0.0, -1.0, -1.0), Point3::new(0.0, 0.0, -5.0), Point3::new(0.0, 1.0, -1.0)],\n\n\t\tvec![Point3::new(1.0, -1.0, 3.0), Point3::new(1.0, 0.0, -1.0), Point3::new(1.0, 1.0, 3.0)],\n\n\t];\n\n\t#[rustfmt::skip]\n\n\tlet ctrl1 = vec![\n\n\t\tvec![Point3::new(-1.0, -1.0, -3.0), Point3::new(-1.0, 0.0, 1.0), Point3::new(-1.0, 1.0, -3.0)],\n\n\t\tvec![Point3::new(0.0, -1.0, 1.0), Point3::new(0.0, 0.0, 5.0), Point3::new(0.0, 1.0, 1.0)],\n\n\t\tvec![Point3::new(1.0, -1.0, -3.0), Point3::new(1.0, 0.0, 1.0), Point3::new(1.0, 1.0, 
-3.0)],\n\n\t];\n\n\t(\n\n\t\tBSplineSurface::new((KnotVec::bezier_knot(2), KnotVec::bezier_knot(2)), ctrl0),\n\n\t\tBSplineSurface::new((KnotVec::bezier_knot(2), KnotVec::bezier_knot(2)), ctrl1),\n\n\t)\n\n}\n\n\n", "file_path": "truck-shapeops/src/loops_store/tests.rs", "rank": 39, "score": 234774.6857908434 }, { "content": "#[inline(always)]\n\npub fn rsweep<T: ClosedSweep<Point3, Curve, Surface>, R: Into<Rad<f64>>>(\n\n elem: &T,\n\n origin: Point3,\n\n axis: Vector3,\n\n angle: R,\n\n) -> T::Swept {\n\n let angle = angle.into();\n\n if angle.0.abs() < 2.0 * PI.0 {\n\n partial_rsweep(elem, origin, axis, angle)\n\n } else if angle.0 > 0.0 {\n\n whole_rsweep(elem, origin, axis)\n\n } else {\n\n whole_rsweep(elem, origin, -axis)\n\n }\n\n}\n\n\n", "file_path": "truck-modeling/src/builder.rs", "rank": 41, "score": 230932.70766645315 }, { "content": "/// Whether the surface includes the boundary curve.\n\npub trait IncludeCurve<C: ParametricCurve> {\n\n /// Returns whether the curve `curve` is included in the surface `self`.\n\n fn include(&self, curve: &C) -> bool;\n\n}\n\n\n", "file_path": "truck-geotrait/src/traits/surface.rs", "rank": 42, "score": 228191.21201915044 }, { "content": "#[doc(hidden)]\n\n#[inline(always)]\n\npub fn inv_or_zero(delta: f64) -> f64 {\n\n if delta.so_small() {\n\n 0.0\n\n } else {\n\n 1.0 / delta\n\n }\n\n}\n", "file_path": "truck-geometry/src/nurbs/mod.rs", "rank": 43, "score": 227964.722262607 }, { "content": "pub fn render_one<R: Rendered>(scene: &mut Scene, object: &R) -> Vec<u8> {\n\n scene.add_object(object);\n\n let res = pollster::block_on(scene.render_to_buffer());\n\n scene.remove_object(object);\n\n res\n\n}\n\n\n", "file_path": "truck-platform/tests/common.rs", "rank": 44, "score": 227962.4113968729 }, { "content": "pub fn render_one<R: Rendered>(scene: &mut Scene, object: &R) -> Vec<u8> {\n\n scene.add_object(object);\n\n let res = pollster::block_on(scene.render_to_buffer());\n\n scene.remove_object(object);\n\n 
res\n\n}\n\n\n", "file_path": "truck-rendimpl/tests/common.rs", "rank": 45, "score": 227962.4113968729 }, { "content": "#[inline(always)]\n\npub fn translated<T: Mapped<Point3, Curve, Surface>>(elem: &T, vector: Vector3) -> T {\n\n transformed(elem, Matrix4::from_translation(vector))\n\n}\n\n\n\n/// Returns a rotated vertex, edge, wire, face, shell or solid.\n", "file_path": "truck-modeling/src/builder.rs", "rank": 46, "score": 224416.60726559866 }, { "content": "#[inline(always)]\n\npub fn cone<R: Into<Rad<f64>>>(wire: &Wire, axis: Vector3, angle: R) -> Shell {\n\n let angle = angle.into();\n\n let closed = angle.0.abs() >= 2.0 * PI.0;\n\n let mut wire = wire.clone();\n\n if wire.is_empty() {\n\n return Shell::new();\n\n }\n\n let pt0 = wire.front_vertex().unwrap().get_point();\n\n let pt1 = wire.back_vertex().unwrap().get_point();\n\n let pt1_on_axis = (pt1 - pt0).cross(axis).so_small();\n\n if wire.len() == 1 && pt1_on_axis {\n\n let edge = wire.pop_back().unwrap();\n\n let v0 = edge.front().clone();\n\n let v2 = edge.back().clone();\n\n let mut curve = edge.get_curve();\n\n let (t0, t1) = curve.parameter_range();\n\n let t = (t0 + t1) * 0.5;\n\n let v1 = Vertex::new(curve.subs(t));\n\n let curve1 = curve.cut(t);\n\n wire.push_back(Edge::debug_new(&v0, &v1, curve));\n", "file_path": "truck-modeling/src/builder.rs", "rank": 47, "score": 221109.08371770888 }, { "content": "fn take_one_unit() -> Vector3 {\n\n loop {\n\n let normal = Vector3::new(\n\n 2.0 * rand::random::<f64>() - 1.0,\n\n 2.0 * rand::random::<f64>() - 1.0,\n\n 2.0 * rand::random::<f64>() - 1.0,\n\n );\n\n if !normal.so_small() {\n\n return normal.normalize();\n\n }\n\n }\n\n}\n\n\n", "file_path": "truck-meshalgo/src/analyzers/point_cloud/sort_end_points.rs", "rank": 48, "score": 221053.80742096796 }, { "content": "/// Searches the nearest parameter by Newton's method.\n\npub fn search_nearest_parameter<C>(\n\n curve: &C,\n\n point: C::Point,\n\n mut hint: f64,\n\n trials: usize,\n\n) -> 
Option<f64>\n\nwhere\n\n C: ParametricCurve,\n\n C::Point: EuclideanSpace<Scalar = f64, Diff = C::Vector>,\n\n C::Vector: InnerSpace<Scalar = f64> + Tolerance,\n\n{\n\n #[cfg(all(test, debug_assertions))]\n\n let mut log = Vec::new();\n\n for _ in 0..=trials {\n\n #[cfg(all(test, debug_assertions))]\n\n log.push(hint);\n\n let pt = curve.subs(hint);\n\n let der = curve.der(hint);\n\n let der2 = curve.der2(hint);\n\n let f = der.dot(pt - point);\n", "file_path": "truck-geotrait/src/algo/curve.rs", "rank": 49, "score": 220012.55651825716 }, { "content": "/// Sweeps a vertex, an edge, a wire, a face, or a shell by a vector.\n\n/// # Examples\n\n/// ```\n\n/// use truck_modeling::*;\n\n/// let vertex: Vertex = builder::vertex(Point3::new(0.0, 0.0, 0.0));\n\n/// let line: Edge = builder::tsweep(&vertex, Vector3::unit_x());\n\n/// let square: Face = builder::tsweep(&line, Vector3::unit_y());\n\n/// let cube: Solid = builder::tsweep(&square, Vector3::unit_z());\n\n/// #\n\n/// # let b_shell = &cube.boundaries()[0];\n\n/// # assert_eq!(b_shell.len(), 6); // This solid is a cube!\n\n/// # assert!(cube.is_geometric_consistent());\n\n/// #\n\n/// # let b_loop = &b_shell[0].boundaries()[0];\n\n/// # let mut loop_iter = b_loop.vertex_iter();\n\n/// # assert_eq!(loop_iter.next().unwrap().get_point(), Point3::new(0.0, 0.0, 0.0));\n\n/// # assert_eq!(loop_iter.next().unwrap().get_point(), Point3::new(0.0, 1.0, 0.0));\n\n/// # assert_eq!(loop_iter.next().unwrap().get_point(), Point3::new(1.0, 1.0, 0.0));\n\n/// # assert_eq!(loop_iter.next().unwrap().get_point(), Point3::new(1.0, 0.0, 0.0));\n\n/// # assert_eq!(loop_iter.next(), None);\n\n/// #\n\n/// # let b_loop = &b_shell[3].boundaries()[0];\n\n/// # let mut loop_iter = b_loop.vertex_iter();\n\n/// # assert_eq!(loop_iter.next().unwrap().get_point(), Point3::new(1.0, 1.0, 0.0));\n\n/// # assert_eq!(loop_iter.next().unwrap().get_point(), Point3::new(0.0, 1.0, 0.0));\n\n/// # assert_eq!(loop_iter.next().unwrap().get_point(), 
Point3::new(0.0, 1.0, 1.0));\n\n/// # assert_eq!(loop_iter.next().unwrap().get_point(), Point3::new(1.0, 1.0, 1.0));\n\n/// # assert_eq!(loop_iter.next(), None);\n\n/// #\n\n/// # let b_loop = &b_shell[5].boundaries()[0];\n\n/// # let mut loop_iter = b_loop.vertex_iter();\n\n/// # assert_eq!(loop_iter.next().unwrap().get_point(), Point3::new(0.0, 0.0, 1.0));\n\n/// # assert_eq!(loop_iter.next().unwrap().get_point(), Point3::new(1.0, 0.0, 1.0));\n\n/// # assert_eq!(loop_iter.next().unwrap().get_point(), Point3::new(1.0, 1.0, 1.0));\n\n/// # assert_eq!(loop_iter.next().unwrap().get_point(), Point3::new(0.0, 1.0, 1.0));\n\n/// # assert_eq!(loop_iter.next(), None);\n\n/// ```\n\npub fn tsweep<T: Sweep<Point3, Curve, Surface>>(elem: &T, vector: Vector3) -> T::Swept {\n\n let trsl = Matrix4::from_translation(vector);\n\n elem.sweep(\n\n &move |pt| trsl.transform_point(*pt),\n\n &move |curve| curve.transformed(trsl),\n\n &move |surface| surface.transformed(trsl),\n\n &move |pt0, pt1| Curve::BSplineCurve(geom_impls::line(*pt0, *pt1)),\n\n &move |curve0, curve1| {\n\n Surface::NURBSSurface(NURBSSurface::new(BSplineSurface::homotopy(\n\n curve0.clone().lift_up(),\n\n curve1.clone().lift_up(),\n\n )))\n\n },\n\n )\n\n}\n\n\n\n/// Sweeps a vertex, an edge, a wire, a face, or a shell by the rotation.\n\n/// # Details\n\n/// If the absolute value of `angle` is more than 2π rad, then the result is closed shape.\n\n/// For example, the result of sweeping a disk is a bent cylinder if `angle` is less than 2π rad\n", "file_path": "truck-modeling/src/builder.rs", "rank": 50, "score": 218140.7885206301 }, { "content": "#[inline(always)]\n\nfn calc_score(edge0: Vector3, edge1: Vector3, edge2: Vector3, edge3: Vector3) -> f64 {\n\n edge0.cos_angle(edge1).abs()\n\n + edge1.cos_angle(edge2).abs()\n\n + edge2.cos_angle(edge3).abs()\n\n + edge3.cos_angle(edge0).abs()\n\n}\n\n\n", "file_path": "truck-meshalgo/src/filters/structuring.rs", "rank": 51, "score": 217917.83433707117 }, { "content": 
"fn get_angle(positions: &[Point3], face: &[Vertex], idx0: usize, idx1: usize, idx2: usize) -> f64 {\n\n let vec0 = positions[face[idx1].pos] - positions[face[idx0].pos];\n\n let vec1 = positions[face[idx2].pos] - positions[face[idx0].pos];\n\n vec0.angle(vec1).0\n\n}\n\n\n", "file_path": "truck-meshalgo/src/analyzers/splitting.rs", "rank": 52, "score": 217445.58472173862 }, { "content": " /// Abstract sweeping, builds a circle-arc, a prism, a half torus, and so on.\n\n pub trait Sweep<P, C, S> {\n\n /// The struct of sweeped topology.\n\n type Swept;\n\n /// Transform topologies and connect vertices and edges in boundaries.\n\n fn sweep<\n\n FP: Fn(&P) -> P,\n\n FC: Fn(&C) -> C,\n\n FS: Fn(&S) -> S,\n\n CP: Fn(&P, &P) -> C,\n\n CE: Fn(&C, &C) -> S,\n\n >(\n\n &self,\n\n point_mapping: &FP,\n\n curve_mapping: &FC,\n\n surface_mapping: &FS,\n\n connect_points: &CP,\n\n connect_curve: &CE,\n\n ) -> Self::Swept;\n\n }\n\n\n", "file_path": "truck-modeling/src/lib.rs", "rank": 53, "score": 216184.59493744242 }, { "content": "fn write2vec<V: std::ops::Index<usize, Output = f64>, W: Write>(\n\n writer: &mut BufWriter<W>,\n\n vecs: &[V],\n\n prefix: &str,\n\n) -> Result<()> {\n\n for vec in vecs {\n\n writer.write_fmt(format_args!(\"{} {:.10e} {:.10e}\\n\", prefix, vec[0], vec[1]))?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "truck-polymesh/src/obj.rs", "rank": 54, "score": 216041.6472618644 }, { "content": "fn write3vec<V: std::ops::Index<usize, Output = f64>, W: Write>(\n\n writer: &mut BufWriter<W>,\n\n vecs: &[V],\n\n prefix: &str,\n\n) -> Result<()> {\n\n for vec in vecs {\n\n writer.write_fmt(format_args!(\n\n \"{} {:.10e} {:.10e} {:.10e}\\n\",\n\n prefix, vec[0], vec[1], vec[2]\n\n ))?;\n\n }\n\n Ok(())\n\n}\n\n\n\nimpl Vertex {\n\n fn write<W: Write>(&self, writer: &mut W) -> std::io::Result<()> {\n\n match (self.uv, self.nor) {\n\n (None, None) => writer.write_fmt(format_args!(\"{}\", self.pos + 1)),\n\n (Some(uv), None) => 
writer.write_fmt(format_args!(\"{}/{}\", self.pos + 1, uv + 1)),\n\n (None, Some(nor)) => writer.write_fmt(format_args!(\"{}//{}\", self.pos + 1, nor + 1)),\n", "file_path": "truck-polymesh/src/obj.rs", "rank": 55, "score": 216041.6472618644 }, { "content": " /// Abstract multi sweeping, builds a circle-arc, a prism, a half torus, and so on.\n\n pub trait MultiSweep<P, C, S> {\n\n /// The struct of sweeped topology.\n\n type Swept;\n\n /// Transform topologies and connect vertices and edges in boundaries.\n\n fn multi_sweep<\n\n FP: Fn(&P) -> P,\n\n FC: Fn(&C) -> C,\n\n FS: Fn(&S) -> S,\n\n CP: Fn(&P, &P) -> C,\n\n CE: Fn(&C, &C) -> S,\n\n >(\n\n &self,\n\n point_mapping: &FP,\n\n curve_mapping: &FC,\n\n surface_mapping: &FS,\n\n connect_points: &CP,\n\n connect_curve: &CE,\n\n division: usize,\n\n ) -> Self::Swept;\n\n }\n\n\n", "file_path": "truck-modeling/src/lib.rs", "rank": 56, "score": 213092.17852365787 }, { "content": "fn create_mesh<S>(surface: &S, div0: Vec<f64>, div1: Vec<f64>) -> StructuredMesh\n\nwhere S: ParametricSurface3D {\n\n let mut positions = vec![Vec::with_capacity(div1.len()); div0.len()];\n\n let mut normals = vec![Vec::with_capacity(div1.len()); div0.len()];\n\n div0.iter()\n\n .zip(positions.iter_mut().zip(normals.iter_mut()))\n\n .for_each(|(u, (prow, nrow))| {\n\n div1.iter().for_each(move |v| {\n\n prow.push(surface.subs(*u, *v));\n\n nrow.push(surface.normal(*u, *v));\n\n })\n\n });\n\n StructuredMesh {\n\n positions,\n\n uv_division: Some((div0, div1)),\n\n normals: Some(normals),\n\n }\n\n}\n", "file_path": "truck-polymesh/src/meshing_shape.rs", "rank": 57, "score": 213091.3888850098 }, { "content": "/// divide the graph to the connected components.\n\n/// # Arguments\n\n/// * adjacency - the adjacency matrix\n\n/// # Return\n\n/// * the list of the indices of faces contained in each components\n\nfn get_components(adjacency: &[Vec<usize>]) -> Vec<Vec<usize>> {\n\n let mut unchecked = vec![true; adjacency.len()];\n\n let mut 
components = Vec::new();\n\n loop {\n\n let first = match unchecked.iter().position(|x| *x) {\n\n Some(idx) => idx,\n\n None => return components,\n\n };\n\n let mut stack = vec![first];\n\n let mut component = vec![first];\n\n unchecked[first] = false;\n\n while !stack.is_empty() {\n\n let cursor = stack.pop().unwrap();\n\n for i in &adjacency[cursor] {\n\n if unchecked[*i] {\n\n unchecked[*i] = false;\n\n component.push(*i);\n\n stack.push(*i);\n\n }\n\n }\n\n }\n\n components.push(component);\n\n }\n\n}\n\n\n", "file_path": "truck-meshalgo/src/analyzers/splitting.rs", "rank": 58, "score": 212076.92395617432 }, { "content": "pub fn divide_faces<C, S>(\n\n\tshell: &Shell<Point3, C, S>,\n\n\tloops_store: &LoopsStore<Point3, C>,\n\n\ttol: f64,\n\n) -> Option<FacesClassification<Point3, C, S>>\n\nwhere\n\n\tC: ParametricCurve<Point = Point3> + ParameterDivision1D<Point = Point3>,\n\n\tS: Clone + SearchParameter<Point = Point3, Parameter = (f64, f64)>,\n\n{\n\n\tlet mut res = FacesClassification::<Point3, C, S>::default();\n\n\tshell\n\n\t\t.iter()\n\n\t\t.zip(loops_store)\n\n\t\t.try_for_each(|(face, loops)| {\n\n\t\t\tif loops\n\n\t\t\t\t.iter()\n\n\t\t\t\t.all(|wire| wire.status() == ShapesOpStatus::Unknown)\n\n\t\t\t{\n\n\t\t\t\tres.push(face.clone(), ShapesOpStatus::Unknown);\n\n\t\t\t} else {\n", "file_path": "truck-shapeops/src/divide_face/mod.rs", "rank": 59, "score": 210830.0975691831 }, { "content": "/// Filters for adding normals\n\npub trait NormalFilters {\n\n /// Normalize all normals and assign `None` to the `nor` index of the vertices\n\n /// that has irregular normals.\n\n /// # Examples\n\n /// ```\n\n /// use std::iter::FromIterator;\n\n /// use truck_polymesh::*;\n\n /// use truck_meshalgo::filters::*;\n\n ///\n\n /// // Morbid data only for testing\n\n /// let mut mesh = PolygonMesh::new(\n\n /// StandardAttributes {\n\n /// positions: vec![Point3::new(0.0, 0.0, 0.0)],\n\n /// normals: vec![\n\n /// Vector3::new(100.0, 20.0, 56.0),\n\n /// 
Vector3::new(1.0e-12, 3.536e10, std::f64::NAN),\n\n /// Vector3::new(0.0, 1.0, 0.0),\n\n /// ],\n\n /// ..Default::default()\n\n /// },\n", "file_path": "truck-meshalgo/src/filters/normal_filters.rs", "rank": 60, "score": 210660.17192297516 }, { "content": " /// Mapping, duplicates and moves a topological element.\n\n pub trait Mapped<P, C, S>: Sized {\n\n /// Returns a new topology whose points are mapped by `point_closure`,\n\n /// curves are mapped by `curve_closure`,\n\n /// and surfaces are mapped by `surface_closure`.\n\n #[doc(hidden)]\n\n fn mapped<FP: Fn(&P) -> P, FC: Fn(&C) -> C, FS: Fn(&S) -> S>(\n\n &self,\n\n point_mapping: &FP,\n\n curve_mapping: &FC,\n\n surface_mapping: &FS,\n\n ) -> Self;\n\n\n\n /// Returns another topology whose points, curves, and surfaces are cloned.\n\n fn topological_clone(&self) -> Self\n\n where\n\n P: Clone,\n\n C: Clone,\n\n S: Clone, {\n\n self.mapped(&Clone::clone, &Clone::clone, &Clone::clone)\n\n }\n\n }\n\n\n", "file_path": "truck-modeling/src/lib.rs", "rank": 61, "score": 207980.48779853433 }, { "content": " /// closed sweep, builds a closed torus, and so on.\n\n pub trait ClosedSweep<P, C, S>: MultiSweep<P, C, S> {\n\n /// Transform topologies and connect vertices and edges in boundaries.\n\n fn closed_sweep<\n\n FP: Fn(&P) -> P,\n\n FC: Fn(&C) -> C,\n\n FS: Fn(&S) -> S,\n\n CP: Fn(&P, &P) -> C,\n\n CE: Fn(&C, &C) -> S,\n\n >(\n\n &self,\n\n point_mapping: &FP,\n\n curve_mapping: &FC,\n\n surface_mapping: &FS,\n\n connect_points: &CP,\n\n connect_curves: &CE,\n\n division: usize,\n\n ) -> Self::Swept;\n\n }\n\n}\n\npub use topo_traits::*;\n", "file_path": "truck-modeling/src/lib.rs", "rank": 62, "score": 205550.57234712713 }, { "content": "/// By implementing `IntoSTLIterator` for a type, you define how it will be converted to an iterator.\n\n/// This is common for types which describe a collection of some kind.\n\npub trait IntoSTLIterator {\n\n /// Which kind of iterator are we turning this into?\n\n type 
IntoIter: ExactSizeIterator<Item = STLFace>;\n\n /// Creates an iterator from a value.\n\n fn into_iter(self) -> Self::IntoIter;\n\n}\n\n\n\n/// STL face generate from `PolygonMesh`\n\n#[derive(Debug)]\n\npub struct PolygonMeshSTLFaceIterator<'a> {\n\n positions: &'a Vec<Point3>,\n\n faces: faces::TriangleIterator<'a, Vertex>,\n\n len: usize,\n\n}\n\n\n\nimpl<'a> Iterator for PolygonMeshSTLFaceIterator<'a> {\n\n type Item = STLFace;\n\n fn next(&mut self) -> Option<STLFace> {\n\n self.faces.next().map(|face| {\n\n let p = [\n", "file_path": "truck-polymesh/src/stl.rs", "rank": 63, "score": 205413.26074307284 }, { "content": "fn create_components<T>(adjacency: &mut HashMap<T, Vec<T>>) -> Vec<Vec<T>>\n\nwhere T: Eq + Clone + Hash {\n\n let mut res = Vec::new();\n\n loop {\n\n let component = create_one_component(adjacency);\n\n match component.is_empty() {\n\n true => break,\n\n false => res.push(component),\n\n }\n\n }\n\n res\n\n}\n\n\n", "file_path": "truck-topology/src/shell.rs", "rank": 64, "score": 204657.0715908903 }, { "content": "#[wasm_bindgen]\n\npub fn rotated(shape: &AbstractShape, origin: &[f64], axis: &[f64], angle: f64) -> AbstractShape {\n\n intopt!(Point3, origin, Vector3, axis);\n\n derive_all_shape!(shape, builder::rotated, (origin, axis, Rad(angle)))\n\n}\n\n\n\n/// Returns a scaled vertex, edge, wire, face, shell or solid.\n", "file_path": "truck-js/src/builder.rs", "rank": 65, "score": 203593.2338384954 }, { "content": "#[wasm_bindgen]\n\npub fn rsweep(shape: &AbstractShape, origin: &[f64], axis: &[f64], angle: f64) -> AbstractShape {\n\n intopt!(Point3, origin, Vector3, axis);\n\n derive_all_sweepable!(shape, builder::rsweep, (origin, axis, Rad(angle)))\n\n}\n", "file_path": "truck-js/src/builder.rs", "rank": 66, "score": 203593.2338384954 }, { "content": "fn sub_connect_wires<P: Clone, C: Clone, S: Clone, CP: Fn(&P, &P) -> C, CC: Fn(&C, &C) -> S>(\n\n edge0: &Edge<P, C>,\n\n edge1: &Edge<P, C>,\n\n connect_points: &CP,\n\n connect_curves: 
&CC,\n\n vemap: &mut HashMap<VertexID<P>, Edge<P, C>>,\n\n) -> Face<P, C, S> {\n\n let edge2 = vemap\n\n .entry(edge0.front().id())\n\n .or_insert_with(|| connect_vertices(edge0.front(), edge1.front(), connect_points))\n\n .clone();\n\n let edge3 = vemap\n\n .entry(edge0.back().id())\n\n .or_insert_with(|| connect_vertices(edge0.back(), edge1.back(), connect_points))\n\n .clone();\n\n let ori = edge0.orientation();\n\n let wire = match ori {\n\n true => Wire::from(vec![edge0.clone(), edge3, edge1.inverse(), edge2.inverse()]),\n\n false => Wire::from(vec![edge2, edge1.clone(), edge3.inverse(), edge0.inverse()]),\n\n };\n", "file_path": "truck-modeling/src/topo_impls.rs", "rank": 67, "score": 202641.29639701644 }, { "content": "fn circum_center(pt0: Point3, pt1: Point3, pt2: Point3) -> Point3 {\n\n let vec0 = pt1 - pt0;\n\n let vec1 = pt2 - pt0;\n\n let a2 = vec0.dot(vec0);\n\n let ab = vec0.dot(vec1);\n\n let b2 = vec1.dot(vec1);\n\n let det = a2 * b2 - ab * ab;\n\n let u = (b2 * a2 - ab * b2) / (2.0 * det);\n\n let v = (-ab * a2 + b2 * a2) / (2.0 * det);\n\n pt0 + u * vec0 + v * vec1\n\n}\n\n\n\npub(super) fn circle_arc(\n\n point: Vector4,\n\n origin: Point3,\n\n axis: Vector3,\n\n angle: Rad<f64>,\n\n) -> BSplineCurve<Vector4> {\n\n let tmp = Point3::from_homogeneous(point);\n\n let origin = origin + (axis.dot(tmp - origin)) * axis;\n", "file_path": "truck-modeling/src/geom_impls.rs", "rank": 68, "score": 202165.20769701694 }, { "content": "fn tex_raytracing(scene: &mut Scene) -> Vec<u8> {\n\n let shader = include_str!(\"../src/shaders/microfacet-module.wgsl\").to_string()\n\n + include_str!(\"shaders/raytraces.wgsl\");\n\n let plane = Plane {\n\n shader: &shader,\n\n vs_entpt: \"vs_main\",\n\n fs_entpt: \"tex_raytracing\",\n\n id: RenderID::gen(),\n\n };\n\n common::render_one(scene, &plane)\n\n}\n\n\n", "file_path": "truck-rendimpl/tests/render.rs", "rank": 69, "score": 201704.79845956014 }, { "content": "fn nontex_raytracing(scene: &mut Scene) -> Vec<u8> {\n\n 
let mut shader = include_str!(\"../src/shaders/microfacet-module.wgsl\").to_string();\n\n shader += include_str!(\"shaders/raytraces.wgsl\");\n\n let plane = Plane {\n\n shader: &shader,\n\n vs_entpt: \"vs_main\",\n\n fs_entpt: \"nontex_raytracing\",\n\n id: RenderID::gen(),\n\n };\n\n common::render_one(scene, &plane)\n\n}\n\n\n", "file_path": "truck-rendimpl/tests/render.rs", "rank": 70, "score": 201704.79845956014 }, { "content": "#[wasm_bindgen]\n\npub fn scaled(shape: &AbstractShape, origin: &[f64], scalars: &[f64]) -> AbstractShape {\n\n intopt!(Point3, origin);\n\n if scalars.len() == 1 {\n\n let s = Vector3::new(scalars[0], scalars[0], scalars[0]);\n\n derive_all_shape!(shape, builder::scaled, (origin, s))\n\n } else if scalars.len() == 3 {\n\n let s = Vector3::new(scalars[0], scalars[1], scalars[2]);\n\n derive_all_shape!(shape, builder::scaled, (origin, s))\n\n } else {\n\n panic!(\"The length of scalars is not 1 or 3.\");\n\n }\n\n}\n\n\n\nmacro_rules! derive_all_sweepable{\n\n ($shape: expr, $function: expr, ($($arg: expr),*)) => {\n\n transform_if_chain!(\n\n $shape,\n\n $function,\n\n ($($arg),*),\n\n panic!(\"sweep is only implemented to Vertex, Edge, Wire and Face.\"),\n\n as_vertex,\n\n as_edge,\n\n as_wire,\n\n as_face\n\n )\n\n };\n\n}\n\n\n\n/// Sweeps a vertex, an edge, a wire, a face, or a shell by a vector.\n", "file_path": "truck-js/src/builder.rs", "rank": 71, "score": 201502.57310991068 }, { "content": "fn sub_remove_unused_attrs<'a, I: Iterator<Item = &'a mut usize>>(\n\n iter: I,\n\n old_len: usize,\n\n) -> Vec<usize> {\n\n let mut new2old = Vec::new();\n\n let mut old2new = vec![None; old_len];\n\n for idx in iter {\n\n *idx = match old2new[*idx] {\n\n Some(k) => k,\n\n None => {\n\n let k = new2old.len();\n\n new2old.push(*idx);\n\n old2new[*idx] = Some(k);\n\n k\n\n }\n\n };\n\n }\n\n new2old\n\n}\n\n\n", "file_path": "truck-meshalgo/src/filters/optimizing.rs", "rank": 72, "score": 200178.2877551019 }, { "content": "fn 
exec_cut_random_test<C>(curve: &C)\n\nwhere\n\n C: Cut,\n\n C::Point: Debug + Tolerance,\n\n C::Vector: Debug + Tolerance, {\n\n let mut part0 = curve.clone();\n\n let (t0, t1) = curve.parameter_range();\n\n let p = rand::random::<f64>();\n\n let t = t0 * (1.0 - p) + t1 * p;\n\n let part1 = part0.cut(t);\n\n assert_near!(part0.parameter_range().0, t0);\n\n assert_near!(part0.parameter_range().1, t);\n\n assert_near!(part1.parameter_range().0, t);\n\n assert_near!(part1.parameter_range().1, t1);\n\n\n\n let p = rand::random::<f64>();\n\n let s = t0 * (1.0 - p) + t * p;\n\n assert_near!(part0.subs(s), curve.subs(s));\n\n assert_near!(part0.der(s), curve.der(s));\n\n assert_near!(part0.der2(s), curve.der2(s));\n", "file_path": "truck-geotrait/src/traits/curve.rs", "rank": 73, "score": 199296.53231365862 }, { "content": "fn random_normal_3d() -> Vector3 {\n\n\tlet mut x = rand::random::<f64>();\n\n\tif x.so_small() {\n\n\t\tx = rand::random::<f64>();\n\n\t}\n\n\tlet mut y = rand::random::<f64>();\n\n\tif y.so_small() {\n\n\t\ty = rand::random::<f64>();\n\n\t}\n\n\tlet mut z = rand::random::<f64>();\n\n\tif z.so_small() {\n\n\t\tz = rand::random::<f64>();\n\n\t}\n\n\tlet mut w = rand::random::<f64>();\n\n\tif w.so_small() {\n\n\t\tw = rand::random::<f64>();\n\n\t}\n\n\tVector3::new(\n\n\t\tf64::sqrt(-2.0 * f64::ln(x)) * f64::cos(2.0 * PI * y),\n\n\t\tf64::sqrt(-2.0 * f64::ln(x)) * f64::sin(2.0 * PI * y),\n\n\t\tf64::sqrt(-2.0 * f64::ln(z)) * f64::cos(2.0 * PI * w),\n\n\t)\n\n}\n\n\n", "file_path": "truck-meshalgo/src/analyzers/in_out_judge.rs", "rank": 74, "score": 197978.81290412534 }, { "content": "fn random_normal_3d() -> Vector3 {\n\n\tlet mut x = rand::random::<f64>();\n\n\tif x.so_small() {\n\n\t\tx = rand::random::<f64>();\n\n\t}\n\n\tlet mut y = rand::random::<f64>();\n\n\tif y.so_small() {\n\n\t\ty = rand::random::<f64>();\n\n\t}\n\n\tlet mut z = rand::random::<f64>();\n\n\tif z.so_small() {\n\n\t\tz = rand::random::<f64>();\n\n\t}\n\n\tlet mut w = 
rand::random::<f64>();\n\n\tif w.so_small() {\n\n\t\tw = rand::random::<f64>();\n\n\t}\n\n\tVector3::new(\n\n\t\tf64::sqrt(-2.0 * f64::ln(x)) * f64::cos(2.0 * PI * y),\n\n\t\tf64::sqrt(-2.0 * f64::ln(x)) * f64::sin(2.0 * PI * y),\n\n\t\tf64::sqrt(-2.0 * f64::ln(z)) * f64::cos(2.0 * PI * w),\n\n\t)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": "truck-shapeops/src/integrate/mod.rs", "rank": 75, "score": 197978.81290412534 }, { "content": "fn exec_parameter_transform_random_test<C>(curve: &C)\n\nwhere\n\n C: ParameterTransform,\n\n C::Point: Debug + Tolerance,\n\n C::Vector: Debug + Tolerance + std::ops::Mul<f64, Output = C::Vector>, {\n\n let a = rand::random::<f64>() + 0.5;\n\n let b = rand::random::<f64>() * 2.0;\n\n let transformed = curve.parameter_transformed(a, b);\n\n\n\n let (t0, t1) = curve.parameter_range();\n\n assert_near!(transformed.parameter_range().0, t0 * a + b);\n\n assert_near!(transformed.parameter_range().1, t1 * a + b);\n\n let p = rand::random::<f64>();\n\n let t = (1.0 - p) * t0 + p * t1;\n\n assert_near!(transformed.subs(t * a + b), curve.subs(t));\n\n assert_near!(transformed.der(t * a + b) * a, curve.der(t));\n\n assert_near!(transformed.der2(t * a + b) * a * a, curve.der2(t));\n\n assert_near!(transformed.front(), curve.front());\n\n assert_near!(transformed.back(), curve.back());\n\n}\n\n\n", "file_path": "truck-geotrait/src/traits/curve.rs", "rank": 76, "score": 196743.86676315876 }, { "content": "fn signup_vector(vector: [f32; 3], map: &mut HashMap<[i64; 3], usize>) -> usize {\n\n let vector = [\n\n ((vector[0] as f64 + TOLERANCE * 0.25) / (TOLERANCE * 0.5)) as i64,\n\n ((vector[1] as f64 + TOLERANCE * 0.25) / (TOLERANCE * 0.5)) as i64,\n\n ((vector[2] as f64 + TOLERANCE * 0.25) / (TOLERANCE * 0.5)) as i64,\n\n ];\n\n let len = map.len();\n\n *map.entry(vector).or_insert_with(|| len)\n\n}\n\n\n\nimpl std::iter::FromIterator<STLFace> for PolygonMesh {\n\n fn from_iter<I: IntoIterator<Item = STLFace>>(iter: I) -> PolygonMesh 
{\n\n let mut positions = HashMap::<[i64; 3], usize>::default();\n\n let mut normals = HashMap::<[i64; 3], usize>::default();\n\n let faces: Vec<[Vertex; 3]> = iter\n\n .into_iter()\n\n .map(|face| {\n\n let n = signup_vector(face.normal, &mut normals);\n\n let p = [\n\n signup_vector(face.vertices[0], &mut positions),\n", "file_path": "truck-polymesh/src/stl.rs", "rank": 77, "score": 196680.49602667446 }, { "content": "pub fn same_buffer(vec0: &[u8], vec1: &[u8]) -> bool {\n\n vec0.par_iter()\n\n .zip(vec1)\n\n .all(move |(i, j)| std::cmp::max(i, j) - std::cmp::min(i, j) < 3)\n\n}\n\n\n", "file_path": "truck-platform/tests/common.rs", "rank": 78, "score": 196298.21998384662 }, { "content": "pub fn same_buffer(vec0: &[u8], vec1: &[u8]) -> bool {\n\n vec0.par_iter()\n\n .zip(vec1)\n\n .all(move |(i, j)| std::cmp::max(i, j) - std::cmp::min(i, j) < 3)\n\n}\n\n\n", "file_path": "truck-rendimpl/tests/common.rs", "rank": 79, "score": 196298.21998384662 }, { "content": "#[inline(always)]\n\npub fn rotated<T: Mapped<Point3, Curve, Surface>>(\n\n elem: &T,\n\n origin: Point3,\n\n axis: Vector3,\n\n angle: Rad<f64>,\n\n) -> T {\n\n let mat0 = Matrix4::from_translation(-origin.to_vec());\n\n let mat1 = Matrix4::from_axis_angle(axis, angle);\n\n let mat2 = Matrix4::from_translation(origin.to_vec());\n\n transformed(elem, mat2 * mat1 * mat0)\n\n}\n\n\n\n/// Returns a scaled vertex, edge, wire, face, shell or solid.\n", "file_path": "truck-modeling/src/builder.rs", "rank": 80, "score": 195966.17493263166 }, { "content": "fn create_one_component<T>(adjacency: &mut HashMap<T, Vec<T>>) -> Vec<T>\n\nwhere T: Eq + Hash + Clone {\n\n let mut iter = adjacency.keys();\n\n let first = match iter.next() {\n\n Some(key) => key.clone(),\n\n None => return Vec::new(),\n\n };\n\n let mut stack = vec![first];\n\n let mut res = Vec::new();\n\n while !stack.is_empty() {\n\n let i = stack.pop().unwrap();\n\n if let Some(vec) = adjacency.remove(&i) {\n\n res.push(i);\n\n for j in vec {\n\n 
stack.push(j);\n\n }\n\n }\n\n }\n\n res\n\n}\n", "file_path": "truck-topology/src/shell.rs", "rank": 81, "score": 194913.42925100145 }, { "content": "fn combinatorial(n: usize) -> Vec<usize> {\n\n let mut res = vec![1];\n\n for i in 1..=n {\n\n res.push(res[i - 1] * (n - i + 1) / i);\n\n }\n\n res\n\n}\n\n\n", "file_path": "truck-geometry/src/nurbs/bspsurface.rs", "rank": 82, "score": 193989.47633598658 }, { "content": "#[derive(Clone, Copy, Debug, Deref, DerefMut)]\n\nstruct Triangle([Point3; 3]);\n\n\n\nimpl Triangle {\n\n\t#[inline(always)]\n\n\tfn normal(self) -> Vector3 { (self[1] - self[0]).cross(self[2] - self[0]).normalize() }\n\n\n\n\tfn is_crossing(self, ray: Ray) -> bool {\n\n\t\tlet a = self[0] - self[1];\n\n\t\tlet b = self[0] - self[2];\n\n\t\tlet mat = Matrix3::from_cols(a, b, ray.direction);\n\n\t\tif mat.determinant().so_small() {\n\n\t\t\tfalse\n\n\t\t} else {\n\n\t\t\tlet inv = mat.invert().unwrap();\n\n\t\t\tlet uvt = inv * (self[0] - ray.origin);\n\n\t\t\tuvt[0] > 0.0 && uvt[1] > 0.0 && uvt[0] + uvt[1] < 1.0 && uvt[2] > 0.0\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "truck-meshalgo/src/analyzers/in_out_judge.rs", "rank": 83, "score": 193062.19670948642 }, { "content": "#[wasm_bindgen]\n\npub fn tsweep(shape: &AbstractShape, vector: &[f64]) -> AbstractShape {\n\n intopt!(Vector3, vector);\n\n derive_all_sweepable!(shape, builder::tsweep, (vector))\n\n}\n\n\n\n/// Sweeps a vertex, an edge, a wire, a face, or a shell by the rotation.\n", "file_path": "truck-js/src/builder.rs", "rank": 84, "score": 190564.5380164464 }, { "content": "#[wasm_bindgen]\n\npub fn translated(shape: &AbstractShape, vector: &[f64]) -> AbstractShape {\n\n intopt!(Vector3, vector);\n\n derive_all_shape!(shape, builder::translated, (vector))\n\n}\n\n\n\n/// Returns a rotated vertex, edge, wire, face, shell or solid.\n", "file_path": "truck-js/src/builder.rs", "rank": 85, "score": 190564.5380164464 }, { "content": "#[inline(always)]\n\npub fn vertex(pt: Point3) -> Vertex { 
Vertex::new(pt) }\n\n\n\n/// Returns a line from `vertex0` to `vertex1`.\n\n/// # Examples\n\n/// ```\n\n/// use truck_modeling::*;\n\n///\n\n/// // draw a line\n\n/// let vertex0 = builder::vertex(Point3::new(1.0, 2.0, 3.0));\n\n/// let vertex1 = builder::vertex(Point3::new(6.0, 5.0, 4.0));\n\n/// let line = builder::line(&vertex0, &vertex1);\n\n/// # let curve = line.oriented_curve();\n\n/// # let pt0 = Point3::new(1.0, 2.0, 3.0);\n\n/// # let pt1 = Point3::new(6.0, 5.0, 4.0);\n\n/// # const N: usize = 10;\n\n/// # for i in 0..=N {\n\n/// # let t = i as f64 / N as f64;\n\n/// # assert!(curve.subs(t).near2(&(pt0 + t * (pt1 - pt0))));\n\n/// # }\n\n/// ```\n", "file_path": "truck-modeling/src/builder.rs", "rank": 86, "score": 189261.92001940278 }, { "content": "fn line(v0: &Vertex<Point3>, v1: &Vertex<Point3>) -> Edge<Point3, BSplineCurve<Point3>> {\n\n\tlet curve = BSplineCurve::new(\n\n\t\tKnotVec::bezier_knot(1),\n\n\t\tvec![v0.get_point(), v1.get_point()],\n\n\t);\n\n\tEdge::new(v0, v1, curve)\n\n}\n\n\n", "file_path": "truck-shapeops/src/divide_face/tests.rs", "rank": 87, "score": 186319.96284148833 }, { "content": "#[inline(always)]\n\nfn write_binary<I: IntoSTLIterator, W: Write>(iter: I, writer: &mut W) -> Result<()> {\n\n let mut iter = iter.into_iter();\n\n let len = iter.len() as u32;\n\n writer.write_all(&[0u8; 80])?;\n\n writer.write_all(&len.to_le_bytes())?;\n\n iter.try_for_each(|face| {\n\n writer.write_all(bytemuck::cast_slice(&[face]))?;\n\n writer.write_all(&[0u8, 0u8])?;\n\n Ok(())\n\n })\n\n}\n\n\n", "file_path": "truck-polymesh/src/stl.rs", "rank": 88, "score": 186145.4342804098 }, { "content": "/// Writes ASCII STL data\n\nfn write_ascii<I: IntoSTLIterator, W: Write>(iter: I, writer: &mut W) -> Result<()> {\n\n let mut iter = iter.into_iter();\n\n writer.write_all(b\"solid\\n\")?;\n\n iter.try_for_each::<_, Result<()>>(|face| {\n\n writer.write_fmt(format_args!(\n\n \" facet normal {:e} {:e} {:e}\\n\",\n\n face.normal[0], face.normal[1], 
face.normal[2]\n\n ))?;\n\n writer.write_all(b\" outer loop\\n\")?;\n\n face.vertices.iter().try_for_each(|pt| {\n\n writer.write_fmt(format_args!(\n\n \" vertex {:e} {:e} {:e}\\n\",\n\n pt[0], pt[1], pt[2]\n\n ))\n\n })?;\n\n writer.write_all(b\" endloop\\n endfacet\\n\")?;\n\n Ok(())\n\n })?;\n\n writer.write_all(b\"endsolid\\n\")?;\n\n Ok(())\n\n}\n\n\n\n/// Writes binary STL data\n", "file_path": "truck-polymesh/src/stl.rs", "rank": 89, "score": 186145.4342804098 }, { "content": "/// OR operation between two solids.\n\npub fn or<C: ShapeOpsCurve<S>, S: ShapeOpsSurface>(\n\n\tsolid0: &Solid<Point3, C, S>,\n\n\tsolid1: &Solid<Point3, C, S>,\n\n\ttol: f64,\n\n) -> Option<Solid<Point3, C, S>> {\n\n\tlet mut iter0 = solid0.boundaries().iter();\n\n\tlet mut iter1 = solid1.boundaries().iter();\n\n\tlet shell0 = iter0.next().unwrap();\n\n\tlet shell1 = iter1.next().unwrap();\n\n\tlet [_, mut or_shell] = process_one_pair_of_shells(shell0, shell1, tol)?;\n\n\tfor shell in iter0 {\n\n\t\tlet [_, res] = process_one_pair_of_shells(&or_shell, shell, tol)?;\n\n\t\tor_shell = res;\n\n\t}\n\n\tfor shell in iter1 {\n\n\t\tlet [_, res] = process_one_pair_of_shells(&or_shell, shell, tol)?;\n\n\t\tor_shell = res;\n\n\t}\n\n\tlet boundaries = or_shell.connected_components();\n\n\tSome(Solid::new(boundaries))\n\n}\n\n\n", "file_path": "truck-shapeops/src/integrate/mod.rs", "rank": 90, "score": 185653.29662442865 }, { "content": "/// AND operation between two solids.\n\npub fn and<C: ShapeOpsCurve<S>, S: ShapeOpsSurface>(\n\n\tsolid0: &Solid<Point3, C, S>,\n\n\tsolid1: &Solid<Point3, C, S>,\n\n\ttol: f64,\n\n) -> Option<Solid<Point3, C, S>> {\n\n\tlet mut iter0 = solid0.boundaries().iter();\n\n\tlet mut iter1 = solid1.boundaries().iter();\n\n\tlet shell0 = iter0.next().unwrap();\n\n\tlet shell1 = iter1.next().unwrap();\n\n\tlet [mut and_shell, _] = process_one_pair_of_shells(shell0, shell1, tol)?;\n\n\tfor shell in iter0 {\n\n\t\tlet [res, _] = process_one_pair_of_shells(&and_shell, 
shell, tol)?;\n\n\t\tand_shell = res;\n\n\t}\n\n\tfor shell in iter1 {\n\n\t\tlet [res, _] = process_one_pair_of_shells(&and_shell, shell, tol)?;\n\n\t\tand_shell = res;\n\n\t}\n\n\tlet boundaries = and_shell.connected_components();\n\n\tSome(Solid::new(boundaries))\n\n}\n\n\n", "file_path": "truck-shapeops/src/integrate/mod.rs", "rank": 91, "score": 185653.29662442865 }, { "content": "fn nontex_polygon(scene: &mut Scene, creator: &InstanceCreator) -> Vec<u8> {\n\n let cube: PolygonInstance = creator.create_instance(\n\n &obj::read(CUBE_OBJ).unwrap(),\n\n &PolygonState {\n\n material: Material {\n\n albedo: Vector4::new(1.0, 1.0, 1.0, 1.0),\n\n roughness: 0.5,\n\n reflectance: 0.25,\n\n ambient_ratio: 0.02,\n\n background_ratio: 0.0,\n\n alpha_blend: false,\n\n },\n\n ..Default::default()\n\n },\n\n );\n\n common::render_one(scene, &cube)\n\n}\n\n\n", "file_path": "truck-rendimpl/tests/render.rs", "rank": 92, "score": 185308.9754620312 }, { "content": "#[rustfmt::skip]\n\npub trait PolylineableCurve: ParametricCurve3D + Invertible + ParameterDivision1D<Point = Point3> {}\n\n#[rustfmt::skip]\n\nimpl<C: ParametricCurve3D + Invertible + ParameterDivision1D<Point = Point3>> PolylineableCurve for C {}\n\n#[rustfmt::skip]\n", "file_path": "truck-meshalgo/src/tessellation/mod.rs", "rank": 93, "score": 185291.9128945528 }, { "content": "fn sub_put_together_same_attrs<T: Copy + CastIntVector>(attrs: &[T]) -> Vec<usize> {\n\n let mut res = Vec::new();\n\n let mut map = HashMap::default();\n\n for (i, attr) in attrs.iter().enumerate() {\n\n let v = ((*attr).add_element_wise(TOLERANCE * 2.0) / (TOLERANCE * 4.0)).cast_int();\n\n res.push(*map.entry(v).or_insert(i));\n\n }\n\n res\n\n}\n\n\n", "file_path": "truck-meshalgo/src/filters/optimizing.rs", "rank": 94, "score": 184463.0959691322 }, { "content": "fn curve_surface_projection<C, S>(\n\n\tcurve: &C,\n\n\tcurve_hint: Option<f64>,\n\n\tsurface: &S,\n\n\tsurface_hint: Option<(f64, f64)>,\n\n\tpoint: Point3,\n\n\ttrials: 
usize,\n\n) -> Option<(Point3, f64, Point2)>\n\nwhere\n\n\tC: ParametricCurve3D + SearchNearestParameter<Point = Point3, Parameter = f64>,\n\n\tS: ParametricSurface3D + SearchNearestParameter<Point = Point3, Parameter = (f64, f64)>,\n\n{\n\n\tif trials == 0 {\n\n\t\treturn None;\n\n\t}\n\n\tlet t = curve.search_nearest_parameter(point, curve_hint, 10)?;\n\n\tlet pt0 = curve.subs(t);\n\n\tlet (u, v) = surface.search_nearest_parameter(point, surface_hint, 10)?;\n\n\tlet pt1 = surface.subs(u, v);\n\n\tif point.near(&pt0) && point.near(&pt1) && pt0.near(&pt1) {\n", "file_path": "truck-shapeops/src/loops_store/mod.rs", "rank": 95, "score": 183629.14982600926 }, { "content": "/// positive test implementation for `Concat`.\n\npub fn concat_random_test<C0, C1>(curve0: &C0, curve1: &C1, trials: usize)\n\nwhere\n\n C0: Concat<C1>,\n\n C0::Point: Debug + Tolerance,\n\n C0::Vector: Debug + Tolerance,\n\n C0::Output: ParametricCurve<Point = C0::Point, Vector = C0::Vector> + Debug,\n\n C1: ParametricCurve<Point = C0::Point, Vector = C0::Vector>, {\n\n (0..trials).for_each(move |_| exec_concat_random_test(curve0, curve1))\n\n}\n\n\n", "file_path": "truck-geotrait/src/traits/curve.rs", "rank": 96, "score": 181676.60148472147 }, { "content": "#[wasm_bindgen]\n\npub fn circle_arc(vertex0: &Vertex, vertex1: &Vertex, transit: &[f64]) -> Edge {\n\n intopt!(Point3, transit);\n\n builder::circle_arc(&*vertex0, &*vertex1, transit).into()\n\n}\n\n/// Returns a Bezier curve from `vertex0` to `vertex1` with inter control points `inter_points`.\n", "file_path": "truck-js/src/builder.rs", "rank": 97, "score": 180565.58479247382 }, { "content": "#[wasm_bindgen]\n\npub fn bezier(vertex0: &Vertex, vertex1: &Vertex, inter_points: &[f64]) -> Edge {\n\n assert!(\n\n inter_points.len() % 3 == 0,\n\n \"inter_points cannot convert to 3-dimentional points!\"\n\n );\n\n let inter_points = inter_points\n\n .chunks(3)\n\n .map(|p| Point3::new(p[0], p[1], p[2]))\n\n .collect();\n\n 
builder::bezier(&*vertex0, &*vertex1, inter_points).into()\n\n}\n\n/// Returns a homotopic face from `edge0` to `edge1`.\n", "file_path": "truck-js/src/builder.rs", "rank": 98, "score": 180565.58479247382 } ]
Rust
testspace/src/history.rs
galacticfungus/Egg
a8d80d046f28ae9688432c69890dd44bf6516315
use std::fs; use std::path; use std::vec; #[derive(Clone, Debug)] pub enum FileItem { Directory(path::PathBuf), File(path::PathBuf), } #[derive(Debug, Clone)] pub struct FileHistory { history: vec::Vec<FileItem>, allow_cleanup: bool, } impl Default for FileHistory { fn default() -> FileHistory { FileHistory { history: vec::Vec::default(), allow_cleanup: true, } } } impl FileHistory { pub fn allow_cleanup(&mut self, cleanup: bool) { self.allow_cleanup = cleanup; } pub fn record_directory<P: AsRef<path::Path>>(&mut self, path: P) { let path = path.as_ref(); self.history.push(FileItem::Directory(path.to_path_buf())); } pub fn record_file<P: AsRef<path::Path>>(&mut self, path: P) { let path = path.as_ref(); self.history.push(FileItem::File(path.to_path_buf())); } pub fn cleanup(&mut self) { while let Some(file_item) = self.history.pop() { match file_item { FileItem::Directory(path) => { fs::remove_dir_all(&path).unwrap_or_else(|err| { eprintln!( "Failed to cleanup the test directory {}, error was {}", path.display(), err ); }); eprintln!("Deleted {}", path.display()); } FileItem::File(path) => { fs::remove_file(path.as_path()).unwrap_or_else(|err| { eprintln!( "Failed to cleanup the test file {}, error was {}", path.display(), err ); }); eprintln!("Deleted {}", path.display()); } } } self.history.clear(); } } #[cfg(test)] mod tests { use super::FileHistory; use std::fs; #[test] fn test_history() { let mut history = FileHistory::default(); let mut temp_dir = std::env::temp_dir(); temp_dir.push("remove_me"); let temp_path = temp_dir.as_path(); fs::create_dir(temp_path).unwrap_or_else(|err| { panic!( "Failed to create the temporary directory, error was {}", err ); }); assert!(temp_path.exists()); history.record_directory(temp_path); history.cleanup(); assert_eq!(temp_path.exists(), false); } #[test] fn test_file_history() { use byteorder::{self, LittleEndian, WriteBytesExt}; let mut history = FileHistory::default(); let mut temp_file = std::env::temp_dir(); 
temp_file.push("remove_me_123"); let temp_path = temp_file.as_path(); { let mut file = fs::OpenOptions::new() .create(true) .create_new(true) .write(true) .read(true) .open(temp_path) .unwrap_or_else(|err| { panic!("Failed to create the temp file, error was {}", err); }); file.write_u64::<LittleEndian>(12345).unwrap_or_else(|err| { panic!( "Failed writing test data during history test, error was {}", err ); }); } assert!(temp_path.exists()); history.record_file(temp_path); history.cleanup(); assert_eq!(temp_path.exists(), false); } }
use std::fs; use std::path; use std::vec; #[derive(Clone, Debug)] pub enum FileItem { Directory(path::PathBuf), File(path::PathBuf), } #[derive(Debug, Clone)] pub struct FileHistory { history: vec::Vec<FileItem>, allow_cleanup: bool, } impl Default for FileHistory { fn default() -> FileHistory { FileHistory { history: vec::Vec::default(), allow_cleanup: true, } } } impl FileHistory { pub fn allow_cleanup(&mut self, cleanup: bool) { self.allow_cleanup = cleanup; } pub fn record_directory<P: AsRef<path::Path>>(&mut self, path: P) { let path = path.as_ref(); self.history.push(FileItem::Directory(path.to_path_buf())); } pub fn record_file<P: AsRef<path::Path>>(&mut self, path: P) { let path = path.as_ref(); self.history.push(FileItem::File(path.to_path_buf())); }
} #[cfg(test)] mod tests { use super::FileHistory; use std::fs; #[test] fn test_history() { let mut history = FileHistory::default(); let mut temp_dir = std::env::temp_dir(); temp_dir.push("remove_me"); let temp_path = temp_dir.as_path(); fs::create_dir(temp_path).unwrap_or_else(|err| { panic!( "Failed to create the temporary directory, error was {}", err ); }); assert!(temp_path.exists()); history.record_directory(temp_path); history.cleanup(); assert_eq!(temp_path.exists(), false); } #[test] fn test_file_history() { use byteorder::{self, LittleEndian, WriteBytesExt}; let mut history = FileHistory::default(); let mut temp_file = std::env::temp_dir(); temp_file.push("remove_me_123"); let temp_path = temp_file.as_path(); { let mut file = fs::OpenOptions::new() .create(true) .create_new(true) .write(true) .read(true) .open(temp_path) .unwrap_or_else(|err| { panic!("Failed to create the temp file, error was {}", err); }); file.write_u64::<LittleEndian>(12345).unwrap_or_else(|err| { panic!( "Failed writing test data during history test, error was {}", err ); }); } assert!(temp_path.exists()); history.record_file(temp_path); history.cleanup(); assert_eq!(temp_path.exists(), false); } }
pub fn cleanup(&mut self) { while let Some(file_item) = self.history.pop() { match file_item { FileItem::Directory(path) => { fs::remove_dir_all(&path).unwrap_or_else(|err| { eprintln!( "Failed to cleanup the test directory {}, error was {}", path.display(), err ); }); eprintln!("Deleted {}", path.display()); } FileItem::File(path) => { fs::remove_file(path.as_path()).unwrap_or_else(|err| { eprintln!( "Failed to cleanup the test file {}, error was {}", path.display(), err ); }); eprintln!("Deleted {}", path.display()); } } } self.history.clear(); }
function_block-full_function
[ { "content": "/// Represents a file in the working directory that we may wish to snapshot or otherwise investigate,\n\n/// This structure does not contain the path of the file since the path is used as a key inside a map of WorkingFiles\n\nstruct WorkingFile {\n\n hash: Option<Hash>,\n\n file_size: u64,\n\n modified_time: u128,\n\n}\n\n\n\n// TODO: String can't be used here, we must use a byte array since the data may not be valid utf8\n", "file_path": "egg/src/working.rs", "rank": 0, "score": 48571.658171448085 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "server/src/main.rs", "rank": 1, "score": 48502.38788285546 }, { "content": "enum SliceType {\n\n Overlapping(usize, usize),\n\n Simple(usize),\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::RawZip;\n\n use super::{Move, ProspectiveDifference, ProspectiveMove};\n\n use crate::hash::Hash;\n\n use crate::working::WorkingDirectory;\n\n use smallvec::SmallVec;\n\n use std::collections::HashMap;\n\n use testspace::Alphabet;\n\n use testspace::{TestSpace, TestSpaceFile};\n\n\n\n #[test]\n\n fn basic_previously_removed_short_test() {\n\n let ts = TestSpace::new();\n\n let mut original_file = ts.create_text_file();\n", "file_path": "egg/src/working.rs", "rank": 2, "score": 47971.240131899656 }, { "content": "// Prospective moves are guarenteed to be moves however the lines that are being moved may be changed\n\n// TODO: This all needs to be changed to a\n\nstruct Move<'a> {\n\n line: &'a str,\n\n source_line: usize,\n\n new_line: usize,\n\n}\n\n\n\nimpl<'a> Move<'a> {\n\n pub fn get_lines(&self) -> (usize, usize) {\n\n (self.source_line, self.new_line)\n\n }\n\n}\n\n\n\nimpl<'a> std::fmt::Debug for Move<'a> {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.write_fmt(format_args!(\n\n \"The line '{}' moved from {} to {}\",\n\n self.line, self.source_line, self.new_line\n\n ))\n\n }\n\n}\n", "file_path": "egg/src/working.rs", "rank": 3, 
"score": 47967.97324678937 }, { "content": "struct AtomicRecovery;\n\n\n\nimpl AtomicRecovery {\n\n\n\n/// Checks to see if the operations that were under way are recoverable\n\n fn can_restore(sw_count: usize, sc_count: usize, cw_count: usize, cc_count: usize, rw_count: usize, rp_count: usize) -> bool {\n\n fn test_working_complete(working_count: usize, complete_count: usize, replace_previous: usize) -> bool {\n\n match (working_count > 0, complete_count > 0, replace_previous > 0) {\n\n (_, true, _) => true, // If complete contains files at all then recovery is possible\n\n (_, _, true) => true, // If previous contains files at all then recovery is possible\n\n (true, false, false) => false, // If only working contains files then recovery is not possible\n\n (false, false, false) => true, // No files found so either the operation was interrupted at a later stage or that operation wasn't performed\n\n }\n\n }\n\n let store_recoverable = test_working_complete(sw_count, sc_count, rp_count);\n\n let create_recoverable = test_working_complete(cw_count, cc_count, rp_count);\n\n // Explicit test to avoid confusion over naming\n\n // TODO: This is flawed since if stage 2 is under way, both store and create operations will return that they can't complete since working may have files but complete will be empty, stage 2 is only used in replace\n\n // Explicitly test rp - if rp > 0 then working is fine\n\n let replace_recoverable = match (rw_count > 0, rp_count > 0) {\n", "file_path": "egg/src/atomic/recovery.rs", "rank": 4, "score": 47156.095299558656 }, { "content": "fn main() {\n\n\n\n println!(\"Hello, world!\");\n\n let matches = parse_arguments();\n\n\n\n // You can also match on a sub-command's name\n\n match matches.subcommand() {\n\n (\"init\", Some(init_matches)) => {\n\n // Value is either set or is a default\n\n let to_init = init_matches.value_of(\"init_path\").unwrap();\n\n let path_to_init = path::Path::new(to_init);\n\n if path_to_init.exists() == false {\n\n 
if let Err(error) = fs::create_dir_all(path_to_init) {\n\n println!(\"Could not create the directory {} when initializing the repository, error was {}\", path_to_init.display(), error);\n\n return;\n\n }\n\n }\n\n if let Err(error) = egg::Repository::create_repository(path_to_init) {\n\n println!(\"Failed to create the repository, error was {}\", error);\n\n return;\n", "file_path": "egg-cli/src/main.rs", "rank": 5, "score": 46963.95902055611 }, { "content": "/// The atomic operation to perform, we store a path relative to the working directory\n\nenum FileOperation {\n\n /// Files being replaced start in rw, then move to rc, but first files are moved from current to rp, finally rc is moved to current\n\n Replace(path::PathBuf),\n\n /// Files being created start in cw, then once complete are moved to cc\n\n Create(path::PathBuf),\n\n /// Files being stored are copied to sw, then moved to sc\n\n Store(path::PathBuf),\n\n}\n\n\n\n// TODO: AtomicUpdate needs to support versioning since a repository might be updated with an old interrupted operation\n\n/// Responsible for making sure that all files are updated atomically\n\npub struct AtomicUpdate<'a> {\n\n atomic_jobs: Vec<FileOperation>,\n\n path_to_working: &'a path::Path,\n\n path_to_repository: &'a path::Path,\n\n path_to_create_working: path::PathBuf,\n\n path_to_create_complete: path::PathBuf,\n\n path_to_replace_working: path::PathBuf,\n\n path_to_replace_complete: path::PathBuf,\n\n path_to_replace_previous: path::PathBuf,\n", "file_path": "egg/src/atomic/mod.rs", "rank": 6, "score": 46567.92724891857 }, { "content": "#[derive(PartialEq)]\n\nenum ProspectiveDifference<'a> {\n\n DuplicateRemove(&'a str, VecDeque<usize>),\n\n DuplicateInsert(&'a str, VecDeque<usize>),\n\n Remove(&'a str, usize),\n\n Insert(&'a str, usize),\n\n}\n\n\n\nimpl<'a> std::fmt::Debug for ProspectiveDifference<'a> {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self {\n\n // Convert to BTreeSet\n\n 
ProspectiveDifference::DuplicateInsert(line, duplicates) => f.write_fmt(format_args!(\n\n \"The line '{}' has multiple inserts at {:?} in the new file\",\n\n line, duplicates\n\n )),\n\n ProspectiveDifference::DuplicateRemove(line, duplicates) => f.write_fmt(format_args!(\n\n \"The line '{}' has duplicate removals at {:?} in the original file\",\n\n line, duplicates\n\n )),\n\n ProspectiveDifference::Insert(line, line_number) => f.write_fmt(format_args!(\n", "file_path": "egg/src/working.rs", "rank": 7, "score": 45910.61163228377 }, { "content": "enum AtomicRecoveryList<'a> {\n\n Create(Vec<&'a str>),\n\n Store(Vec<&'a str>),\n\n Replace(Vec<&'a str>),\n\n}\n", "file_path": "egg/src/atomic/recovery.rs", "rank": 8, "score": 43213.06918677756 }, { "content": "enum AtomicRecoveryJob<'a> {\n\n StageOne(Vec<AtomicRecoveryList<'a>>),\n\n StageTwo(Vec<AtomicRecoveryList<'a>>),\n\n StageThree(Vec<AtomicRecoveryList<'a>>),\n\n StageFour(Vec<AtomicRecoveryList<'a>>),\n\n StageFive(Vec<AtomicRecoveryList<'a>>),\n\n}\n\n\n\nimpl<'a> AtomicRecoveryList<'a> {\n\n pub fn get_names(&'a self) -> &[&'a str] {\n\n match self {\n\n AtomicRecoveryList::Create(file_names) => file_names.as_slice(),\n\n AtomicRecoveryList::Store(file_names) => file_names.as_slice(),\n\n AtomicRecoveryList::Replace(file_names) => file_names.as_slice(),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> AtomicRecoveryJob<'a> {\n\n pub fn get_jobs(&'a self) -> &'a [AtomicRecoveryList] {\n", "file_path": "egg/src/atomic/recovery.rs", "rank": 9, "score": 43213.06918677756 }, { "content": "#[test]\n\nfn egg_get_staged_files_test() {\n\n \n\n}\n\n\n", "file_path": "egg/tests/test_snapshots.rs", "rank": 10, "score": 41986.31029350722 }, { "content": "#[test]\n\nfn egg_take_snapshot_with_child_test() {\n\n let ts = TestSpace::new().allow_cleanup(false);\n\n let mut ts2 = ts.create_child();\n\n let mut files_to_snapshot = ts2.create_random_files(2, 4096);\n\n let working_path = ts.get_path();\n\n let mut repo = 
egg::Repository::create_repository(working_path).expect(\"Failed to create repository\");\n\n println!(\"working: {}\", working_path.display());\n\n for file in &files_to_snapshot {\n\n println!(\"file: {}\", file.display());\n\n }\n\n let snapshot_id = repo.take_snapshot(None, \"Test Message\", files_to_snapshot.clone()).expect(\"Failed to take snapshot\");\n\n let mut new_files = ts2.create_random_files(2, 2048);\n\n files_to_snapshot.append(&mut new_files);\n\n let child_id = repo.take_snapshot(Some(snapshot_id), \"A child snapshot\", files_to_snapshot).expect(\"Failed to get child snapshot\");\n\n let child_snapshot = repo.get_snapshot(child_id).expect(\"Child not found\");\n\n for file in child_snapshot.get_files() {\n\n println!(\"{:?}\", file.path());\n\n }\n\n}\n", "file_path": "egg/tests/test_snapshots.rs", "rank": 11, "score": 41986.31029350722 }, { "content": "#[test]\n\nfn egg_initialize_a_simple_repository_test() {\n\n // Since ts always returns absolute paths\n\n let ts = TestSpace::new();\n\n let working_path = ts.get_path().canonicalize().unwrap();\n\n let egg = egg::Repository::create_repository(working_path.as_path()).expect(\"Failed to create repository\");\n\n let loaded_egg = egg::Repository::find_egg(working_path.as_path()).expect(\"Failed to find egg\");\n\n // assert_eq!(egg, loaded_egg);\n\n assert_eq!(loaded_egg.get_working_path(), working_path.as_path());\n\n}\n\n\n", "file_path": "egg/tests/test_snapshots.rs", "rank": 12, "score": 41986.31029350722 }, { "content": "#[test]\n\nfn egg_taking_simple_snapshot_test() {\n\n let ts = TestSpace::new();\n\n let mut ts2 = ts.create_child();\n\n let files_to_snapshot = ts2.create_random_files(2, 4096);\n\n let working_path = ts.get_path();\n\n let mut repo = egg::Repository::create_repository(working_path).expect(\"Failed to create repository\");\n\n println!(\"working: {}\", working_path.display());\n\n for file in &files_to_snapshot {\n\n println!(\"file: {}\", file.display());\n\n }\n\n let 
snapshot_id = repo.take_snapshot(None, \"Test Message\", files_to_snapshot.clone()).expect(\"Failed to take snapshot\");\n\n let snapshot = repo.get_snapshot(snapshot_id).expect(\"Could not retrieve the snapshot that was just taken\");\n\n let files_snapshotted = snapshot.get_files();\n\n for (index, file) in files_snapshotted.iter().enumerate() {\n\n assert_eq!(file.filesize(), 4096);\n\n let original_absolute = files_to_snapshot[index].canonicalize().expect(\"Failed to convert test path to an absolute path\");\n\n assert_eq!(original_absolute.as_path(), file.path());\n\n }\n\n}\n\n\n", "file_path": "egg/tests/test_snapshots.rs", "rank": 13, "score": 41986.31029350722 }, { "content": "#[test]\n\nfn egg_get_recent_snapshot_test() {\n\n let ts = TestSpace::new();\n\n let mut ts2 = ts.create_child();\n\n let files_to_snapshot = ts2.create_random_files(2, 4096);\n\n let working_path = ts.get_path();\n\n let mut repo = egg::Repository::create_repository(working_path).expect(\"Failed to create repository\");\n\n println!(\"working: {}\", working_path.display());\n\n for file in &files_to_snapshot {\n\n println!(\"file: {}\", file.display());\n\n }\n\n let snapshot_id = repo.take_snapshot(None, \"Test Message\", files_to_snapshot.clone()).expect(\"Failed to take snapshot\");\n\n let latest_snapshot = repo.get_latest_snapshot().expect(\"Failed to retrieve latest snapshot\");\n\n match latest_snapshot {\n\n Some(latest_snapshot) => assert_eq!(latest_snapshot, snapshot_id),\n\n None => panic!(\"No latest snapshot was found\")\n\n }\n\n \n\n}\n\n\n", "file_path": "egg/tests/test_snapshots.rs", "rank": 14, "score": 41986.31029350722 }, { "content": "fn parse_arguments<'a>() -> clap::ArgMatches<'a> {\n\n let matches = clap::App::new(\"Egg-Cli\")\n\n .version(\"0.1\")\n\n .subcommand(clap::SubCommand::with_name(\"init\")\n\n .about(\"Initialize a repository\")\n\n .version(\"0.1\")\n\n .author(\"Someone E. 
<someone_else@other.com>\")\n\n .arg(clap::Arg::with_name(\"init_path\")\n\n .index(1)\n\n .help(\"path to create the repository in\")\n\n .default_value(\".\")\n\n .takes_value(true)))\n\n .subcommand(clap::SubCommand::with_name(\"snapshot\")\n\n .alias(\"s\")\n\n .about(\"Take a snapshot of the working directory\")\n\n .version(\"0.1\")\n\n .author(\"Someone E. <someone_else@other.com>\")\n\n .subcommand(clap::SubCommand::with_name(\"files\")\n\n .alias(\"f\")\n\n .about(\"Take a snapshot of the following files\")\n", "file_path": "egg-cli/src/main.rs", "rank": 15, "score": 36479.26929325083 }, { "content": "pub trait WriteEggExt: Write + WriteBytesExt + Seek {\n\n fn write_optional_hash(&mut self, hash_to_write: Option<&hash::Hash>) -> Result<()> {\n\n // Are we writing a hash or not?\n\n match hash_to_write {\n\n Some(hash_to_write) => {\n\n debug_assert_eq!(hash_to_write.len(), 64, \"Hash being written was not 64 bytes long\");\n\n // Write hash length of 64 of blake2 hash\n\n // TODO: This doesn't need to be u16\n\n if let Err(error) = self.write_u16::<LittleEndian>(64) {\n\n // TODO: Fix error message\n\n return Err(Error::write_error(Some(UnderlyingError::from(error)))\n\n .add_debug_message(\"Failed to write length of optional (Some) hash, error was {}\"));\n\n };\n\n if let Err(error) = self.write_hash(hash_to_write) {\n\n return Err(error.add_debug_message(\"While writing an optional hash\"));\n\n };\n\n },\n\n None => {\n\n // We can write a hash length of zero here\n\n if let Err(error) = self.write_u16::<LittleEndian>(0) {\n", "file_path": "egg/src/storage/stream.rs", "rank": 22, "score": 33992.917992704504 }, { "content": "//todo: Swap all string lengths to use a u64\n\n/// Reads a path from the reader, it first reads the path length and then reads the path bytes\n\npub trait ReadEggExt: io::Read + ReadBytesExt + io::Seek {\n\n fn read_string(&mut self) -> Result<String> {\n\n let string_length = match self.read_u16::<byteorder::LittleEndian>() {\n\n 
Ok(string_length) => string_length,\n\n Err(error) => return Err(Error::parsing_error(Some(UnderlyingError::from(error)))\n\n .add_debug_message(\"Failed to read the length of a string with the Read trait extensions\")),\n\n };\n\n // let mut byte_data = Vec::with_capacity(string_length as usize);\n\n let mut string_reader = self.take(string_length as u64);\n\n let mut string_from_bytes = String::with_capacity(usize::from(string_length));\n\n if let Err(error) = string_reader.read_to_string(&mut string_from_bytes) {\n\n \n\n return Err(Error::parsing_error(Some(UnderlyingError::from(error)))\n\n .add_debug_message(\"Failed to read a string with Read Trait extensions\"));\n\n }\n\n Ok(string_from_bytes)\n\n }\n\n\n\n fn read_optional_hash(&mut self) -> Result<Option<hash::Hash>> {\n\n // Read hash length\n", "file_path": "egg/src/storage/stream.rs", "rank": 23, "score": 30902.386400836018 }, { "content": "use std::path;\n\n\n\nmod alphabet;\n\nmod history;\n\nmod modification;\n\nmod testspace;\n\nmod testspacefile;\n\nmod testspacetextfile;\n\n\n\npub struct TestSpace {\n\n working_directory: path::PathBuf,\n\n history: history::FileHistory,\n\n allow_cleanup: bool,\n\n}\n\n\n\npub struct TestSpaceFile {\n\n path_to_file: path::PathBuf,\n\n history: history::FileHistory,\n\n allow_cleanup: bool,\n\n}\n", "file_path": "testspace/src/lib.rs", "rank": 24, "score": 24.680892904521656 }, { "content": "use crate::history::{self};\n\nuse crate::Alphabet;\n\nuse crate::TestSpaceFile;\n\nuse crate::{LineModification, TestSpace, TestSpaceTextFile, TextModifier};\n\nuse rand::{self, Rng};\n\nuse std::fs;\n\nuse std::path;\n\n\n\nimpl TestSpace {\n\n const SUFFIX: &'static str = \"_ts\";\n\n /// Creates a test space that represents a directory in the temporary system directory.\n\n pub fn new() -> TestSpace {\n\n let temp_path = std::env::temp_dir();\n\n let random_path = TestSpace::create_rand_dir(temp_path.as_path());\n\n let mut history = 
history::FileHistory::default();\n\n history.record_directory(random_path.as_path());\n\n TestSpace {\n\n working_directory: random_path,\n\n history,\n\n allow_cleanup: true,\n", "file_path": "testspace/src/testspace.rs", "rank": 25, "score": 22.702965974954687 }, { "content": "use super::history;\n\nuse crate::testspace;\n\nuse crate::Alphabet;\n\nuse crate::TestSpace;\n\nuse crate::TestSpaceFile;\n\nuse rand::{self, Rng};\n\nuse std::fs;\n\nuse std::io::{self, Seek, SeekFrom, Write};\n\nuse std::path;\n\n\n\nimpl TestSpaceFile {\n\n pub fn new(test_space: &TestSpace) -> TestSpaceFile {\n\n let path = TestSpaceFile::create_rand_file(test_space.get_path(), None);\n\n let mut history = history::FileHistory::default();\n\n history.record_file(path.as_path());\n\n TestSpaceFile {\n\n path_to_file: path,\n\n history,\n\n allow_cleanup: test_space.is_cleaning(),\n\n }\n", "file_path": "testspace/src/testspacefile.rs", "rank": 26, "score": 21.110518427635565 }, { "content": " fn from(path: &path::Path) -> Self {\n\n TestSpaceFile::from_path(path)\n\n }\n\n}\n\n\n\nimpl From<path::PathBuf> for TestSpaceFile {\n\n fn from(path: path::PathBuf) -> Self {\n\n TestSpaceFile::from_path(path)\n\n }\n\n}\n\n\n\nimpl Drop for TestSpaceFile {\n\n fn drop(&mut self) {\n\n if self.allow_cleanup {\n\n self.history.cleanup()\n\n }\n\n }\n\n}\n\n\n\nimpl TestSpaceFile {\n", "file_path": "testspace/src/testspacefile.rs", "rank": 27, "score": 20.86101826690366 }, { "content": " /// Creates a randomly named file in the given directory, returns the path\n\n fn create_rand_file(base_path: &path::Path, suffix: Option<&str>) -> path::PathBuf {\n\n let mut random_name: String = TestSpace::get_random_string(9);\n\n if let Some(suffix) = suffix {\n\n random_name.push_str(suffix);\n\n }\n\n let file_path = base_path.join(random_name);\n\n file_path\n\n }\n\n}\n\n\n\nimpl TestSpaceFile {\n\n /// Should the TestSpace clean up after itself\n\n pub fn allow_cleanup(mut self, allow_cleanup: bool) -> 
Self {\n\n self.allow_cleanup = allow_cleanup;\n\n self\n\n }\n\n\n\n pub fn open_file(&self) -> fs::File {\n\n self.get_file_object()\n", "file_path": "testspace/src/testspacefile.rs", "rank": 28, "score": 19.903878988353025 }, { "content": "\n\n pub fn is_cleaning(&self) -> bool {\n\n self.allow_cleanup\n\n }\n\n\n\n /// Should the TestSpace clean up after itself\n\n pub fn allow_cleanup(mut self, allow_cleanup: bool) -> Self {\n\n self.allow_cleanup = allow_cleanup;\n\n self\n\n }\n\n\n\n pub fn create_random_files(\n\n &mut self,\n\n amount_to_create: u8,\n\n file_size: usize,\n\n ) -> Vec<path::PathBuf> {\n\n let mut path_list = Vec::new();\n\n for _ in 0..amount_to_create {\n\n let mut tsf = TestSpaceFile::with_suffix(self, \".file\").allow_cleanup(false);\n\n tsf.write_random_bytes(file_size);\n", "file_path": "testspace/src/testspace.rs", "rank": 29, "score": 19.53264475683282 }, { "content": " path_to_file: path.to_path_buf(),\n\n history,\n\n allow_cleanup: false,\n\n }\n\n }\n\n\n\n pub fn with_name<F: AsRef<path::Path>>(file_name: F, test_space: &TestSpace) -> TestSpaceFile {\n\n let path = file_name.as_ref();\n\n let mut history = history::FileHistory::default();\n\n let new_path = test_space.get_path().join(path);\n\n history.record_file(new_path.as_path());\n\n TestSpaceFile {\n\n path_to_file: new_path,\n\n history,\n\n allow_cleanup: test_space.is_cleaning(),\n\n }\n\n }\n\n}\n\n\n\nimpl From<&path::Path> for TestSpaceFile {\n", "file_path": "testspace/src/testspacefile.rs", "rank": 30, "score": 18.409510789176153 }, { "content": "\n\n// Essentially uses partial Eq for equivalence\n\nimpl Eq for Hash {}\n\n\n\nimpl PartialEq for Hash {\n\n fn eq(&self, other: &Hash) -> bool {\n\n for index in 0..64 {\n\n if self.bytes[index] != other.bytes[index] {\n\n return false;\n\n }\n\n }\n\n true\n\n }\n\n}\n\n\n\nimpl From<&[u8]> for Hash {\n\n fn from(byte_slice: &[u8]) -> Self {\n\n // TODO: If this is bottleneck then use unitialized memory in unsafe\n\n 
let mut bytes: [u8;64] = [0;64];\n\n bytes.copy_from_slice(byte_slice);\n", "file_path": "egg/src/hash.rs", "rank": 31, "score": 17.707911030420696 }, { "content": "use crate::{Alphabet, TestSpace, TestSpaceTextFile};\n\nuse std::fs;\n\nuse std::io::{BufRead, BufReader, BufWriter, Read, Write};\n\nuse std::path;\n\n\n\nimpl TestSpaceTextFile {\n\n pub fn new(path_to_file: path::PathBuf, alphabet: Alphabet) -> TestSpaceTextFile {\n\n TestSpaceTextFile {\n\n alphabet,\n\n lines_of_text: Vec::new(),\n\n path_to_file,\n\n auto_flush: true,\n\n }\n\n }\n\n\n\n pub fn get_path(&self) -> &path::Path {\n\n self.path_to_file.as_path()\n\n }\n\n\n\n pub fn open_file(path_to_file: path::PathBuf) -> TestSpaceTextFile {\n", "file_path": "testspace/src/testspacetextfile.rs", "rank": 32, "score": 17.587049569195212 }, { "content": " }\n\n\n\n /// Create a new randomly named test space file but with a specific suffix attached to the file name\n\n pub fn with_suffix(test_space: &TestSpace, suffix: &str) -> TestSpaceFile {\n\n let path = TestSpaceFile::create_rand_file(test_space.get_path(), Some(suffix));\n\n let mut history = history::FileHistory::default();\n\n history.record_file(path.as_path());\n\n TestSpaceFile {\n\n path_to_file: path,\n\n history,\n\n allow_cleanup: test_space.is_cleaning(),\n\n }\n\n }\n\n\n\n /// Convert any type that can be converted to a &Path to a TestSpaceFile\n\n fn from_path<P: AsRef<path::Path>>(path: P) -> TestSpaceFile {\n\n let path = path.as_ref();\n\n let mut history = history::FileHistory::default();\n\n history.record_file(path);\n\n TestSpaceFile {\n", "file_path": "testspace/src/testspacefile.rs", "rank": 33, "score": 17.47192828010978 }, { "content": "use crate::{Alphabet, LineModification, TextModifier};\n\nuse std::fmt::Debug;\n\n\n\nimpl LineModification {\n\n // Changed(usize, String, String),\n\n //Insert(usize, String),\n\n pub fn get_line_number(&self) -> usize {\n\n match self {\n\n Self::Insert(line_number, _) => 
*line_number,\n\n Self::Changed(line_number, _, _) => *line_number,\n\n Self::Remove(line_number) => *line_number,\n\n }\n\n }\n\n}\n\n\n\nimpl Debug for LineModification {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n match self {\n\n Self::Insert(line_number, line_inserted) => f.write_fmt(format_args!(\n\n \"Inserted a line at {}, the line was {}\",\n", "file_path": "testspace/src/modification.rs", "rank": 34, "score": 17.374672709182448 }, { "content": " use testspace::{TestSpace};\n\n\n\n // Only need this for testing\n\n impl PartialEq for Repository {\n\n fn eq(&self, other: &Repository) -> bool {\n\n if self.version != other.version {\n\n return false;\n\n }\n\n if self.working_path != other.working_path {\n\n return false;\n\n }\n\n return true;\n\n }\n\n }\n\n\n\n // Previous versions of repository functions\n\n impl Repository {\n\n //TODO: Versioned functions for initializing a repository for testing reading older versions of the repository\n\n \n\n }\n", "file_path": "egg/src/egg.rs", "rank": 35, "score": 17.27155592350903 }, { "content": "use std::path;\n\nuse crate::hash::Hash;\n\n\n\nmod id;\n\nmod snapshot;\n\nmod builder;\n\nmod file;\n\nmod location;\n\n\n\n#[derive(Debug, Clone, Eq, Hash)]\n\npub enum SnapshotId {\n\n Located(Hash, SnapshotLocation), // Snapshot is not loaded but we know its location\n\n NotLocated(Hash), // Snapshot supposedly exists and is referenced in the repository but we don't know its location\n\n Indexed(usize, Hash), // Snapshot is loaded and indexed in the vector of snapshots\n\n}\n\n\n\n#[derive(Debug, Clone, Eq, Hash, PartialEq)]\n\npub enum SnapshotLocation {\n\n Simple, // The snapshot is stored in a single file located in the snapshot directory on the local repository\n\n // Packed(index, path) - \n", "file_path": "egg/src/snapshots/types/mod.rs", "rank": 36, "score": 16.811710356171883 }, { "content": "use super::SnapshotBuilder;\n\nuse super::{FileMetadata, Snapshot};\n\nuse 
crate::hash::Hash;\n\nuse blake2::{self, Digest};\n\nuse std::path;\n\nuse std::string::String;\n\n\n\n// TODO: SnapshotBuilder should return self\n\nimpl SnapshotBuilder {\n\n pub fn new() -> SnapshotBuilder {\n\n SnapshotBuilder {\n\n message: None,\n\n id: None,\n\n files: Vec::new(),\n\n children: Vec::new(),\n\n parent: None,\n\n }\n\n }\n\n\n\n pub fn set_message(mut self, message: String) -> Self {\n", "file_path": "egg/src/snapshots/types/builder.rs", "rank": 37, "score": 16.71331404616872 }, { "content": " return Err(Error::parsing_error(Some(UnderlyingError::from(error)))\n\n .add_debug_message(format!(\"File copy failed when restoring a file that had been stored in local storage\")));\n\n }\n\n Ok(())\n\n }\n\n\n\n pub fn is_file_stored(&self, file_hash: &hash::Hash) -> bool {\n\n let stored_file_path = self.path_to_file_storage.join(String::from(file_hash));\n\n stored_file_path.exists()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::hash::{Hash};\n\n use testspace;\n\n use testspace::TestSpace;\n\n use crate::storage::LocalStorage;\n\n use std::path;\n\n\n", "file_path": "egg/src/storage/local.rs", "rank": 38, "score": 16.408982013075526 }, { "content": " }\n\n\n\n pub fn is_indexed(&self, hash: &Hash) -> bool {\n\n // Has the index seen the hash\n\n if let Some(id) = self.index.get(hash) {\n\n match id {\n\n // We only care about indexed\n\n SnapshotId::Indexed(_, _) => return true,\n\n _ => return false,\n\n }\n\n }\n\n false\n\n }\n\n\n\n // Retrieve the SnapshotId for a given hash\n\n pub fn get_id(&self, hash: &Hash, path_to_repository: &path::Path) -> Result<SnapshotId, Error> {\n\n // If the hash has an entry in the index then we can return\n\n if let Some(id) = self.index.get(hash) {\n\n return Ok(id.clone());\n\n } else {\n", "file_path": "egg/src/snapshots/index.rs", "rank": 39, "score": 16.24586918176643 }, { "content": " self.history.record_file(tsf.get_path());\n\n test_files.push(tsf);\n\n }\n\n test_files\n\n }\n\n\n\n /// 
Creates a test space inside the current test space\n\n pub fn create_child(&self) -> TestSpace {\n\n let new_space = TestSpace::create_rand_dir(self.working_directory.as_path());\n\n let mut history = history::FileHistory::default();\n\n history.record_directory(new_space.as_path());\n\n TestSpace {\n\n working_directory: new_space,\n\n history,\n\n allow_cleanup: self.allow_cleanup,\n\n }\n\n }\n\n\n\n /// Creates a directory in this test spaces directory\n\n pub fn create_test_path(&mut self) -> path::PathBuf {\n", "file_path": "testspace/src/testspace.rs", "rank": 40, "score": 16.235915673940653 }, { "content": "use super::SnapshotsState;\n\n\n\nmod builder;\n\nmod change;\n\nmod init;\n\n\n\n// Helper struct to help with managing changes to state\n\npub struct StateBuilder<'a> {\n\n state: &'a mut SnapshotsState, // The state to modify\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::hash::Hash;\n\n use crate::snapshots::SnapshotsState;\n\n use crate::AtomicUpdate;\n\n use testspace::TestSpace;\n\n\n\n impl PartialEq for SnapshotsState {\n\n fn eq(&self, other: &Self) -> bool {\n", "file_path": "egg/src/snapshots/state/mod.rs", "rank": 41, "score": 16.065544149237976 }, { "content": "use std::fmt;\n\nuse std::io;\n\nuse std::string;\n\n\n\n#[derive(Debug)]\n\npub enum UnderlyingError {\n\n Io(io::Error),\n\n InvalidString(string::FromUtf8Error),\n\n FailedConversion(std::num::TryFromIntError),\n\n // TODO: Path fail may need to be more generic than this\n\n PathFail(std::path::StripPrefixError),\n\n}\n\n\n\nimpl UnderlyingError {\n\n pub fn get_error(&self) -> &(dyn std::error::Error + 'static) {\n\n match self {\n\n UnderlyingError::InvalidString(error) => error,\n\n UnderlyingError::Io(error) => error,\n\n UnderlyingError::FailedConversion(error) => error,\n\n UnderlyingError::PathFail(error) => error,\n", "file_path": "egg/src/error/underlying.rs", "rank": 42, "score": 15.825335012981183 }, { "content": " let file = fs::OpenOptions::new()\n\n 
.create(false)\n\n .read(true)\n\n .open(path_to_file.as_path())\n\n .unwrap();\n\n let reader = BufReader::new(file);\n\n let mut lines = Vec::new();\n\n for untested_line in reader.lines() {\n\n let line = untested_line.unwrap();\n\n lines.push(line);\n\n }\n\n TestSpaceTextFile {\n\n alphabet: Alphabet::Latin,\n\n lines_of_text: lines,\n\n path_to_file,\n\n auto_flush: true,\n\n }\n\n }\n\n\n\n pub fn auto_flush(&mut self, auto_flush: bool) -> &Self {\n", "file_path": "testspace/src/testspacetextfile.rs", "rank": 43, "score": 15.73806623551399 }, { "content": " pub fn hash(&self) -> &Hash {\n\n &self.hash\n\n }\n\n\n\n pub fn filesize(&self) -> u64 {\n\n self.file_size\n\n }\n\n\n\n pub fn modified_time(&self) -> u128 {\n\n self.modified_time\n\n }\n\n\n\n pub fn path(&self) -> &path::Path {\n\n self.path.as_path()\n\n }\n\n}\n\n\n\nimpl From<&FileMetadata> for String {\n\n fn from(metadata: &FileMetadata) -> Self {\n\n format!(\n\n \"{} - hash {}/{} bytes\",\n\n metadata.path().display(),\n\n metadata.hash,\n\n metadata.file_size\n\n )\n\n }\n\n}\n", "file_path": "egg/src/snapshots/types/file.rs", "rank": 44, "score": 15.55960191097951 }, { "content": "use super::{RepositorySnapshots, Snapshot, SnapshotId};\n\nuse crate::{atomic::AtomicUpdate, error::Error};\n\n\n\nuse std::path;\n\n\n\nimpl RepositorySnapshots {\n\n // Updates the snapshot state when taking a snapshot\n\n pub fn update_state(&mut self, atomic: &mut AtomicUpdate, snapshot: &Snapshot, is_new_parent: bool) -> Result<(), Error> {\n\n self.state.change_state(atomic, |state_data| {\n\n if let Some(parent) = snapshot.get_parent() {\n\n // Remove the parent as a end node if it is a new parent, as long as we don't insert snapshots between snapshots this will work\n\n if is_new_parent {\n\n state_data.remove_end_node(parent);\n\n }\n\n \n\n } else {\n\n // If this snapshot has no parent add it as a root snapshot\n\n state_data.add_root_node(snapshot.get_hash().clone());\n\n 
state_data.add_end_node(snapshot.get_hash().clone());\n\n }\n", "file_path": "egg/src/snapshots/data.rs", "rank": 45, "score": 15.374410421512877 }, { "content": "use std::path;\n\n\n\nuse crate::snapshots::types::{SnapshotId, Snapshot, SnapshotLocation};\n\nuse crate::hash::Hash;\n\nuse crate::error::Error;\n\n\n\nuse super::RepositorySnapshots;\n\n\n\nimpl RepositorySnapshots {\n\n pub const fn get_path() -> &'static str {\n\n \"snapshots\"\n\n }\n\n\n\n // Note: The index only works with Hash's ID's are only used outside of egg\n\n pub fn init_index(&self) {\n\n\n\n }\n\n\n\n pub fn restore_index(&self) {\n\n // TODO: Load index state here, this should be included in the base snapshots restore\n", "file_path": "egg/src/snapshots/index.rs", "rank": 46, "score": 15.289510964076007 }, { "content": "use super::FileMetadata;\n\nuse crate::hash::Hash;\n\n\n\nuse std::path;\n\n\n\nimpl FileMetadata {\n\n pub fn new(\n\n hash: Hash,\n\n file_size: u64,\n\n path: path::PathBuf,\n\n modified_time: u128,\n\n ) -> FileMetadata {\n\n FileMetadata {\n\n hash,\n\n path,\n\n file_size,\n\n modified_time,\n\n }\n\n }\n\n\n", "file_path": "egg/src/snapshots/types/file.rs", "rank": 47, "score": 15.22651247312993 }, { "content": "use std::fmt::Display;\n\n\n\nuse crate::hash::Hash;\n\nuse super::FileMetadata;\n\nuse super::Snapshot;\n\n\n\n// TODO: A snapshot must record its history but how does this affect the id\n\n\n\nimpl Display for Snapshot {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n write!(f, \"Snapshot {}\", self.id)\n\n }\n\n}\n\n\n\nimpl Snapshot {\n\n /// Creates a new snapshot\n\n pub(crate) fn new(id: Hash, message: String, mut files: Vec<FileMetadata>, children: Vec<Hash>, parent: Option<Hash>) -> Snapshot {\n\n // Sort the hashes in order so that equivilant snapshots are not order dependant\n\n files.sort_unstable_by(|first_file, second_file| {\n\n first_file.path().cmp(&second_file.path())\n", "file_path": 
"egg/src/snapshots/types/snapshot.rs", "rank": 48, "score": 15.093040545966629 }, { "content": "use super::{RepositorySnapshots, SnapshotsState};\n\nuse crate::{atomic::AtomicUpdate, error::{Error, UnderlyingError}};\n\n\n\nuse std::path;\n\nuse std::collections::HashMap;\n\n\n\nimpl RepositorySnapshots {\n\n pub const SNAPSHOTS_PATH: &'static str = \"snapshots\";\n\n /// Initializes the snapshots system based on the current repository\n\n pub(crate) fn new(path_to_repository: &path::Path, path_to_working: &path::Path) -> Result<RepositorySnapshots, Error> {\n\n // TODO: This needs to change so that we always initialize which either loads in state or creates new state\n\n // initialize state - this requires a path to an initial snapshot data file\n\n // TODO: Check that a snapshot state\n\n let snapshot_directory = path_to_repository.join(Self::SNAPSHOTS_PATH);\n\n if snapshot_directory.exists() == false {\n\n std::fs::create_dir(snapshot_directory)\n\n .map_err(|err| Error::file_error(Some(UnderlyingError::from(err)))\n\n .add_generic_message(\"Failed to create snapshots directory\"))?;\n\n }\n\n let state = SnapshotsState::new(path_to_repository)?;\n", "file_path": "egg/src/snapshots/init.rs", "rank": 49, "score": 15.003437334069561 }, { "content": " if last_character == '.' 
|| last_character.is_whitespace() {\n\n return false;\n\n }\n\n // no files that start with whitespace\n\n if first_character.is_whitespace() {\n\n return false;\n\n }\n\n return true;\n\n }\n\n}\n\n\n\nimpl Drop for TestSpace {\n\n fn drop(&mut self) {\n\n if self.allow_cleanup {\n\n self.history.cleanup();\n\n }\n\n }\n\n}\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "testspace/src/testspace.rs", "rank": 50, "score": 14.868437104696957 }, { "content": " path_list.push(tsf.get_path().to_path_buf());\n\n // We let the testspace do all of the cleanup in this case\n\n self.history.record_file(tsf.get_path());\n\n }\n\n path_list\n\n }\n\n\n\n pub fn create_random_text_file(&mut self, lines_to_write: usize) -> path::PathBuf {\n\n let mut tsf = TestSpaceFile::with_suffix(self, \".text_file\").allow_cleanup(false);\n\n tsf.write_random_text(Alphabet::Latin, lines_to_write);\n\n // TODO: write_random_code(language, indent, end_line)\n\n self.history.record_file(tsf.get_path());\n\n tsf.get_path().to_path_buf()\n\n }\n\n\n\n pub fn create_modified_text_file(\n\n &mut self,\n\n original_file: &path::Path,\n\n alphabet: &Alphabet,\n\n ) -> (path::PathBuf, Vec<LineModification>) {\n", "file_path": "testspace/src/testspace.rs", "rank": 51, "score": 14.57959366453217 }, { "content": "use super::{SnapshotsState, StateBuilder};\n\nuse crate::atomic::AtomicUpdate;\n\nuse crate::error::{Error, UnderlyingError};\n\nuse crate::hash::Hash;\n\n\n\nuse std::path;\n\nuse std::{fs, io};\n\n\n\nimpl SnapshotsState {\n\n // Allows a caller to change the current state of snapshots, ie changing working snapshot etc\n\n pub fn change_state<F>(\n\n &mut self,\n\n atomic_updater: &mut AtomicUpdate,\n\n callback: F,\n\n ) -> Result<(), Error>\n\n where\n\n F: FnOnce(&mut StateBuilder) -> Result<(), Error>,\n\n {\n\n let mut builder = StateBuilder::new(self);\n\n // Get the requried changes from the caller\n", "file_path": "egg/src/snapshots/state/change.rs", "rank": 52, "score": 
14.3082446911834 }, { "content": " // Path to the directory where files are stored\n\n pub(crate) const DIRECTORY: &'static str = \"storage\";\n\n\n\n pub fn initialize(repository_path: &path::Path) -> Result<LocalStorage, Error> {\n\n // TODO: Need additional sensible defaults\n\n let path_to_storage = repository_path.join(LocalStorage::DIRECTORY);\n\n if path_to_storage.exists() == false {\n\n if let Err(error) = fs::create_dir(path_to_storage.as_path()) {\n\n return Err(Error::file_error(Some(UnderlyingError::from(error)))\n\n .add_debug_message(format!(\"Failed to create the storage directory, path was {}\", path_to_storage.display()))\n\n .add_user_message(format!(\"Failed to initialize part of the repository, the directory {} could not be created\", path_to_storage.display())));\n\n }\n\n }\n\n let path_to_file = path_to_storage.join(Self::FILE_NAME);\n\n let file = match fs::OpenOptions::new().create_new(true).write(true).open(path_to_file.as_path()) {\n\n Ok(file) => file,\n\n Err(error) => return Err(Error::file_error(Some(UnderlyingError::from(error)))\n\n .add_debug_message(format!(\"Failed to create a data storage state file, path of the file was {}\", path_to_file.display()))\n\n .add_user_message(format!(\"Failed to initialize part of the repository, the file '{}' could not be created\", path_to_file.display()))),\n\n };\n", "file_path": "egg/src/storage/local.rs", "rank": 53, "score": 14.307916100259938 }, { "content": "use crate::snapshots;\n\nuse byteorder::{self, LittleEndian, ReadBytesExt, WriteBytesExt};\n\nuse std::fs;\n\nuse std::io::{BufReader, BufWriter};\n\nuse std::path;\n\nuse crate::storage::LocalStorage;\n\nuse crate::snapshots::RepositorySnapshots;\n\nuse crate::snapshots::{SnapshotId, Snapshot};\n\nuse crate::error::{ Error, UnderlyingError };\n\nuse crate::atomic::AtomicUpdate;\n\n\n\n\n\n#[derive(Debug)]\n\npub struct Repository {\n\n version: u16,\n\n /// Working Path is the path to the directory that contains the repository\n\n 
working_path: path::PathBuf,\n\n path_to_repository: path::PathBuf,\n\n version_file: path::PathBuf,\n\n snapshot_storage: RepositorySnapshots,\n", "file_path": "egg/src/egg.rs", "rank": 54, "score": 14.159113330103757 }, { "content": "use std::path;\n\n\n\n// Contains types that persist user data data to disk, ie the files being placed in the repository\n\npub(crate) mod local; // Reads and writes user data to the repository\n\npub(crate) mod stream; // Provides extensions to Read Write traits to allow easier reading and writing a basic egg data types\n\n\n\n// Re-export most of these modules from here\n\n#[derive(Debug)]\n\npub struct LocalStorage {\n\n path_to_file_storage: path::PathBuf,\n\n}", "file_path": "egg/src/storage/mod.rs", "rank": 55, "score": 14.11150574672271 }, { "content": "use super::FileOperation;\n\nuse crate::error::{Error, UnderlyingError};\n\nuse std::path;\n\n\n\nimpl FileOperation {\n\n pub fn get_relative_path<'a>(\n\n &self,\n\n relative_to_path: &'a path::Path,\n\n ) -> Result<&'a path::Path, Error> {\n\n let file_path = match self {\n\n FileOperation::Create(path) => path,\n\n FileOperation::Replace(path) => path,\n\n FileOperation::Store(path) => path,\n\n };\n\n let relative_path = relative_to_path\n\n .strip_prefix(file_path)\n\n .map_err(|err| Error::invalid_parameter(Some(UnderlyingError::from(err))))?;\n\n Ok(relative_path)\n\n }\n\n\n", "file_path": "egg/src/atomic/operation.rs", "rank": 56, "score": 14.025676021430272 }, { "content": " // }\n\n // Ok(())\n\n // }\n\n\n\n // TODO: This must take a random path and just write to the file to be compatible with the atomic system\n\n\n\n pub fn write_simple_snapshot(snapshot_to_write: &Snapshot, path_to_file: &path::Path, path_to_working: &path::Path) -> Result<(), Error> {\n\n \n\n let file_to_update = match fs::OpenOptions::new().create(true).write(true).open(path_to_file) {\n\n Ok(file) => file,\n\n Err(error) => return Err(Error::file_error(Some(UnderlyingError::from(error)))\n\n 
.add_debug_message(format!(\"Failed to create a new file when writing a snapshot, path was {}\", path_to_file.display()))\n\n .add_user_message(format!(\"Could not create a new file, the path was {}\", path_to_file.display()))),\n\n };\n\n let mut snapshot_writer = io::BufWriter::new(file_to_update);\n\n //Write the version of the snapshot\n\n if let Err(error) = snapshot_writer.write_u16::<byteorder::LittleEndian>(Self::SNAPSHOT_VERSION) {\n\n return Err(Error::write_error(Some(UnderlyingError::from(error)))\n\n .add_debug_message(\"Failed to write the version number of the snapshot\"));\n\n }\n", "file_path": "egg/src/snapshots/file.rs", "rank": 57, "score": 13.965158101521725 }, { "content": " // NotLocal - Remote snapshot\n\n}\n\n\n\npub struct SnapshotBuilder {\n\n message: Option<String>,\n\n id: Option<Hash>,\n\n files: Vec<FileMetadata>,\n\n children: Vec<Hash>,\n\n parent: Option<Hash>,\n\n}\n\n\n\n// We only need Clone and PartialEq when testing\n\n#[cfg_attr(test, derive(Clone, PartialEq))]\n\n#[derive(Debug)]\n\n/// Represents all the information that is stored about a file being placed in a snapshot\n\npub struct FileMetadata {\n\n path: path::PathBuf,\n\n file_size: u64,\n\n modified_time: u128,\n\n hash: Hash,\n", "file_path": "egg/src/snapshots/types/mod.rs", "rank": 58, "score": 13.905232834634125 }, { "content": " let new_path = TestSpace::create_rand_dir(self.working_directory.as_path());\n\n self.history.record_directory(new_path.as_path());\n\n new_path\n\n }\n\n\n\n /// Creates a directory in the test space with the specified name\n\n pub fn create_dir<P: AsRef<str>>(&mut self, folder_name: P) -> path::PathBuf {\n\n let folder_name = folder_name.as_ref();\n\n let folder_path = self.get_path().join(folder_name);\n\n fs::create_dir(folder_path.as_path()).unwrap_or_else(|err| {\n\n panic!(\"Failed to create a named folder, path to TestSpace was {}, folder being created was {}, the error was {}\", self.working_directory.display(), 
folder_path.display(), err);\n\n });\n\n self.history.record_directory(folder_path.as_path());\n\n folder_path\n\n }\n\n\n\n /// Creates a randomly named file and returns a TestSpaceFile to act on that file\n\n pub fn create_tsf(&self) -> crate::TestSpaceFile {\n\n crate::TestSpaceFile::new(self)\n\n }\n", "file_path": "testspace/src/testspace.rs", "rank": 59, "score": 13.846703940467593 }, { "content": " Err(error) => {\n\n return Err(Error::file_error(Some(UnderlyingError::from(error)))\n\n .add_user_message(format!(\"Failed to read data from a file, path was {}\", path_to_file.display()))\n\n .add_debug_message(format!(\"Failed to read the version number from the version file, path was {}\", \n\n path_to_file.display()))\n\n );\n\n }\n\n };\n\n Ok(version)\n\n }\n\n\n\n pub fn get_working_path(&self) -> &path::Path {\n\n self.working_path.as_path()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\n#[macro_use]\n\nmod tests {\n\n use super::Repository;\n", "file_path": "egg/src/egg.rs", "rank": 60, "score": 13.836504306733294 }, { "content": " let path_to_state = path_to_repository\n\n .join(Self::SNAPSHOTS_PATH)\n\n .join(Self::STATE_FILE_NAME);\n\n // Create the snapshot storage state file\n\n let snapshot_file = fs::OpenOptions::new()\n\n .create_new(true)\n\n .write(true)\n\n .open(path_to_state.as_path())\n\n .map_err(|err| {\n\n Error::file_error(Some(UnderlyingError::from(err)))\n\n .add_user_message(\n\n \"Failed to create a configuration file while creating a new repository\",\n\n )\n\n .add_debug_message(format!(\n\n \"Failed to create a snapshot state file, the path was {}\",\n\n path_to_state.display()\n\n ))\n\n })?;\n\n\n\n let file_writer = io::BufWriter::new(snapshot_file);\n", "file_path": "egg/src/snapshots/state/init.rs", "rank": 61, "score": 13.827097621265922 }, { "content": "use super::{SnapshotsState, StateBuilder};\n\nuse crate::error::{Error, UnderlyingError};\n\nuse crate::storage::stream::{ReadEggExt, WriteEggExt};\n\n\n\nuse byteorder::{LittleEndian, 
ReadBytesExt, WriteBytesExt};\n\n\n\nuse std::collections::VecDeque;\n\nuse std::convert::TryFrom;\n\nuse std::fs;\n\nuse std::io;\n\nuse std::path;\n\n\n\n// Public interfaces\n\nimpl SnapshotsState {\n\n const VERSION: u16 = 1;\n\n pub const STATE_FILE_NAME: &'static str = \"state\";\n\n pub const SNAPSHOTS_PATH: &'static str = \"snapshots\";\n\n //SnapshotsState::STATE_PATH).join(SnapshotsState::STATE_FILE_NAME))\n\n\n\n /// Creates a new snapshot storage state file if needed and returns the current state of snapshots in the repository\n", "file_path": "egg/src/snapshots/state/init.rs", "rank": 62, "score": 13.635402459424068 }, { "content": "use std::fmt;\n\n\n\npub enum MessageType {\n\n Debug,\n\n User,\n\n Both,\n\n}\n\n\n\npub struct ErrorContext {\n\n messages: Vec<(String, MessageType)>,\n\n}\n\n\n\nimpl ErrorContext {\n\n pub(crate) fn add_message(&mut self, message: String, message_type: MessageType) {\n\n self.messages.push((message, message_type));\n\n }\n\n\n\n pub(crate) fn new() -> ErrorContext {\n\n ErrorContext {\n\n messages: Vec::with_capacity(3),\n", "file_path": "egg/src/error/context.rs", "rank": 63, "score": 13.565562684812567 }, { "content": "use super::AtomicLocation;\n\nuse std::path;\n\n\n\nimpl AtomicLocation {\n\n pub fn get_path(&self) -> &path::Path {\n\n path::Path::new(self.get_str())\n\n }\n\n // FIXME: This needs to be returned in two parts or as a path\n\n pub fn get_str(&self) -> &str {\n\n match self {\n\n AtomicLocation::Base => \"atomic\",\n\n AtomicLocation::CreateComplete => \"cc\",\n\n AtomicLocation::CreateWorking => \"cw\",\n\n AtomicLocation::ReplaceWorking => \"rw\",\n\n AtomicLocation::ReplaceComplete => \"rc\",\n\n AtomicLocation::ReplacePrevious => \"rp\",\n\n AtomicLocation::ReplaceRemove => \"rr\",\n\n AtomicLocation::StoreWorking => \"sw\",\n\n AtomicLocation::StoreComplete => \"sc\",\n\n }\n\n }\n\n}\n", "file_path": "egg/src/atomic/location.rs", "rank": 64, "score": 13.493978796482022 }, { "content": " }\n\n 
}\n\n\n\n pub fn create_copy(&self) -> TestSpaceTextFile {\n\n let lines_of_text = self.lines_of_text.clone();\n\n let alphabet = self.alphabet;\n\n let mut path_to_file = self.path_to_file.clone();\n\n path_to_file.pop();\n\n let file_name = TestSpace::get_random_name(15, Alphabet::Latin, Some(\".txt\"));\n\n path_to_file.push(file_name);\n\n let mut copy = TestSpaceTextFile {\n\n alphabet,\n\n lines_of_text,\n\n path_to_file,\n\n auto_flush: true,\n\n };\n\n copy.write_to_disk();\n\n copy\n\n }\n\n}\n", "file_path": "testspace/src/testspacetextfile.rs", "rank": 65, "score": 13.474328751580458 }, { "content": "\n\n#[derive(Copy, Clone)]\n\npub enum Alphabet {\n\n Arabic,\n\n Chinese,\n\n Cyrillic,\n\n Latin,\n\n}\n\npub enum LineModification {\n\n Insert(usize, String),\n\n Remove(usize),\n\n Changed(usize, String, String),\n\n}\n\n\n\npub struct TestSpaceTextFile {\n\n path_to_file: path::PathBuf,\n\n alphabet: Alphabet,\n\n lines_of_text: Vec<String>,\n\n auto_flush: bool, // After each edit do we automatically write the changes to disk\n\n}\n\n\n\npub struct TextModifier {\n\n original: Vec<String>,\n\n modified: Vec<String>,\n\n changes: Vec<LineModification>,\n\n lines_modified: Vec<usize>,\n\n}\n", "file_path": "testspace/src/lib.rs", "rank": 66, "score": 13.451086952978395 }, { "content": "use super::{AtomicLocation, AtomicUpdate, FileOperation};\n\nuse crate::error::{Error, UnderlyingError};\n\nuse std::fs;\n\nuse std::path;\n\n\n\nimpl<'a> AtomicUpdate<'a> {\n\n /// Initialize atomic file storage\n\n pub fn new(\n\n path_to_repository: &'a path::Path,\n\n path_to_working: &'a path::Path,\n\n ) -> Result<AtomicUpdate<'a>, Error> {\n\n // TODO: create_if_needed should return the path it created?\n\n fn create_if_needed(\n\n path_to_repository: &path::Path,\n\n atomic_path: AtomicLocation,\n\n ) -> Result<(), Error> {\n\n let test_directory = path_to_repository.join(atomic_path.get_path());\n\n if test_directory.exists() == false {\n\n if let Err(error) = 
fs::create_dir(test_directory.as_path()) {\n\n return Err(Error::file_error(Some(UnderlyingError::from(error)))\n", "file_path": "egg/src/atomic/update.rs", "rank": 67, "score": 13.390247325498969 }, { "content": "use std::path;\n\nuse std::fs;\n\nuse std::io::{self, Seek};\n\nuse std::convert::TryFrom;\n\n\n\nuse crate::{error::Error, error::UnderlyingError, hash};\n\nuse super::{types::Snapshot, RepositorySnapshots, FileMetadata, SnapshotId, SnapshotLocation};\n\nuse crate::storage::stream::{WriteEggExt, ReadEggExt};\n\n\n\nuse byteorder::{ReadBytesExt, WriteBytesExt};\n\n\n\nimpl RepositorySnapshots {\n\n pub const SNAPSHOT_VERSION: u16 = 1;\n\n // TODO: This needs to incorporate the snapshots location into its logic, ie if the snapshot was stored in a packed file that packed file needs to be rewritten\n\n // pub fn write_snapshot(&self, snapshot_to_write: &Snapshot, location: SnapshotLocation) -> Result<(), Error> {\n\n // match location {\n\n // SnapshotLocation::Simple => {\n\n // let path_to_file = self.path_to_repository.join(\"snapshots\").join(snapshot_to_write.get_hash().to_string());\n\n // self.write_simple_snapshot(snapshot_to_write, path_to_file.as_path())?;\n\n // }\n", "file_path": "egg/src/snapshots/file.rs", "rank": 68, "score": 13.343184832835165 }, { "content": "use crate::Alphabet;\n\n\n\nimpl Alphabet {\n\n pub fn get_range(&self) -> rand::distributions::Uniform<u32> {\n\n match self {\n\n Self::Arabic => rand::distributions::Uniform::new(0x0600, 0x06FF),\n\n Self::Chinese => rand::distributions::Uniform::new(0x4e00, 0x62ff),\n\n Self::Cyrillic => rand::distributions::Uniform::new(0x0000, 0x04ff),\n\n Self::Latin => rand::distributions::Uniform::new(0x0041, 0x007f),\n\n _ => panic!(\"An unknown alphabet was selected\"),\n\n }\n\n // let arabic_letters = rand::distributions::Uniform::new(0x0600, 0x06FF);\n\n // let chinese_symbols = rand::distributions::Uniform::new(0x4e00, 0x62ff);\n\n // let english_alphabet = 
rand::distributions::Uniform::new(0x0000, 0x007f);\n\n // let cyrillic_alphabet = rand::distributions::Uniform::new(0x0400, 0x04ff);\n\n }\n\n\n\n pub fn get_random_line(&self, rng: &mut impl rand::Rng) -> String {\n\n let characters_to_use = self.get_range();\n\n let number_of_words = rng.gen_range(5, 13);\n", "file_path": "testspace/src/alphabet.rs", "rank": 69, "score": 13.197713622268918 }, { "content": " }\n\n }\n\n }\n\n\n\n fn write_path<P: AsRef<path::Path>>(&mut self, path_to_write: P, relative_to: &path::Path) -> Result<()> {\n\n debug_assert!(relative_to.is_absolute(), \"Write path was given a relative path that was not absolute\");\n\n // Convert path to string\n\n let path_to_write = path_to_write.as_ref();\n\n // TODO: This is not enough to enforce obtaining a relative path\n\n let relative_path_to_write = match path_to_write.strip_prefix(relative_to) {\n\n Ok(relative_path) => relative_path,\n\n Err(error) => { \n\n let error = Error::write_error(Some(UnderlyingError::from(error)))\n\n .add_debug_message(format!(\"In preparation for writing a path, A conversion failed when converting an absolute path \\\"{}\\\" into a path that is relative to \\\"{}\\\"\", path_to_write.display(), relative_to.display()));\n\n return Err(error);\n\n }\n\n };\n\n debug_assert_eq!(relative_path_to_write.is_relative(), true, \"BUG: Path being stored was absolute after stripping its prefix: {}\", relative_path_to_write.display());\n\n //Convert path to a utf8 string - this means some paths on some OS's will not be supported but it guarentees cross platform correctness\n\n let path_string = match relative_path_to_write.to_str() {\n", "file_path": "egg/src/storage/stream.rs", "rank": 70, "score": 13.154902698981717 }, { "content": " self\n\n }\n\n\n\n pub fn add_generic_message<S: Into<String>>(mut self, message: S) -> Self {\n\n let context = self.get_context_mut();\n\n context.add_message(message.into(), MessageType::Both);\n\n self\n\n }\n\n}\n\n\n\nimpl fmt::Debug for 
Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{:?}\", self.kind).unwrap();\n\n write!(f, \"{:?}\", self.get_context())\n\n }\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{}\", self.kind).unwrap();\n", "file_path": "egg/src/error/base.rs", "rank": 71, "score": 12.560282389204259 }, { "content": " }\n\n // Create the randomly named directory\n\n fs::create_dir(path_to_dir.as_path()).unwrap_or_else(|err| {\n\n panic!(\n\n \"Creating the random directory {} failed. The system reported the following error {}\",\n\n path_to_dir.display(),\n\n err\n\n );\n\n });\n\n // History must be recorded in the function calling this one,\n\n // FIXME: Probably should record the directory being created in the function that does the actual creating\n\n path_to_dir\n\n }\n\n}\n\n\n\nimpl TestSpace {\n\n /// Get the actual directory that this test space is operating in\n\n pub fn get_path(&self) -> &path::Path {\n\n self.working_directory.as_path()\n\n }\n", "file_path": "testspace/src/testspace.rs", "rank": 72, "score": 12.550849917079187 }, { "content": " if let Err(error) = self.write(path_string.as_bytes()) {\n\n return Err(Error::write_error(Some(UnderlyingError::from(error))).add_debug_message(format!(\"Failed to write the bytes making up a path\")));\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\n// All types that implement Read and ReadBytesExt get ReadEggTypes\n\nimpl<R: io::Read + ReadBytesExt + Seek + ?Sized> ReadEggExt for R {}\n\n// All types that implement Write and WriteBytesExt get WriteEggTypes\n\nimpl<W: io::Write + WriteBytesExt + Seek + ?Sized> WriteEggExt for W {}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use super::{ReadEggExt, WriteEggExt};\n\n use std::io::{self, Seek, SeekFrom, Read};\n\n use testspace::{TestSpace};\n\n use byteorder;\n\n use crate::hash;\n", "file_path": "egg/src/storage/stream.rs", "rank": 73, "score": 12.45296460293759 }, { "content": " 
self.message = Some(message);\n\n self\n\n }\n\n #[allow(dead_code)]\n\n pub fn add_file(mut self, file_to_snapshot: FileMetadata) -> Self {\n\n self.files.push(file_to_snapshot);\n\n self\n\n }\n\n\n\n pub fn add_files(mut self, mut files_to_add: Vec<FileMetadata>) -> Self {\n\n self.files.append(&mut files_to_add);\n\n self\n\n }\n\n #[allow(dead_code)]\n\n pub fn remove_file(mut self, file_to_remove: &path::Path) -> Self {\n\n if let Some(index) = self\n\n .files\n\n .iter()\n\n .position(|metadata| metadata.path() == file_to_remove)\n\n {\n", "file_path": "egg/src/snapshots/types/builder.rs", "rank": 74, "score": 12.37830984828009 }, { "content": "\n\nimpl SnapshotId {\n\n /// Is this ID currently indexed, meaning is the snapshot that this ID refers to already loaded\n\n pub fn is_indexed(&self) -> bool {\n\n match self {\n\n SnapshotId::Located(_, _) => false,\n\n SnapshotId::Indexed(_, _) => true,\n\n SnapshotId::NotLocated(_) => false,\n\n }\n\n }\n\n\n\n /// Returns a reference to the internal hash\n\n pub(crate) fn get_hash(&self) -> &Hash {\n\n match self {\n\n SnapshotId::Located(hash, _) => hash,\n\n SnapshotId::Indexed(_, hash) => hash,\n\n SnapshotId::NotLocated(hash) => hash,\n\n }\n\n }\n\n\n", "file_path": "egg/src/snapshots/types/id.rs", "rank": 75, "score": 12.225387766675613 }, { "content": "\n\n\n\n\n\n\n\n#[derive(Clone)]\n\npub struct Hash {\n\n bytes: Rc<[u8;64]>,\n\n}\n\n\n\n// Need to implement Hash as the array too large\n\nimpl std::hash::Hash for Hash {\n\n // TODO: We already have a suitable hash so we need a pass through hasher?\n\n fn hash<H: std::hash::Hasher>(&self, state: &mut H) {\n\n // Use the blake2 hash as our hash for the HashMap\n\n state.write(&self.bytes[.. 
self.bytes.len()]);\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for Hash {\n\n // 6F72222C33B9BF85E6379189116FD60D94B07E226FCEEF434E3376D6DD845759E36111483D990DD84AFCBF67F32B6871D825E65443A7CF61D043FE1D814C02ED\n", "file_path": "egg/src/hash.rs", "rank": 76, "score": 12.222851438738502 }, { "content": " line_number + 1,\n\n line_inserted\n\n )),\n\n Self::Changed(line_number, original_line, new_line) => f.write_fmt(format_args!(\n\n \"Changed the line at {}, the original line was '{}', the new line is '{}'\",\n\n line_number + 1,\n\n original_line,\n\n new_line\n\n )),\n\n Self::Remove(line_number) => {\n\n f.write_fmt(format_args!(\"Line {} was removed\", line_number + 1))\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl TextModifier {\n\n pub fn new(original_lines: Vec<String>) -> TextModifier {\n\n TextModifier {\n\n original: original_lines.clone(),\n", "file_path": "testspace/src/modification.rs", "rank": 77, "score": 11.936331681566243 }, { "content": " // let y = self.b.next();\n\n // if x.is_none() && y.is_none() {\n\n // return None;\n\n // }\n\n // // As long as one of the iterators is returning Some we continue returning results\n\n // Some((x,y))\n\n }\n\n}\n\n\n\nimpl<A: Iterator, B: Iterator> RawZip<A, B> {\n\n pub fn new(a: A, b: B) -> RawZip<A, B> {\n\n RawZip { a, b }\n\n }\n\n}\n\n\n\nimpl WorkingFile {\n\n pub fn is_hashed(&self) -> bool {\n\n self.hash.is_some()\n\n }\n\n pub fn hash(&self) -> Option<&Hash> {\n", "file_path": "egg/src/working.rs", "rank": 78, "score": 11.931900209073174 }, { "content": " hash_string\n\n }\n\n // 6F72222C33B9BF85E6379189116FD60D94B07E226FCEEF434E3376D6DD845759E36111483D990DD84AFCBF67F32B6871D825E65443A7CF61D043FE1D814C02ED\n\n // 6F72222C33B9BF85E6379189116FD6-D94B07E226FCEEF434E3376D6DD845759E36111483D99-DD84AFCBF67F32B6871D825E65443A7CF61D043FE1D814C-2ED\n\n}\n\n\n\nimpl From<Hash> for String {\n\n fn from(hash: Hash) -> Self {\n\n String::from(&hash)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nimpl Hash {\n\n // NOTE: Only available 
when testing\n\n pub fn generate_random_hash() -> Hash {\n\n use rand::Rng;\n\n let rng = rand::thread_rng();\n\n let random_bytes: Vec<u8> = rng.sample_iter(rand::distributions::Standard).take(64).collect();\n\n Hash::from(random_bytes)\n", "file_path": "egg/src/hash.rs", "rank": 79, "score": 11.78195418940517 }, { "content": " // Regardless of whether the snapshot is a child or not it will always be a end node as we never insert between nodes\n\n state_data.add_end_node(snapshot.get_hash().clone());\n\n // Update latest snapshot\n\n state_data.set_latest_snapshot(Some(snapshot.get_hash().clone()));\n\n Ok(())\n\n })\n\n }\n\n\n\n pub fn get_latest_snapshot(&self, path_to_repository: &path::Path) -> Result<Option<SnapshotId>, Error> {\n\n if let Some(hash) = self.state.get_latest_snapshot() {\n\n let id = self.get_id(&hash, path_to_repository)?;\n\n return Ok(Some(id.clone()));\n\n }\n\n Ok(None)\n\n }\n\n\n\n pub fn get_working_snapshot(&self, path_to_repository: &path::Path) -> Result<Option<SnapshotId>, Error> {\n\n if let Some(hash) = self.state.get_working_snapshot() {\n\n let id = self.get_id(&hash, path_to_repository)?;\n\n return Ok(Some(id.clone()));\n", "file_path": "egg/src/snapshots/data.rs", "rank": 80, "score": 11.749516549114679 }, { "content": "use crate::storage;\n\nuse byteorder::{ LittleEndian, ReadBytesExt, WriteBytesExt };\n\nuse crate::hash;\n\nuse std::io::{ self, Read, Seek, SeekFrom, Write };\n\nuse std::path;\n\nuse std::result;\n\nuse crate::error::{Error, UnderlyingError};\n\npub type Result<T> = result::Result<T, Error>;\n\nuse std::convert::TryFrom;\n\n\n\n//todo: Swap all string lengths to use a u64\n\n/// Reads a path from the reader, it first reads the path length and then reads the path bytes\n", "file_path": "egg/src/storage/stream.rs", "rank": 81, "score": 11.706816488922982 }, { "content": " self.hash.as_ref()\n\n }\n\n\n\n pub fn filesize(&self) -> u64 {\n\n self.file_size\n\n }\n\n\n\n pub fn modified_time(&self) -> u128 
{\n\n self.modified_time\n\n }\n\n}\n\n\n\n/// Contains a number of helpful functions for dealing with the Repositories working directory\n\n/// Primarily it provides an interface to check if a file(s) have changed since a snapshot was taken\n\npub struct WorkingDirectory<'a> {\n\n working_files: HashMap<path::PathBuf, WorkingFile>,\n\n path_to_working: &'a path::Path,\n\n}\n\n\n\nimpl<'a> WorkingDirectory<'a> {\n", "file_path": "egg/src/working.rs", "rank": 82, "score": 11.58164188034271 }, { "content": " }\n\n\n\n fn get_file_object(&self) -> fs::File {\n\n println!(\"path to file is {}\", self.path_to_file.display());\n\n fs::OpenOptions::new()\n\n .read(true)\n\n .write(true)\n\n .create(true)\n\n .open(self.path_to_file.as_path())\n\n .unwrap_or_else(|err| {\n\n panic!(\n\n \"Failed to create a TestSpaceFile from a path, error was {}, path was {}\",\n\n err,\n\n self.path_to_file.display()\n\n );\n\n })\n\n }\n\n\n\n // Returns a vector of strings that represent each line that was read from the tsf file\n\n pub fn read_lines(&mut self) -> Vec<String> {\n", "file_path": "testspace/src/testspacefile.rs", "rank": 83, "score": 11.540320729928048 }, { "content": " }\n\n\n\n /// Write an optional path, meaning a path that is contained in an Option, a path is written by\n\n /// first writing its length and then the bytes, a path length of zero denotes that there is no\n\n /// path\n\n fn write_optional_path<P: AsRef<path::Path>>(&mut self, path_to_write: Option<P>, relative_to: &path::Path) -> Result<()> {\n\n match path_to_write {\n\n None => {\n\n if let Err(error) = self.write_u16::<LittleEndian>(0) {\n\n let error = Error::write_error(Some(UnderlyingError::from(error)))\n\n .add_debug_message(format!(\"Failed to write the length of an optional path that was None\"));\n\n return Err(error);\n\n }\n\n Ok(())\n\n },\n\n Some(path_to_write) => {\n\n if let Err(error) = self.write_path(path_to_write, relative_to) {\n\n return 
Err(error.add_debug_message(format!(\"Error occured while trying to write a path that was present but optional\")));\n\n }\n\n Ok(())\n", "file_path": "egg/src/storage/stream.rs", "rank": 84, "score": 11.410363692075684 }, { "content": " (_, true) => true, // Any files found in previous folder means that stage 2 has started and all writes have finished\n\n (true, false) => false, // Only found files in the working folder so not recoverable\n\n (false, false) => true, // No files found so either the operation was interrupted at a later stage or that operation wasn't performed\n\n };\n\n // Are all operations recoverable\n\n store_recoverable && create_recoverable && replace_recoverable\n\n }\n\n // TODO: This probably doesn't belong in this module\n\n // pub fn was_interrupted(path_to_repository: &path::Path) -> bool {\n\n // // Construct paths\n\n // let cw = path_to_repository.join(AtomicLocation::CreateWorking.get_path());\n\n // let cc = path_to_repository.join(AtomicLocation::CreateComplete.get_path());\n\n // let sw = path_to_repository.join(AtomicLocation::StoreWorking.get_path());\n\n // let sc = path_to_repository.join(AtomicLocation::StoreComplete.get_path());\n\n // let rw = path_to_repository.join(AtomicLocation::ReplaceWorking.get_path());\n\n // let rc = path_to_repository.join(AtomicLocation::ReplaceComplete.get_path());\n\n // let rp = path_to_repository.join(AtomicLocation::ReplacePrevious.get_path());\n\n // let rr = path_to_repository.join(AtomicLocation::ReplaceRemove.get_path());\n\n // // Check for any files in the above paths\n\n // AtomicUpdate::contains_files(cw.as_path()) || AtomicUpdate::contains_files(cc.as_path()) || AtomicUpdate::contains_files(sw.as_path()) || AtomicUpdate::contains_files(sc.as_path()) || \n", "file_path": "egg/src/atomic/recovery.rs", "rank": 85, "score": 11.388117927854015 }, { "content": " fn test_write_random_text() {\n\n use std::fs;\n\n use std::io::{self, BufRead};\n\n let ts = 
TestSpace::new().allow_cleanup(false);\n\n let mut tsf = ts.create_tsf();\n\n tsf.write_random_text(crate::Alphabet::Latin, 10);\n\n let path_to_file = tsf.get_path();\n\n // Open the tsf file\n\n let mut file = fs::OpenOptions::new()\n\n .read(true)\n\n .open(path_to_file)\n\n .unwrap();\n\n let reader = io::BufReader::new(file);\n\n // Count the number of lines\n\n let line_count = reader.lines().count();\n\n assert_eq!(line_count, 10);\n\n }\n\n\n\n #[test]\n\n fn create_subspace_file() {\n\n let ts = TestSpace::new();\n\n let mut tsf = ts.create_tsf();\n\n // Files are only created when used so we get a file object then drop it after the test\n\n let file = tsf.get_file_object();\n\n assert!(tsf.path_to_file.exists());\n\n drop(file)\n\n }\n\n}\n", "file_path": "testspace/src/testspacefile.rs", "rank": 86, "score": 11.367328935603199 }, { "content": " pub fn get_root_snapshots(&self) -> &[Hash] {\n\n self.root_snapshots.as_slice()\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub fn get_end_snapshots(&self) -> &[Hash] {\n\n self.end_snapshots.as_slice()\n\n }\n\n\n\n // Get the index of the current snapshot\n\n pub fn get_working_snapshot(&self) -> Option<Hash> {\n\n self.working_snapshot.clone()\n\n }\n\n\n\n // Get the path to the most recent snapshot data file\n\n pub fn get_latest_snapshot(&self) -> Option<Hash> {\n\n self.latest_snapshot.clone()\n\n }\n\n}\n", "file_path": "egg/src/snapshots/state/change.rs", "rank": 87, "score": 11.229245379267992 }, { "content": " }\n\n\n\n let path_string = match String::from_utf8(buffer) {\n\n Ok(path_string) => path_string,\n\n Err(error) => {\n\n return Err(Error::parsing_error(Some(UnderlyingError::from(error)))\n\n .add_debug_message(\"Failed to read the length of a string with the Read trait extensions\"));\n\n }\n\n };\n\n let relative_path = path::PathBuf::from(path_string);\n\n let final_path = relative_to.join(relative_path);\n\n debug_assert_eq!(\n\n final_path.is_absolute(),\n\n true,\n\n \"BUG: Path being read is 
relative: {}\",\n\n final_path.display()\n\n );\n\n Ok(final_path)\n\n }\n\n\n", "file_path": "egg/src/storage/stream.rs", "rank": 88, "score": 11.17952531740865 }, { "content": " // We let the testspace do all of the cleanup in this case\n\n self.history.record_file(tsf.get_path());\n\n tsf\n\n }\n\n\n\n /// Creates a number of random files bThe directory to create the files in must exist\n\n pub fn create_files<S: AsRef<str>>(\n\n &mut self,\n\n path_to_directory: S,\n\n file_list: &[&str],\n\n file_size: usize,\n\n ) -> Vec<TestSpaceFile> {\n\n let mut test_files = Vec::new();\n\n let mut path_list = Vec::new();\n\n // let file_folder = self.create_dir(path_to_directory.as_ref());\n\n let path_to_files = self.working_directory.join(path_to_directory.as_ref());\n\n for file in file_list {\n\n let mut tsf = TestSpaceFile::from(path_to_files.as_path().join(file));\n\n tsf.write_random_bytes(file_size);\n\n path_list.push(tsf.get_path().to_path_buf());\n", "file_path": "testspace/src/testspace.rs", "rank": 89, "score": 11.143675685605405 }, { "content": " // AtomicUpdate::contains_files(rw.as_path()) || AtomicUpdate::contains_files(rc.as_path()) || AtomicUpdate::contains_files(rp.as_path()) || AtomicUpdate::contains_files(rr.as_path())\n\n // }\n\n\n\n fn contains_files(path_to_search: &path::Path) -> bool {\n\n let contents_of_path = match fs::read_dir(path_to_search) {\n\n Ok(contents_of_path) => contents_of_path,\n\n Err(error) => unimplemented!(),\n\n };\n\n if contents_of_path.count() > 0 {\n\n return true;\n\n }\n\n return false;\n\n }\n\n\n\n // pub fn recover(path_to_repository: &path::Path) -> Result<(), Error> {\n\n // // Construct paths\n\n // let cw = path_to_repository.join(AtomicLocation::CreateWorking.get_path());\n\n // let cc = path_to_repository.join(AtomicLocation::CreateComplete.get_path());\n\n // let sw = path_to_repository.join(AtomicLocation::StoreWorking.get_path());\n\n // let sc = 
path_to_repository.join(AtomicLocation::StoreComplete.get_path());\n", "file_path": "egg/src/atomic/recovery.rs", "rank": 90, "score": 11.058483306530068 }, { "content": "#[derive(Debug)]\n\npub struct RepositorySnapshots {\n\n state: SnapshotsState, // Reads, writes and stores the current state of the snapshot system\n\n snapshots: Vec<Snapshot>,\n\n index: HashMap<Hash, SnapshotId>,\n\n}\n\n\n\n/// Represents the state of the snapshot system, ie what is the latest snapshot, what are the root snapshots\n\n// TODO: Move all the state fileIO stuff into storage, state can be all different states tracked\n\n// TODO: impl Storable for storage state\n\n#[derive(Debug)]\n\npub struct SnapshotsState {\n\n path_to_state_file: path::PathBuf,\n\n working_snapshot: Option<Hash>, // Hash ID of the current snapshot\n\n latest_snapshot: Option<Hash>, // Path to the recent snapshot file\n\n recent_snapshots: VecDeque<Hash>, // A list of recently accessed snapshots\n\n root_snapshots: Vec<Hash>, // The root snapshots in the repository, ie snapshots with no parent\n\n // TODO: Add a usize to track the number of snapshots current stored in current_snapshot_file\n\n // TODO: A separate file that tracks only global snapshot changes\n\n end_snapshots: Vec<Hash>,\n", "file_path": "egg/src/snapshots/mod.rs", "rank": 91, "score": 11.044339133623659 }, { "content": "use super::{RepositorySnapshots, SnapshotBuilder, SnapshotId};\n\nuse crate::{working::WorkingDirectory, atomic::AtomicUpdate, error::Error, storage::LocalStorage};\n\n\n\nuse std::path;\n\n\n\nimpl RepositorySnapshots {\n\n // TODO: Need to check if there is any point in creating this snapshot - ie has something changed - this can be linked to tracking\n\n // TODO: Track files that have been snapshot\n\n // TODO: Change current snapshot to parent snapshot - current snapshot should be independent from snapshot creation\n\n /// Create a snapshot\n\n pub fn take_snapshot<S: Into<String>>(&mut self, parent_snapshot: 
Option<SnapshotId>, snapshot_message: S, files_to_snapshot: Vec<path::PathBuf>, path_to_repository: &path::Path, path_to_working: &path::Path, file_storage: &LocalStorage) -> Result<SnapshotId, Error> {\n\n // TODO: Add support for inserting a snapshot in between other snapshots?\n\n // TODO: Add support for multiple children\n\n // TODO: Ensure the repository state is valid before reaching this point\n\n if let Some(parent_snapshot) = parent_snapshot {\n\n return self.take_child_snapshot(parent_snapshot, snapshot_message.into(), files_to_snapshot, file_storage, path_to_working, path_to_repository);\n\n } else {\n\n return self.take_root_snapshot(snapshot_message.into(), files_to_snapshot, file_storage, path_to_working, path_to_repository);\n\n }\n\n }\n", "file_path": "egg/src/snapshots/take.rs", "rank": 92, "score": 10.935106040293867 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for ErrorContext {\n\n // Display only prints context messages that are assigned MessageType::User or MessageType::Both\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n for line in self.messages.iter().rev().filter(|line| match line.1 {\n\n MessageType::Debug => false,\n\n _ => true,\n\n }) {\n\n writeln!(f, \"{}\", line.0).unwrap();\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl std::fmt::Debug for ErrorContext {\n\n // Debug only prints context messages that are assigned MessageType::Debug or MessageType::Both\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n", "file_path": "egg/src/error/context.rs", "rank": 93, "score": 10.906629398359714 }, { "content": "\n\n /// Return the number of bytes in the hash\n\n pub fn len(&self) -> usize {\n\n self.bytes.len()\n\n }\n\n}\n\n\n\n// TODO: All the byte operations done here can either be vectorized or cast as u64 before casting back\n\n\n\nimpl Ord for Hash {\n\n fn cmp(&self, other: &Self) -> std::cmp::Ordering {\n\n self.bytes.cmp(&(*other.bytes)[..])\n\n }\n\n}\n\n\n\nimpl PartialOrd for Hash {\n\n fn 
partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {\n\n self.bytes.partial_cmp(&(*other.bytes)[..])\n\n }\n\n}\n", "file_path": "egg/src/hash.rs", "rank": 94, "score": 10.882124239224769 }, { "content": " pub fn get_path(&self) -> &path::Path {\n\n self.path_to_file.as_path()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::TestSpace;\n\n use crate::TestSpaceFile;\n\n\n\n #[test]\n\n fn modify_text_file_text() {\n\n let mut ts = TestSpace::new();\n\n let text_file = ts.create_random_text_file(30);\n\n let tsf = TestSpaceFile::from(text_file.as_path());\n\n // tsf.remove_line();\n\n // tsf.insert_line();\n\n // tsf.change_line();\n\n }\n\n\n", "file_path": "testspace/src/testspacefile.rs", "rank": 95, "score": 10.863839194310339 }, { "content": " fn load_simple_snapshot(hash: &hash::Hash, path_to_repository: &path::Path, path_to_working: &path::Path) -> Result<Snapshot, Error> {\n\n let file_name = String::from(hash);\n\n \n\n let path_to_snapshot = path_to_repository.join(Self::get_path()).join(file_name);\n\n let file = match fs::OpenOptions::new().read(true).open(path_to_snapshot.as_path()) {\n\n Ok(snapshot_file) => snapshot_file,\n\n Err(error) => {\n\n if path_to_snapshot.exists() {\n\n return Err(Error::file_error(Some(UnderlyingError::from(error)))\n\n .add_debug_message(format!(\"Failed to open the snapshot metadata file when trying to read a snapshot, the path was {} and did exist\", path_to_snapshot.display()))\n\n .add_user_message(format!(\"Failed to open a file when trying to read a snapshot, the path was {} and does exist\", path_to_snapshot.display())));\n\n } else {\n\n return Err(Error::file_error(Some(UnderlyingError::from(error)))\n\n .add_debug_message(format!(\"Failed to open the snapshot file when trying to read a snapshot, the path {} didn't exist\", path_to_snapshot.display()))\n\n .add_user_message(format!(\"Failed to open a file when trying to read a snapshot, the path was {} and that path doesn't exist\", 
path_to_snapshot.display())));\n\n }\n\n },\n\n };\n\n let mut snapshot_reader = io::BufReader::new(file);\n\n\n", "file_path": "egg/src/snapshots/file.rs", "rank": 96, "score": 10.731842327822015 }, { "content": " debug_assert!(matches!(self.index.get(&hash), Some(&SnapshotId::Indexed(_, _))) == false);\n\n\n\n // Add the snapshot that was loaded to the index replacing any previous entry, marking it as Indexed and supplying its index in the vector\n\n self.index.insert(hash.clone(), SnapshotId::Indexed(self.snapshots.len(), hash.clone()));\n\n self.snapshots.push(snapshot);\n\n Ok(self.snapshots.len() - 1)\n\n }\n\n\n\n \n\n}\n\n#[cfg(test)]\n\nmod tests {\n\n use testspace::TestSpace;\n\n use super::RepositorySnapshots;\n\n use crate::snapshots::types::Snapshot;\n\n use crate::hash::Hash;\n\n use crate::working::WorkingDirectory;\n\n\n\n #[test]\n\n fn write_parse_snapshot_test() {\n", "file_path": "egg/src/snapshots/file.rs", "rank": 97, "score": 10.677067565953703 }, { "content": " adjusted_sub_path.pop();\n\n assert_eq!(root_path, adjusted_sub_path.as_path());\n\n }\n\n\n\n #[test]\n\n fn new_and_get_path() {\n\n let test_space = TestSpace::new();\n\n let current_path = test_space.get_path();\n\n assert_eq!(current_path.exists(), true);\n\n drop(test_space);\n\n }\n\n\n\n #[test]\n\n fn create_random_directory() {\n\n use std::fs;\n\n let temp_folder = env::temp_dir();\n\n let random_path = TestSpace::create_rand_dir(temp_folder.as_path());\n\n assert_eq!(random_path.exists(), true);\n\n fs::remove_dir(random_path).unwrap_or_else(|err| {\n\n panic!(\n", "file_path": "testspace/src/testspace.rs", "rank": 98, "score": 10.586177405341736 }, { "content": " // Write the version of the local storage\n\n let mut data_writer = io::BufWriter::new(file);\n\n if let Err(error) = data_writer.write_u16::<byteorder::LittleEndian>(LocalStorage::VERSION) {\n\n return Err(Error::write_error(Some(UnderlyingError::from(error)))\n\n .add_debug_message(format!(\"Failed to write 
the version of the data storage state file\")))\n\n };\n\n Ok(LocalStorage {\n\n path_to_file_storage: path_to_storage,\n\n })\n\n }\n\n\n\n pub fn load(repository_path: &path::Path) -> Result<LocalStorage, Error> {\n\n // TODO: Correct upgrade path and remove current version from struct\n\n let storage_config = repository_path.join(LocalStorage::DIRECTORY).join(LocalStorage::FILE_NAME);\n\n let file = match fs::OpenOptions::new().read(true).open(storage_config.as_path()) {\n\n Ok(file) => file,\n\n Err(error) => return Err(Error::file_error(Some(UnderlyingError::from(error)))\n\n .add_debug_message(format!(\"Failed to open the local storage state file, path was {}\", storage_config.display()))\n\n .add_user_message(format!(\"Repository appears to be invalid, a file could not be opened, path was {}\", storage_config.display()))),\n\n };\n", "file_path": "egg/src/storage/local.rs", "rank": 99, "score": 10.525621292324168 } ]
Rust
src/main.rs
RyanBluth/mangy2
2a64daf7c911f5af52ace6e185f6cb47deed8b3d
extern crate clap; extern crate term_table; use clap::{App, AppSettings, Arg, SubCommand}; use std::process; use std::fmt::Display; use std::fs::{metadata, File, OpenOptions}; use std::io::prelude::*; use std::process::{Command, Stdio}; use std::collections::HashMap; use term_table::row::Row; use term_table::Table; use term_table::cell::Cell; const GO: &'static str = "go"; const SET: &'static str = "set"; const KEY: &'static str = "key"; const VALUE: &'static str = "value"; const LIST: &'static str = "list"; const RUN: &'static str = "run"; const RUN_ARGS: &'static str = "run_args"; const DELETE: &'static str = "delete"; const STORE_FILE: &'static str = ".managed-alias-store"; #[derive(Debug)] struct GenericError { description: String, } impl GenericError { pub fn new(description: String) -> GenericError { return GenericError { description }; } } impl<T> From<T> for GenericError where T: Display, { fn from(x: T) -> Self { return GenericError::new(format!("{}", x)); } } fn main() { let matches = App::new("managed-alias") .version("1.0") .author("Ryan Bluth") .setting(AppSettings::ArgsNegateSubcommands) .arg( Arg::with_name(KEY) .help("Variable key") .required(false) .index(1) ) .arg( Arg::with_name(RUN_ARGS) .help("Arguments to pass to the command stored in the variable matching the provided key") .required(false) .multiple(true) ) .subcommand( SubCommand::with_name(GO) .alias("g") .about("Navigates to the value of the specified key") .arg( Arg::with_name(KEY) .help("Variable key") .required(true) .index(1), ), ) .subcommand( SubCommand::with_name(LIST) .alias("l") .about("Lists all variables"), ) .subcommand( SubCommand::with_name(RUN) .alias("r") .about("Execute the matching value for the provided key") .arg( Arg::with_name(KEY) .help("Variable key") .required(true) .index(1), ) .arg( Arg::with_name(RUN_ARGS) .help("Arguments to pass to the command stored in the variable matching the provided key") .required(false) .multiple(true) ) ) .subcommand( 
SubCommand::with_name(SET) .alias("s") .about("Sets the specified key to the specified value") .setting(AppSettings::AllowLeadingHyphen) .arg(Arg::with_name(KEY).help("Variable key").required(true)) .arg( Arg::with_name(VALUE) .help("Variable value") .required(true) .multiple(true) .allow_hyphen_values(true), ), ) .subcommand(SubCommand::with_name(DELETE) .alias("d") .about("Delete a key value pair") .arg(Arg::with_name(KEY).help("Variable key").required(true)) ) .get_matches(); if let Some(sub_matches) = matches.subcommand_matches(GO) { match sub_matches.value_of(KEY) { Some(key) => go(key), None => exit_with_message("go requires a variable key"), } } else if let Some(sub_matches) = matches.subcommand_matches(SET) { if sub_matches.is_present(KEY) && sub_matches.is_present(VALUE) { let key = sub_matches.value_of(KEY).unwrap(); let values = sub_matches.values_of(VALUE).unwrap(); set(key, values); } else { exit_with_message("A key and value must be provided") } } else if matches.is_present(LIST) { list(); } else if let Some(sub_matches) = matches.subcommand_matches(RUN) { match sub_matches.value_of(KEY) { Some(key) => run(key, sub_matches.values_of_lossy(RUN_ARGS)), None => exit_with_message("go requires a variable key"), } } else if let Some(sub_matches) = matches.subcommand_matches(DELETE) { match sub_matches.value_of(KEY) { Some(key) => delete(key), None => exit_with_message("Delete requires a variable key"), } } else if let Some(key) = matches.value_of(KEY) { let value = lookup(key); if let Some(value) = value { let metadata = metadata(value); match metadata { Ok(metadata) => { if metadata.is_dir() { go(key); } else { run(key, matches.values_of_lossy(RUN_ARGS)); } } Err(_) => run(key, matches.values_of_lossy(RUN_ARGS)), } } else { exit_with_message(format!("Invalid key {}", key)) } } } fn list() { let entries = get_entries(); let mut commands = Vec::new(); let mut paths = Vec::new(); for entry in entries.iter().collect::<Vec<(&String, &String)>>() { let metadata 
= metadata(entry.1); match metadata { Ok(_) => paths.push(entry), Err(_) => commands.push(entry), } } let mut table = Table::new(); table.add_row(Row::new(vec![Cell::new("COMMANDS", 2)])); for command in commands { table.add_row(Row::new(vec![Cell::new(command.0, 1), Cell::new(command.1, 1)])); } println!("{}\n", table.as_string()); table = Table::new(); table.add_row(Row::new(vec![Cell::new("PATHS", 2)])); for path in paths { table.add_row(Row::new(vec![Cell::new(path.0, 1), Cell::new(path.1, 1)])); } println!("{}", table.as_string()); } fn run(key: &str, args: Option<Vec<String>>) { match lookup(key) { Some(value) => for command in value.split("&") { let mut out_args: Vec<String> = command .split_whitespace() .map(|s| String::from(s)) .collect::<Vec<String>>(); if let Some(arg_vec) = args.clone() { let mut joined_args = String::new(); for arg in &arg_vec { joined_args.push_str(arg.clone().as_str()); joined_args.push(' '); } joined_args.pop(); for arg in out_args.clone().iter().enumerate() { let mut current = arg.1.clone(); for i in 0..arg_vec.len() { let token = format!("${}", i); current = current.replace(token.as_str(), arg_vec[i].as_str()); } current = current.replace("$*", joined_args.as_str()); out_args[arg.0] = current; } } let mut arg_iter = out_args.iter(); match Command::new(arg_iter.next().unwrap()) .args(arg_iter) .stdout(Stdio::inherit()) .spawn() { Ok(mut child) => { if let Err(e) = child.wait() { exit_with_message(format!( "Failed to wait for command {}. Error: {}", command, e )); } } Err(e) => exit_with_message(format!("Failed to execute {}. 
Error: {}", command, e)), }; }, None => exit_invalid_key(key), } } fn go(key: &str) { match lookup(key) { Some(value) => println!("*{}", value), None => exit_invalid_key(key), } } fn set(key: &str, mut values: clap::Values) { let mut entries = get_entries(); let mut combined = String::from(values.next().unwrap()); for v in values { combined.push_str(" "); combined.push_str(v); } entries.insert(String::from(key), combined); write_entries(entries); } fn delete(key: &str) { let mut entries = get_entries(); entries.remove(&String::from(key)); write_entries(entries); } fn lookup(key: &str) -> Option<String> { let entries = get_entries(); match entries.get(&String::from(key)) { None => None, Some(entry) => Some(entry.clone()), } } fn get_file_contents() -> String { let mut file: File = match File::open(get_file_dir()) { Ok(file) => file, Err(_) => { return String::new(); } }; let mut buf = String::new(); file.read_to_string(&mut buf).unwrap(); return buf; } fn get_file_dir() -> String { let mut exe_path = std::env::current_exe().unwrap(); exe_path.pop(); exe_path.push(STORE_FILE); let path = String::from(exe_path.to_path_buf().to_string_lossy()); return path; } fn get_entries() -> HashMap<String, String> { let mut result: HashMap<String, String> = HashMap::new(); let contents = get_file_contents(); let lines = contents.split('\n'); for line in lines { let mut pair = line.split("\":\""); let key = pair.next(); let val = pair.next(); if key.is_some() && val.is_some() { result.insert(String::from(key.unwrap()), String::from(val.unwrap())); } } return result; } fn write_entries(entries: HashMap<String, String>) { let mut out = String::new(); for entry in entries { out.push_str(format_entry(&entry.0, &entry.1).as_str()); } let mut file = match OpenOptions::new() .write(true) .create(true) .truncate(true) .read(true) .open(get_file_dir()) { Ok(file) => file, Err(e) => { exit_with_message(format!( "Failed to create file {}. 
Error: {}", STORE_FILE, e )); return; } }; if let Err(e) = file.write_all(out.as_bytes()) { exit_with_message(format!( "Failed to write value to {}. Error: {}", STORE_FILE, e )); }; } fn format_entry(key: &String, val: &String) -> String { return format!("{}\":\"{}\n", key, val); } fn exit_invalid_key(key: &str) { exit_with_message(format!("No value was found for key '{}'", key)); } fn exit_with_message<T>(message: T) where T: Display, { println!("{}", message); process::exit(1); }
extern crate clap; extern crate term_table; use clap::{App, AppSettings, Arg, SubCommand}; use std::process; use std::fmt::Display; use std::fs::{metadata, File, OpenOptions}; use std::io::prelude::*; use std::process::{Command, Stdio}; use std::collections::HashMap; use term_table::row::Row; use term_table::Table; use term_table::cell::Cell; const GO: &'static str = "go"; const SET: &'static str = "set"; const KEY: &'static str = "key"; const VALUE: &'static str = "value"; const LIST: &'static str = "list"; const RUN: &'static str = "run"; const RUN_ARGS: &'static str = "run_args"; const DELETE: &'static str = "delete"; const STORE_FILE: &'static str = ".managed-alias-store"; #[derive(Debug)] struct GenericError { description: String, } impl GenericError { pub fn new(description: String) -> GenericError { return GenericError { description }; } } impl<T> From<T> for GenericError where T: Display, { fn from(x: T) -> Self { return GenericError::new(format!("{}", x)); } } fn main() { let matches = App::new("managed-alias") .version("1.0") .author("Ryan Bluth") .setting(AppSettings::ArgsNegateSubcommands) .arg( Arg::with_name(KEY) .help("Variable key") .required(false) .index(1) ) .arg( Arg::with_name(RUN_ARGS) .help("Arguments to pass to the command stored in the variable matching the provided key") .required(false) .multiple(true) ) .subcommand( SubCommand::with_name(GO) .alias("g") .about("Navigates to the value of the specified key") .arg( Arg::with_name(KEY) .help("Variable key") .required(true) .index(1), ), ) .subcommand( SubCommand::with_name(LIST) .alias("l") .about("Lists all variables"), ) .subcommand( SubCommand::with_name(RUN) .alias("r") .about("Execute the matching value for the provided key") .arg( Arg::with_name(KEY) .help("Variable key") .required(true) .index(1), ) .arg( Arg::with_name(RUN_ARGS) .help("Arguments to pass to the command stored in the variable matching the provided key") .required(false) .multiple(true) ) ) .subcommand( 
SubCommand::with_name(SET) .alias("s") .about("Sets the specified key to the specified value") .setting(AppSettings::AllowLeadingHyphen) .arg(Arg::with_name(KEY).help("Variable key").required(true)) .arg( Arg::with_name(VALUE) .help("Variable value") .required(true) .multiple(true) .allow_hyphen_values(true), ), ) .subcommand(SubCommand::with_name(DELETE) .alias("d") .about("Delete a key value pair") .arg(Arg::with_name(KEY).help("Variable key").required(true)) ) .get_matches(); if let Some(sub_matches) = matches.subcommand_matches(GO) { match sub_matches.value_of(KEY) { Some(key) => go(key), None => exit_with_message("go requires a variable key"), } } else if let Some(sub_matches) = matches.subcommand_matches(SET) { if sub_matches.is_present(KEY) && sub_matches.is_present(VALUE) { let key = sub_matches.value_of(KEY).unwrap(); let values = sub_matches.values_of(VALUE).unwrap(); set(key, values); } else { exit_with_message("A key and value must be provided") } } else if matches.is_present(LIST) { list(); } else if let Some(sub_matches) = matches.subcommand_matc
etadata { Ok(metadata) => { if metadata.is_dir() { go(key); } else { run(key, matches.values_of_lossy(RUN_ARGS)); } } Err(_) => run(key, matches.values_of_lossy(RUN_ARGS)), } } else { exit_with_message(format!("Invalid key {}", key)) } } } fn list() { let entries = get_entries(); let mut commands = Vec::new(); let mut paths = Vec::new(); for entry in entries.iter().collect::<Vec<(&String, &String)>>() { let metadata = metadata(entry.1); match metadata { Ok(_) => paths.push(entry), Err(_) => commands.push(entry), } } let mut table = Table::new(); table.add_row(Row::new(vec![Cell::new("COMMANDS", 2)])); for command in commands { table.add_row(Row::new(vec![Cell::new(command.0, 1), Cell::new(command.1, 1)])); } println!("{}\n", table.as_string()); table = Table::new(); table.add_row(Row::new(vec![Cell::new("PATHS", 2)])); for path in paths { table.add_row(Row::new(vec![Cell::new(path.0, 1), Cell::new(path.1, 1)])); } println!("{}", table.as_string()); } fn run(key: &str, args: Option<Vec<String>>) { match lookup(key) { Some(value) => for command in value.split("&") { let mut out_args: Vec<String> = command .split_whitespace() .map(|s| String::from(s)) .collect::<Vec<String>>(); if let Some(arg_vec) = args.clone() { let mut joined_args = String::new(); for arg in &arg_vec { joined_args.push_str(arg.clone().as_str()); joined_args.push(' '); } joined_args.pop(); for arg in out_args.clone().iter().enumerate() { let mut current = arg.1.clone(); for i in 0..arg_vec.len() { let token = format!("${}", i); current = current.replace(token.as_str(), arg_vec[i].as_str()); } current = current.replace("$*", joined_args.as_str()); out_args[arg.0] = current; } } let mut arg_iter = out_args.iter(); match Command::new(arg_iter.next().unwrap()) .args(arg_iter) .stdout(Stdio::inherit()) .spawn() { Ok(mut child) => { if let Err(e) = child.wait() { exit_with_message(format!( "Failed to wait for command {}. 
Error: {}", command, e )); } } Err(e) => exit_with_message(format!("Failed to execute {}. Error: {}", command, e)), }; }, None => exit_invalid_key(key), } } fn go(key: &str) { match lookup(key) { Some(value) => println!("*{}", value), None => exit_invalid_key(key), } } fn set(key: &str, mut values: clap::Values) { let mut entries = get_entries(); let mut combined = String::from(values.next().unwrap()); for v in values { combined.push_str(" "); combined.push_str(v); } entries.insert(String::from(key), combined); write_entries(entries); } fn delete(key: &str) { let mut entries = get_entries(); entries.remove(&String::from(key)); write_entries(entries); } fn lookup(key: &str) -> Option<String> { let entries = get_entries(); match entries.get(&String::from(key)) { None => None, Some(entry) => Some(entry.clone()), } } fn get_file_contents() -> String { let mut file: File = match File::open(get_file_dir()) { Ok(file) => file, Err(_) => { return String::new(); } }; let mut buf = String::new(); file.read_to_string(&mut buf).unwrap(); return buf; } fn get_file_dir() -> String { let mut exe_path = std::env::current_exe().unwrap(); exe_path.pop(); exe_path.push(STORE_FILE); let path = String::from(exe_path.to_path_buf().to_string_lossy()); return path; } fn get_entries() -> HashMap<String, String> { let mut result: HashMap<String, String> = HashMap::new(); let contents = get_file_contents(); let lines = contents.split('\n'); for line in lines { let mut pair = line.split("\":\""); let key = pair.next(); let val = pair.next(); if key.is_some() && val.is_some() { result.insert(String::from(key.unwrap()), String::from(val.unwrap())); } } return result; } fn write_entries(entries: HashMap<String, String>) { let mut out = String::new(); for entry in entries { out.push_str(format_entry(&entry.0, &entry.1).as_str()); } let mut file = match OpenOptions::new() .write(true) .create(true) .truncate(true) .read(true) .open(get_file_dir()) { Ok(file) => file, Err(e) => { 
exit_with_message(format!( "Failed to create file {}. Error: {}", STORE_FILE, e )); return; } }; if let Err(e) = file.write_all(out.as_bytes()) { exit_with_message(format!( "Failed to write value to {}. Error: {}", STORE_FILE, e )); }; } fn format_entry(key: &String, val: &String) -> String { return format!("{}\":\"{}\n", key, val); } fn exit_invalid_key(key: &str) { exit_with_message(format!("No value was found for key '{}'", key)); } fn exit_with_message<T>(message: T) where T: Display, { println!("{}", message); process::exit(1); }
hes(RUN) { match sub_matches.value_of(KEY) { Some(key) => run(key, sub_matches.values_of_lossy(RUN_ARGS)), None => exit_with_message("go requires a variable key"), } } else if let Some(sub_matches) = matches.subcommand_matches(DELETE) { match sub_matches.value_of(KEY) { Some(key) => delete(key), None => exit_with_message("Delete requires a variable key"), } } else if let Some(key) = matches.value_of(KEY) { let value = lookup(key); if let Some(value) = value { let metadata = metadata(value); match m
function_block-random_span
[ { "content": "# managed-alias\n\n\n\nmanaged-alias is an alternative to the alias command. managed-alias allows you to maintain a list of aliases that you can modify on the fly and is persitent across terminal sessions.\n\n\n\n\n\n| Command | Result |\n\n| ---------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ |\n\n| `set(s) <key> <value>` | Stores a key value pair |\n\n| `delete(d) <key>` | Deletes the key value pair with the matching key |\n\n| `go(g)<key>` | CDs into the directory stored for the provided key |\n\n| `list(l)` | Lists the stored key value pairs |\n\n| `run(r) <key>` | Executes the value for the provided key. Values can be stored with placeholders `($0, $1, $2,` etc) and will be replaced with any additional arguments provided to the run subcommand. $* will be replaced with all arguments separated by whitespace |\n\n\n\n## Installation\n\n\n\n### Linux \n\n\n\nCD into dist/linux and run `./install.sh`\n\n\n\nRestart your terminal and run `ma --help` to verify things are working\n\n \n\n### Mac(Install Script Coming)\n\n\n\nThe linux install script should work if you change .bashrc to .bash_profile \n\n\n\nYou'll have to compile it yourself for now\n\n\n\n### Windows(Install Script Coming)\n\n\n\nHaven't tested this yet\n", "file_path": "README.md", "rank": 15, "score": 9891.974914587454 }, { "content": "@ECHO OFF\n\nchcp 65001\n\nset VAR=\"\"\n\nFOR /F \"delims=\" %%I IN ('managed-alias.exe %1 %2 %3 %4 %5 %6 %7 %8 %9') do set VAR=%%I & ECHO %%I\n\nIF \"%VAR:~0,1%\"==\"*\" pushd %VAR:~1%", "file_path": "ma.bat", "rank": 24, "score": 1.5507446862199616 } ]
Rust
src/bin/taxonate/app.rs
elasticdog/taxonate
6d7a5591778e6d53744ffde3d40ba91972d0840f
use std::{ collections::HashSet, env, io::{self, BufRead}, path::{Path, PathBuf}, }; use clap::{crate_authors, crate_name, crate_version, App, AppSettings, Arg, ArgMatches}; use taxonate::config::{Color, Config, LogLevel}; pub fn build() -> App<'static, 'static> { let color = env::var("TAXONATE_COLOR").unwrap_or_else(|_| "auto".to_owned()); let color_app_setting = match color.as_str() { "always" => AppSettings::ColorAlways, "never" => AppSettings::ColorNever, _ => AppSettings::ColorAuto, }; App::new(crate_name!()) .version(crate_version!()) .author(crate_authors!()) .setting(AppSettings::AllowInvalidUtf8) .setting(AppSettings::ColoredHelp) .setting(color_app_setting) .about( "Identify and filter files based on their programming language.\n\n\ Use '--help' instead of '-h' to see a more detailed version of the \ help text.", ) .long_about("Identify and filter files based on their programming language.") .arg( Arg::with_name("filename_only") .help("Suppresses display of the identified language") .long_help( "Suppresses normal output; only displays the file name and \ not the identified programming language", ) .short("f") .long("filename-only"), ) .arg( Arg::with_name("list_languages") .help("Lists supported programming languages") .long_help( "Displays a list of supported programming languages for \ filtering output", ) .short("L") .long("list-languages"), ) .arg( Arg::with_name("color") .help("Specifies when to use colored output") .short("c") .long("color") .takes_value(true) .value_name("WHEN") .possible_values(&["auto", "always", "never"]) .env("TAXONATE_COLOR") .default_value("auto"), ) .arg( Arg::with_name("debug") .help("Adjusts the log level for debugging") .short("d") .long("debug") .takes_value(true) .value_name("LEVEL") .possible_values(&["error", "warning", "info", "debug", "trace"]) .env("TAXONATE_DEBUG") .default_value("error"), ) .arg( Arg::with_name("language") .help("Filters output by programming language") .long_help( "Filters output to only show 
files identified as the given \ programming language", ) .short("l") .long("language") .takes_value(true) .value_name("LANGUAGE") .env("TAXONATE_LANGUAGE"), ) .arg( Arg::with_name("PATH") .help("File or directory to identify. Use '-' for standard input.") .long_help( "A file or directory to identify. Directories will have \ all files identified recursively. Use a dash ('-') to \ read from standard input.", ) .multiple(true), ) } pub fn config_from(matches: &ArgMatches) -> Config { let color = match matches.value_of("color").unwrap() { "auto" => Color::Auto, "always" => Color::Always, "never" => Color::Never, _ => unreachable!(), }; let filename_only = matches.is_present("filename_only"); let log_level = match matches.value_of("debug").unwrap() { "error" => LogLevel::Error, "warning" => LogLevel::Warning, "info" => LogLevel::Info, "debug" => LogLevel::Debug, "trace" => LogLevel::Trace, _ => unreachable!(), }; let language = matches.value_of("language").map(String::from); let mut paths: HashSet<PathBuf> = matches .values_of_os("PATH") .unwrap_or_default() .map(PathBuf::from) .collect(); if paths.remove(Path::new("-")) { let stdin = io::stdin(); for line in stdin.lock().lines() { paths.insert(PathBuf::from(line.unwrap())); } } if paths.is_empty() { paths.insert(PathBuf::from(".")); } Config::new() .set_color(color) .set_filename_only(filename_only) .set_log_level(log_level) .set_language(language) .set_paths(paths) }
use std::{ collections::HashSet, env, io::{self, BufRead}, path::{Path, PathBuf}, }; use clap::{crate_authors, crate_name, crate_version, App, AppSettings, Arg, ArgMatches}; use taxonate::config::{Color, Config, LogLevel}; pub fn build() -> App<'static, 'static> { let color = env::var("TAXONATE_COLOR").unwrap_or_else(|_| "auto".to_owned()); let color_app_setting = match color.as_str() { "always" => AppSettings::ColorAlways, "never" => AppSettings::ColorNever, _ => AppSettings::ColorAuto, }; App::new(crate_name!()) .version(crate_version!()) .author(crate_authors!()) .setting(AppSettings::AllowInvalidUtf8) .setting(AppSettings::ColoredHelp) .setting(color_app_setting) .about( "Identify and filter files based on their programming language.\n\n\ Use '--help' instead of '-h' to see a more detailed version of the \ help text.", ) .long_about("Identify and filter files based on their programming language.") .arg( Arg::with_name("filename_only") .help("Suppresses display of the identified language") .long_help( "Suppresses normal output; only displays the file name and \ not the identified programming language", ) .short("f") .long("filename-only"), ) .arg( Arg::with_name("list_languages") .help("Lists supported programming languages") .long_help( "Displays a list of supported programming languages for \ filtering output", ) .short("L") .long("list-languages"), ) .arg( Arg::with_name("color") .help("Specifies when to use colored output") .short("c") .long("color") .takes_value(true) .value_name("WHEN") .possible_values(&["auto", "always", "never"]) .env("TAXONATE_COLOR") .default_value("auto"), ) .arg( Arg::with_name("debug") .help("Adjusts the log level for debugging") .short("d") .long("debug") .takes_value(true) .value_name("LEVEL") .possible_values(&["error", "warning", "info", "debug", "trace"]) .env("TAXONATE_DEBUG") .default_value("error"), ) .arg( Arg::with_name("language") .help("Filters output by programming language") .long_help( "Filters output to only show 
files identified as the given \ programming language", ) .short("l") .long("language") .takes_value(true) .value_name("LANGUAGE") .env("TAXONATE_LANGUAGE"), ) .arg( Arg::with_name("PATH") .help("File or directory to identify. Use '-' for standard input.") .long_help( "A file or directory to identify. Directories will have \ all files identified recursively. Use a dash ('-') to \ read from standard input.", ) .multiple(true), ) } pub fn config_from(matches: &ArgMatches) -> Config { let color = match matches.value_of("color").unwrap() { "auto" => Color::Auto, "always" => Color::Always, "never" => Color::N
ove(Path::new("-")) { let stdin = io::stdin(); for line in stdin.lock().lines() { paths.insert(PathBuf::from(line.unwrap())); } } if paths.is_empty() { paths.insert(PathBuf::from(".")); } Config::new() .set_color(color) .set_filename_only(filename_only) .set_log_level(log_level) .set_language(language) .set_paths(paths) }
ever, _ => unreachable!(), }; let filename_only = matches.is_present("filename_only"); let log_level = match matches.value_of("debug").unwrap() { "error" => LogLevel::Error, "warning" => LogLevel::Warning, "info" => LogLevel::Info, "debug" => LogLevel::Debug, "trace" => LogLevel::Trace, _ => unreachable!(), }; let language = matches.value_of("language").map(String::from); let mut paths: HashSet<PathBuf> = matches .values_of_os("PATH") .unwrap_or_default() .map(PathBuf::from) .collect(); if paths.rem
function_block-random_span
[ { "content": "#[must_use]\n\npub fn identify(file: &Path) -> Option<&Language> {\n\n find_lang_by_interpreter(&file).or_else(|| find_lang_by_glob(&file))\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 0, "score": 96360.11204077046 }, { "content": "/// # Errors\n\n///\n\n/// Will return `Err` if ...TODO...\n\npub fn run(config: &Config) -> Result<(), Box<dyn Error>> {\n\n debug!(\"configuration settings: {:?}\", config);\n\n\n\n let mut lang_filter: Option<&Language> = None;\n\n if let Some(key) = config.language() {\n\n lang_filter = LANGUAGES.languages.get(key);\n\n }\n\n info!(\"applying language filter: {:?}\", lang_filter);\n\n\n\n let stdout = io::stdout();\n\n let handle = stdout.lock();\n\n let mut buffer = io::BufWriter::new(handle);\n\n\n\n for path in config.paths() {\n\n let walker = Walk::new(path);\n\n\n\n let files = walker\n\n .filter_map(Result::ok)\n\n .filter(|entry| entry.file_type().expect(\"no file type\").is_file());\n\n\n", "file_path": "src/lib.rs", "rank": 1, "score": 71248.47348708444 }, { "content": "#[must_use]\n\nfn read_interpreter(file: &Path) -> Option<String> {\n\n let file = match File::open(file) {\n\n Ok(file) => file,\n\n Err(_) => return None,\n\n };\n\n let mut buf = BufReader::new(file);\n\n let mut line = String::new();\n\n drop(buf.read_line(&mut line));\n\n\n\n parse_shebang(&line)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 2, "score": 57859.830853634594 }, { "content": "fn find_lang_by_glob(file: &Path) -> Option<&Language> {\n\n let file_name = file.file_name()?.to_str()?;\n\n\n\n let result = LANGUAGES\n\n .languages\n\n .par_iter()\n\n .find_any(|(_, lang)| matches_any_glob(&lang.globs, file_name));\n\n\n\n match result {\n\n Some((_, lang)) => Some(&lang),\n\n None => None,\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 3, "score": 55279.061018326705 }, { "content": "fn find_lang_by_interpreter(file: &Path) -> Option<&Language> {\n\n let result = LANGUAGES\n\n .languages\n\n .par_iter()\n\n 
.find_any(|(_, lang)| matches_any_interpreter(&lang.interpreters, file));\n\n\n\n match result {\n\n Some((_, lang)) => Some(&lang),\n\n None => None,\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 4, "score": 55279.061018326705 }, { "content": "fn matches_any_interpreter(interpreters: &[String], file: &Path) -> bool {\n\n interpreters\n\n .par_iter()\n\n .any(|interpreter| Some(interpreter.clone()) == read_interpreter(&file))\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 5, "score": 55032.80437821582 }, { "content": "fn should_print(lang: Option<&Language>, lang_filter: Option<&Language>) -> bool {\n\n if lang_filter.is_none() {\n\n true\n\n } else {\n\n lang == lang_filter\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn shebang_typical() {\n\n assert_eq!(Some(String::from(\"bash\")), parse_shebang(\"#!/bin/bash\"));\n\n }\n\n\n\n #[test]\n\n fn shebang_with_argument() {\n\n assert_eq!(Some(String::from(\"bash\")), parse_shebang(\"#!/bin/bash -x\"));\n", "file_path": "src/lib.rs", "rank": 6, "score": 48482.76095264945 }, { "content": "fn identify_and_print<W: Write>(\n\n file: &Path,\n\n filename_only: bool,\n\n lang_filter: Option<&Language>,\n\n writer: &mut W,\n\n) -> Result<(), Box<dyn Error>> {\n\n let lang = identify(file);\n\n\n\n let lang_name = match lang {\n\n Some(lang) => &lang.name,\n\n None => \"Unknown\",\n\n };\n\n\n\n trace!(\n\n \"file {:?} identified as language {:?}\",\n\n file.display(),\n\n lang_name\n\n );\n\n\n\n if should_print(lang, lang_filter) {\n\n if filename_only {\n\n writeln!(writer, \"{}\", file.display())?;\n\n } else {\n\n writeln!(writer, \"{}: {}\", file.display(), lang_name)?;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 7, "score": 37244.08411267347 }, { "content": "fn matches_any_glob(globs: &[String], str: &str) -> bool {\n\n globs\n\n .par_iter()\n\n .any(|glob| Pattern::new(glob).unwrap().matches(str))\n\n}\n\n\n", "file_path": "src/lib.rs", 
"rank": 8, "score": 32410.16134067614 }, { "content": "// SPDX-License-Identifier: MIT OR Apache-2.0\n\n\n\nuse std::{collections::BTreeMap, fmt};\n\n\n\nuse lazy_static::lazy_static;\n\nuse serde::Deserialize;\n\n\n\nconst LANGUAGES_JSON: &str = include_str!(\"../data/languages.json\");\n\n\n\n#[derive(Deserialize)]\n\npub struct Languages {\n\n pub languages: BTreeMap<String, Language>,\n\n}\n\n\n\n#[derive(Debug, Deserialize, Eq, PartialEq)]\n\npub struct Language {\n\n pub name: String,\n\n pub globs: Vec<String>,\n\n pub interpreters: Vec<String>,\n\n}\n", "file_path": "src/languages.rs", "rank": 9, "score": 19260.683371197796 }, { "content": "\n\nimpl fmt::Display for Language {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", self.name)\n\n }\n\n}\n\n\n\nlazy_static! {\n\n pub static ref LANGUAGES: Languages = serde_json::from_str(&LANGUAGES_JSON).unwrap();\n\n}\n", "file_path": "src/languages.rs", "rank": 10, "score": 19257.60291031919 }, { "content": "// SPDX-License-Identifier: MIT OR Apache-2.0\n\n\n\nuse std::{collections::HashSet, path::PathBuf};\n\n\n\n#[derive(Debug)]\n\npub enum Color {\n\n Always,\n\n Auto,\n\n Never,\n\n}\n\n\n\nimpl Default for Color {\n\n fn default() -> Self {\n\n Color::Auto\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub enum LogLevel {\n\n Error,\n", "file_path": "src/config.rs", "rank": 11, "score": 19007.492001957777 }, { "content": " Warning,\n\n Info,\n\n Debug,\n\n Trace,\n\n}\n\n\n\nimpl Default for LogLevel {\n\n fn default() -> Self {\n\n LogLevel::Error\n\n }\n\n}\n\n\n\n#[derive(Debug, Default)]\n\npub struct Config {\n\n color: Color,\n\n filename_only: bool,\n\n log_level: LogLevel,\n\n language: Option<String>,\n\n paths: HashSet<PathBuf>,\n\n}\n", "file_path": "src/config.rs", "rank": 12, "score": 19006.426407156738 }, { "content": "\n\nimpl Config {\n\n #[must_use]\n\n pub fn new() -> Config {\n\n Config {\n\n color: Color::Auto,\n\n filename_only: false,\n\n log_level: 
LogLevel::Error,\n\n language: None,\n\n paths: HashSet::new(),\n\n }\n\n }\n\n\n\n /// Get when to output color.\n\n #[must_use]\n\n pub fn color(&self) -> &Color {\n\n &self.color\n\n }\n\n\n\n /// Set when to output color.\n", "file_path": "src/config.rs", "rank": 13, "score": 19005.63901326158 }, { "content": " #[must_use]\n\n pub fn set_color(mut self, val: Color) -> Config {\n\n self.color = val;\n\n self\n\n }\n\n\n\n /// Get when to output color.\n\n #[must_use]\n\n pub fn filename_only(&self) -> bool {\n\n self.filename_only\n\n }\n\n\n\n /// Set when to display only the file name in the output.\n\n #[must_use]\n\n pub fn set_filename_only(mut self, val: bool) -> Config {\n\n self.filename_only = val;\n\n self\n\n }\n\n\n\n /// Get the logging level.\n", "file_path": "src/config.rs", "rank": 14, "score": 19004.363640707103 }, { "content": " #[must_use]\n\n pub fn log_level(&self) -> &LogLevel {\n\n &self.log_level\n\n }\n\n\n\n /// Set the logging level.\n\n #[must_use]\n\n pub fn set_log_level(mut self, val: LogLevel) -> Config {\n\n self.log_level = val;\n\n self\n\n }\n\n\n\n /// Get the language for filtering output.\n\n #[must_use]\n\n pub fn language(&self) -> &Option<String> {\n\n &self.language\n\n }\n\n\n\n /// Set the language for filtering output.\n\n #[must_use]\n", "file_path": "src/config.rs", "rank": 15, "score": 19002.997559273626 }, { "content": " pub fn set_language(mut self, val: Option<String>) -> Config {\n\n self.language = val;\n\n self\n\n }\n\n\n\n /// Get the paths to scan for language identification.\n\n #[must_use]\n\n pub fn paths(&self) -> &HashSet<PathBuf> {\n\n &self.paths\n\n }\n\n\n\n /// Add a path to scan for language identification.\n\n #[must_use]\n\n pub fn add_path(mut self, val: PathBuf) -> Config {\n\n self.paths.insert(val);\n\n self\n\n }\n\n\n\n /// Add multiple paths to scan for language idenfication.\n\n #[must_use]\n\n pub fn set_paths(mut self, val: HashSet<PathBuf>) -> Config {\n\n self.paths = val;\n\n 
self\n\n }\n\n}\n", "file_path": "src/config.rs", "rank": 16, "score": 18996.4697192886 }, { "content": "#[must_use]\n\nfn parse_shebang(line: &str) -> Option<String> {\n\n // ignore leading whitespace\n\n let line = line.trim_start();\n\n\n\n if line.starts_with(\"#!\") {\n\n let mut tokens = line.trim_start_matches(\"#!\").split_whitespace();\n\n let path = Path::new(tokens.next()?);\n\n\n\n if path.is_absolute() {\n\n if path.ends_with(\"env\") {\n\n tokens.next().map(String::from)\n\n } else {\n\n // TODO: this conversion chain smells bad\n\n path.file_name().unwrap().to_str().map(String::from)\n\n }\n\n } else {\n\n None\n\n }\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 17, "score": 18562.29841253246 }, { "content": "// SPDX-License-Identifier: MIT OR Apache-2.0\n\n\n\nuse std::{\n\n error::Error,\n\n fs::File,\n\n io::{self, BufRead, BufReader, Write},\n\n path::Path,\n\n};\n\n\n\nuse glob::Pattern;\n\nuse ignore::Walk;\n\nuse log::{debug, info, trace};\n\nuse rayon::prelude::*;\n\n\n\npub mod config;\n\npub mod languages;\n\n\n\nuse crate::config::Config;\n\nuse crate::languages::{Language, LANGUAGES};\n\n\n\n/// # Errors\n\n///\n\n/// Will return `Err` if ...TODO...\n", "file_path": "src/lib.rs", "rank": 18, "score": 16.976432981704157 }, { "content": "### Identification\n\n\n\nIn its most simple form, point `taxonate` at a file to identify its language:\n\n\n\n $ taxonate src/lib.rs\n\n src/lib.rs: Rust\n\n\n\nFile names can also be read from STDIN, if you specify a dash (`-`) as the path:\n\n\n\n $ find . 
-name \"main*\" | taxonate -\n\n ./target/doc/main.js: JavaScript\n\n ./src/bin/taxonate/main.rs: Rust\n\n\n\nInstead of pointing to individual files, you can point to a directory to\n\nrecursively identify all files within it (respecting [`.gitignore`] patterns):\n\n\n\n $ taxonate .\n\n ./LICENSE-MIT: Unknown\n\n ./Cargo.toml: TOML\n\n ./src/bin/taxonate/main.rs: Rust\n\n ./src/bin/taxonate/app.rs: Rust\n\n ./src/lib.rs: Rust\n\n ./src/languages.rs: Rust\n\n ./src/config.rs: Rust\n\n ./data/languages.cue: CUE\n\n ./data/languages.json: JSON\n\n ./data/README.md: Markdown\n\n ./data/dump_tool.cue: CUE\n\n ./LICENSE-APACHE: Unknown\n\n ./Cargo.lock: Unknown\n\n ./CHANGELOG.md: Markdown\n\n ./README.md: Markdown\n\n\n\n> _NOTE:_ If no path is provided, `taxonate` will default to recursively\n\n> identifying files within the current directory.\n\n\n\n[`.gitignore`]: https://git-scm.com/docs/gitignore\n\n\n\n### Filtering\n\n\n\nTo filter the output so it only displays files identified as a specific\n\nlanguage, use the `--language` option:\n\n\n\n $ taxonate --language rust\n\n ./src/bin/taxonate/main.rs: Rust\n\n ./src/bin/taxonate/app.rs: Rust\n\n ./src/lib.rs: Rust\n\n ./src/languages.rs: Rust\n\n ./src/config.rs: Rust\n\n\n\nTo display just the file names without the identified language (e.g. if you want\n\nto pipe the output elsewhere), add the `--filename-only` flag:\n\n\n\n $ taxonate --language rust --filename-only\n\n ./src/bin/taxonate/main.rs\n\n ./src/bin/taxonate/app.rs\n\n ./src/lib.rs\n\n ./src/languages.rs\n\n ./src/config.rs\n\n\n", "file_path": "README.md", "rank": 19, "score": 13.612205769766328 }, { "content": "### Supported Languages\n\n\n\nYou can display a list of the supported languages (in `key: name` format) by\n\nrunning the following command:\n\n\n\n $ taxonate --list-languages\n\n\n\nWhere `key` is what you should provide to the `--language` option when\n\nfiltering. 
See the [`data/` directory](/data) for more details on how the\n\nlanguages are defined.\n\n\n\n## License\n\n\n\nLicensed under either of\n\n\n\n- Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or\n\n http://www.apache.org/licenses/LICENSE-2.0)\n\n- MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)\n\n\n\nat your option.\n\n\n\n## Contribution\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally submitted\n\nfor inclusion in the work by you, as defined in the Apache-2.0 license, shall be\n\ndual licensed as above, without any additional terms or conditions.\n", "file_path": "README.md", "rank": 20, "score": 12.00217609232115 }, { "content": "# taxonate\n\n\n\n[![Rust (stable)](<https://github.com/elasticdog/taxonate/workflows/Rust%20(stable)/badge.svg>)](https://github.com/elasticdog/taxonate/actions?query=workflow%3A%22Rust+%28stable%29%22+branch%3Amaster)\n\n\n\nA command line tool to identify and filter plain text files based on their\n\n[_programming_ language]. This can be useful for automated formatting and\n\nlinting of code as part of continuous integration.\n\n\n\nLanguage identification is established by first checking the file for a\n\n[shebang] line with a known interpreter; if none is found, the file name itself\n\nis checked against known glob patterns. 
This two-step process is more accurate\n\nthan a naive search for file [extensions], as it will properly classify\n\nexecutable scripts.\n\n\n\n[_programming_ language]: https://en.wikipedia.org/wiki/Programming_language\n\n[shebang]: https://en.wikipedia.org/wiki/Shebang_(Unix)\n\n[extensions]: https://en.wikipedia.org/wiki/Filename_extension\n\n\n\n## Usage\n\n\n\n taxonate [FLAGS] [OPTIONS] [PATH]...\n\n\n\nRun `taxonate --help` for detailed information on all features.\n\n\n", "file_path": "README.md", "rank": 21, "score": 10.99837134709077 }, { "content": "### Schema\n\n\n\nA language definition has four parts that can be seen in this example:\n\n\n\n```json\n\n\"python\": {\n\n \"name\": \"Python\",\n\n \"globs\": [\n\n \"*.py\",\n\n \"*.pyw\"\n\n ],\n\n \"interpreters\": [\n\n \"python\",\n\n \"python2\",\n\n \"python3\"\n\n ]\n\n}\n\n```\n\n\n\n1. `key` is the map key used to identify a language and is what end users will\n\n specify via the `--language LANGUAGE` command line option. The key must be a\n\n valid [CUE identifier] and will end up being lowercased in the final JSON\n\n output.\n\n\n\n2. `name` is the language's human friendly display name and should be\n\n capitalized accordingly, as a proper noun.\n\n\n\n3. `globs` is an array containing the common pattern(s) that will match\n\n filenames belonging to the language.\n\n\n\n4. 
`interpreters` is an array containing the language's executable program\n\n name(s) that would be specified in a script's [shebang] line as part of the\n\n interpreter directive.\n\n\n\n> NOTE: `globs` and `interpreters` are considered to be filetype \"markers\" used\n\n> for identification; a minimum of one marker is required for each language\n\n> definition.\n\n\n\n[cue identifier]: https://cuelang.org/docs/references/spec/#identifiers\n\n[shebang]: https://en.wikipedia.org/wiki/Shebang_(Unix)\n", "file_path": "data/README.md", "rank": 22, "score": 9.38108285353876 }, { "content": "# Data Directory\n\n\n\nThis directory houses data that is ingested by taxonate at compile time.\n\n\n\n## Updating the Supported Languages\n\n\n\n### Prerequisites\n\n\n\nYou'll need the following software installed on your machine in order to develop\n\nand submit changes to the languages supported by taxonate:\n\n\n\n- [CUE](https://cuelang.org/)\n\n- [Prettier](https://prettier.io/)\n\n\n\n### Overview\n\n\n\nThe programming languages that taxonate is able to detect are defined by data in\n\nthe _languages.json_ file, which **should not be edited manually**!\n\n\n\nTo ensure consistency and check for potential conflicts, the language\n\ndefinitions should be written using CUE (Configure Unify Execute) within the\n\n_languages.cue_ file. 
Once you've made an edit to the language definitions, you\n\ncan evaluate the configuration and export the resulting JSON by running the\n\ncustom CUE command:\n\n\n\n $ cue dump\n\n\n\nThis will emit a pretty version of the JSON data to STDOUT, and will also save a\n\nminified copy, overwriting the existing _languages.json_ file.\n\n\n\nTo verify that the _languages.cue_ and _languages.json_ files are in sync, you\n\ncan validate that they match and adhere to the defined constraints by running:\n\n\n\n $ cue vet languages.json languages.cue\n\n\n", "file_path": "data/README.md", "rank": 23, "score": 9.150336793843348 }, { "content": " for file in files {\n\n identify_and_print(\n\n file.path(),\n\n config.filename_only(),\n\n lang_filter,\n\n buffer.get_mut(),\n\n )?;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 24, "score": 5.349794355478022 }, { "content": "# Changelog for taxonate\n\n\n\nAll notable changes to the project will be documented in this file.\n\n\n\nThe format is based on [Keep a Changelog][1], and this project adheres to\n\n[Semantic Versioning][2].\n\n\n\n[1]: https://keepachangelog.com/en/1.0.0/\n\n[2]: https://semver.org/spec/v2.0.0.html\n\n\n\n## [Unreleased]\n\n\n\n[unreleased]: https://github.com/elasticdog/taxonate/tree/master\n", "file_path": "CHANGELOG.md", "rank": 25, "score": 4.384364074476056 }, { "content": " }\n\n\n\n #[test]\n\n fn shebang_with_env_indirection() {\n\n assert_eq!(\n\n Some(String::from(\"bash\")),\n\n parse_shebang(\"#!/usr/bin/env bash\")\n\n );\n\n }\n\n\n\n #[test]\n\n fn shebang_with_leading_whitespace() {\n\n assert_eq!(Some(String::from(\"bash\")), parse_shebang(\" #!/bin/bash\"));\n\n }\n\n\n\n #[test]\n\n fn shebang_with_inner_whitespace() {\n\n assert_eq!(Some(String::from(\"bash\")), parse_shebang(\"#! 
/bin/bash\"));\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 26, "score": 1.973208530606699 }, { "content": " #[test]\n\n fn shebang_without_absolute_path() {\n\n assert_eq!(None, parse_shebang(\"#!bash\"));\n\n }\n\n\n\n #[test]\n\n fn shebang_without_env_argument() {\n\n assert_eq!(None, parse_shebang(\"#!/usr/bin/env\"));\n\n }\n\n\n\n #[test]\n\n fn shebang_without_interpreter() {\n\n assert_eq!(None, parse_shebang(\"#!\"));\n\n }\n\n\n\n #[test]\n\n fn shebang_without_shebang() {\n\n assert_eq!(None, parse_shebang(\"bash\"));\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 27, "score": 1.9059909803589314 } ]
Rust
core/src/reflection/microfacet_reflection.rs
hackmad/pbr-rust
d181621fcde300e88e1063b481fa240a49157627
#![allow(dead_code)] use super::*; use crate::microfacet::*; use bumpalo::Bump; use std::fmt; pub struct MicrofacetReflection<'arena> { bxdf_type: BxDFType, fresnel: &'arena mut Fresnel<'arena>, r: Spectrum, distribution: &'arena mut MicrofacetDistribution<'arena>, } impl<'arena> MicrofacetReflection<'arena> { #[allow(clippy::mut_from_ref)] pub fn alloc( arena: &'arena Bump, r: Spectrum, distribution: &'arena mut MicrofacetDistribution<'arena>, fresnel: &'arena mut Fresnel<'arena>, ) -> &'arena mut BxDF<'arena> { let model = arena.alloc(Self { bxdf_type: BxDFType::BSDF_REFLECTION | BxDFType::BSDF_GLOSSY, r, distribution, fresnel, }); arena.alloc(BxDF::MicrofacetReflection(model)) } #[allow(clippy::mut_from_ref)] pub fn clone_alloc(&self, arena: &'arena Bump) -> &'arena mut BxDF<'arena> { let distribution = self.distribution.clone_alloc(arena); let fresnel = self.fresnel.clone_alloc(arena); let model = arena.alloc(Self { bxdf_type: self.bxdf_type, r: self.r, distribution, fresnel, }); arena.alloc(BxDF::MicrofacetReflection(model)) } pub fn get_type(&self) -> BxDFType { self.bxdf_type } pub fn f(&self, wo: &Vector3f, wi: &Vector3f) -> Spectrum { let cos_theta_o = abs_cos_theta(wo); let cos_theta_i = abs_cos_theta(wi); let wh = wi + wo; if (cos_theta_i == 0.0 || cos_theta_o == 0.0) || (wh.x == 0.0 && wh.y == 0.0 && wh.z == 0.0) { Spectrum::ZERO } else { let wh = wh.normalize(); let f = self .fresnel .evaluate(wi.dot(&wh.face_forward(&Vector3f::new(0.0, 0.0, 1.0)))); self.r * self.distribution.d(&wh) * self.distribution.g(wo, wi) * f / (4.0 * cos_theta_i * cos_theta_o) } } pub fn sample_f(&self, wo: &Vector3f, u: &Point2f) -> BxDFSample { if wo.z == 0.0 { BxDFSample::from(self.bxdf_type) } else { let wh = self.distribution.sample_wh(wo, u); if wo.dot(&wh) < 0.0 { BxDFSample::from(self.bxdf_type) } else { let wi = reflect(wo, &wh); if !same_hemisphere(wo, &wi) { BxDFSample::new(Spectrum::ZERO, 0.0, wi, self.bxdf_type) } else { let pdf = self.distribution.pdf(wo, &wh) / 
(4.0 * wo.dot(&wh)); BxDFSample::new(self.f(wo, &wi), pdf, wi, self.bxdf_type) } } } } pub fn pdf(&self, wo: &Vector3f, wi: &Vector3f) -> Float { if same_hemisphere(wo, wi) { let wh = (wo + wi).normalize(); self.distribution.pdf(wo, &wh) / (4.0 * wo.dot(&wh)) } else { 0.0 } } } impl<'arena> fmt::Display for MicrofacetReflection<'arena> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!( f, "MicrofacetReflection {{ bxdf_type: {}, fresnel: {}, r: {}, distribution: {} }}", self.bxdf_type, self.fresnel, self.r, self.distribution ) } }
#![allow(dead_code)] use super::*; use crate::microfacet::*; use bumpalo::Bump; use std::fmt; pub struct MicrofacetReflection<'arena> { bxdf_type: BxDFType, fresnel: &'arena mut Fresnel<'arena>, r: Spectrum, distribution: &'arena mut MicrofacetDistribution<'arena>, } impl<'arena> MicrofacetReflection<'arena> { #[allow(clippy::mut_from_ref)] pub fn alloc( arena: &'arena Bump, r: Spectrum, distribution: &'arena mut MicrofacetDistribution<'arena>, fresnel: &'arena mut Fresnel<'arena>, ) -> &'arena mut BxDF<'arena> { let model = arena.alloc(Self { bxdf_type: BxDFType::BSDF_REFLECTION | BxDFType::BSDF_GLOSSY, r, distribution, fresnel, }); arena.alloc(BxDF::MicrofacetReflection(model)) } #[allow(clippy::mut_from_ref)] pub fn clone_alloc(&self, arena: &'arena Bump) -> &'arena mut BxDF<'arena> { let distribution = self.distribution.clone_alloc(arena); let fresnel = self.fresnel.clone_alloc(arena); let model = arena.alloc(Self { bxdf_type: self.bxdf_type, r: self.r, distribution, fresnel, }); arena.alloc(BxDF::MicrofacetReflection(model)) } pub fn get_type(&self) -> BxDFType { self.bxdf_type } pub fn f(&self, wo: &Vector3f, wi: &Vector3f) -> Spectrum { let cos_theta_o = abs_cos_theta(wo); let cos_theta_i = abs_cos_theta(wi); let wh = wi + wo; if (cos_theta_i == 0.0 || cos_theta_o == 0.0) || (wh.x == 0.0 && wh.y == 0.0 && wh.z == 0.0) { Spectrum::ZERO } else { let wh = wh.normalize(); let f = self .fresnel .evaluate(wi.dot(&wh.face_forward(&Vector3f::new(0.0, 0.0, 1.0)))); self.r * self.distribution.d(&wh) * self.distribution.g(wo, wi) * f / (4.0 * cos_theta_i * cos_theta_o) } } pub fn sample_f(&self, wo: &Vector3f, u: &Point2f) -> BxDFSample { if wo.z == 0.0 { BxDFSample::from(self.bxdf_type) } else { let wh = self.distribution.sample_wh(wo, u); if wo.dot(&wh) < 0.0 { BxDFSample::from(self.bxdf_type) } else { let wi = reflect(wo, &wh); if !same_hemisphere(wo, &wi) { BxDFSample::new(Spectrum::ZERO, 0.0, wi, self.bxdf_type) } else {
pub fn pdf(&self, wo: &Vector3f, wi: &Vector3f) -> Float { if same_hemisphere(wo, wi) { let wh = (wo + wi).normalize(); self.distribution.pdf(wo, &wh) / (4.0 * wo.dot(&wh)) } else { 0.0 } } } impl<'arena> fmt::Display for MicrofacetReflection<'arena> { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!( f, "MicrofacetReflection {{ bxdf_type: {}, fresnel: {}, r: {}, distribution: {} }}", self.bxdf_type, self.fresnel, self.r, self.distribution ) } }
let pdf = self.distribution.pdf(wo, &wh) / (4.0 * wo.dot(&wh)); BxDFSample::new(self.f(wo, &wi), pdf, wi, self.bxdf_type) } } } }
function_block-function_prefix_line
[ { "content": "#[inline]\n\npub fn reflect(wo: &Vector3f, n: &Vector3f) -> Vector3f {\n\n -wo + 2.0 * wo.dot(n) * n\n\n}\n", "file_path": "core/src/reflection/common.rs", "rank": 0, "score": 258319.93077896046 }, { "content": "/// Uniformly sample a direction from a sphere.\n\n///\n\n/// * `u` - The random sample point.\n\npub fn uniform_sample_sphere(u: &Point2f) -> Vector3f {\n\n let z = 1.0 - 2.0 * u[0];\n\n let r = max(0.0, 1.0 - z * z).sqrt();\n\n let phi = TWO_PI * u[1];\n\n Vector3f::new(r * cos(phi), r * sin(phi), z)\n\n}\n\n\n\n/// Returns the PDF for uniformly sampling a direction from a sphere.\n\npub const fn uniform_sphere_pdf() -> Float {\n\n INV_FOUR_PI\n\n}\n\n\n", "file_path": "core/src/sampling/common.rs", "rank": 1, "score": 249933.97822330194 }, { "content": "#[inline]\n\npub fn cosine_sample_hemisphere(u: &Point2f) -> Vector3f {\n\n let d = concentric_sample_disk(u);\n\n let z = max(0.0, 1.0 - d.x * d.x - d.y * d.y).sqrt();\n\n Vector3f::new(d.x, d.y, z)\n\n}\n\n\n\n/// Returns the PDF for cosine-weighted sampling a direction from a hemisphere.\n\n///\n\n/// * `cos_theta` - Cosine term of incident radiance.\n", "file_path": "core/src/sampling/common.rs", "rank": 2, "score": 249933.97822330194 }, { "content": "/// Uniformly sample a direction on a hemisphere.\n\n///\n\n/// * `u` - The random sample point.\n\npub fn uniform_sample_hemisphere(u: &Point2f) -> Vector3f {\n\n let z = u[0];\n\n let r = max(0.0, 1.0 - z * z).sqrt();\n\n let phi = TWO_PI * u[1];\n\n Vector3f::new(r * cos(phi), r * sin(phi), z)\n\n}\n\n\n\n/// Returns the PDF for uniformly sampling a direction from a hemisphere.\n\n#[inline]\n\npub const fn uniform_hemisphere_pdf() -> Float {\n\n INV_TWO_PI\n\n}\n\n\n", "file_path": "core/src/sampling/common.rs", "rank": 3, "score": 249933.97822330194 }, { "content": "/// Sorts the sample values by wavelength.\n\n///\n\n/// * `lambda` - Vector containing wavelengths.\n\n/// * `vals` - Vector containing sample values corresponding to 
`lambda`.\n\npub fn sort_spectrum_samples(samples: &mut [Sample]) {\n\n samples.sort_by(|s1, s2| s1.lambda.partial_cmp(&s2.lambda).unwrap());\n\n}\n\n\n", "file_path": "core/src/spectrum/common.rs", "rank": 4, "score": 229063.20046947684 }, { "content": "/// Returns the Fresnel reflection at the boundary between a conductor and\n\n/// dielectric medium for unpolarized light.\n\n///\n\n/// * `cos_theta_i` - cos(θi) for angle between incident direction and geometric\n\n/// surface normal on the same side as incident direction `wi`.\n\n/// * `eta_i` - Index of refraction for medium that incident ray is in.\n\n/// * `eta_t` - Index of refraction for medium that incident ray is entering.\n\n/// * `k` - The absorption coefficient.\n\npub fn fr_conductor(cos_theta_i: Float, eta_i: Spectrum, eta_t: Spectrum, k: Spectrum) -> Spectrum {\n\n let cos_theta_i = clamp(cos_theta_i, -1.0, 1.0);\n\n let eta = eta_t / eta_i;\n\n let eta_k = k / eta_i;\n\n\n\n let cos_theta_i_2 = cos_theta_i * cos_theta_i;\n\n let sin_theta_i_2 = 1.0 - cos_theta_i;\n\n let eta_2 = eta * eta;\n\n let eta_k_2 = eta_k * eta_k;\n\n\n\n let t0 = eta_2 - eta_k_2 - Spectrum::new(sin_theta_i_2);\n\n let a2_plus_b2 = (t0 * t0 + 4.0 * eta_2 * eta_k_2).sqrt();\n\n let t1 = a2_plus_b2 + Spectrum::new(cos_theta_i_2);\n\n let a = (0.5 * (a2_plus_b2 + t0)).sqrt();\n\n let t2 = 2.0 * cos_theta_i * a;\n\n let rs = (t1 - t2) / (t1 + t2);\n\n\n\n let t3 = cos_theta_i_2 * a2_plus_b2 + Spectrum::new(sin_theta_i_2 * sin_theta_i_2);\n\n let t4 = t2 * sin_theta_i_2;\n\n let rp = rs * (t3 - t4) / (t3 + t4);\n\n\n\n 0.5 * (rp + rs)\n\n}\n", "file_path": "core/src/reflection/fresnel.rs", "rank": 5, "score": 228982.13139003812 }, { "content": "/// Computes the refracted direction, given incident direction `wi`, surface normal\n\n/// `n` in the same hemisphere as `wi` and `eta`. 
If there is total internal\n\n/// reflection, `None` is returned.\n\n///\n\n/// * `wi` - Incident direction.\n\n/// * `n` - Surface normal.\n\n/// * `eta` - Ratio of indices of refraction in the incident and transmitted media.\n\npub fn refract(wi: &Vector3f, n: &Normal3f, eta: Float) -> Option<Vector3f> {\n\n // Compute cos(theta_t) using Snell's law\n\n let cos_theta_i = n.dot(wi);\n\n let sin_2_theta_i = max(0.0, 1.0 - cos_theta_i * cos_theta_i);\n\n let sin_2_theta_t = eta * eta * sin_2_theta_i;\n\n\n\n // Handle total internal reflection for transmission.\n\n if sin_2_theta_t >= 1.0 {\n\n None\n\n } else {\n\n let cos_theta_t = (1.0 - sin_2_theta_t).sqrt();\n\n Some(eta * -wi + (eta * cos_theta_i - cos_theta_t) * Vector3f::from(n))\n\n }\n\n}\n\n\n\n/// Computes the reflection of a vector around a normal.\n\n///\n\n/// * `wo` - Vector to reflect.\n\n/// * `n` - Normal.\n", "file_path": "core/src/reflection/common.rs", "rank": 6, "score": 220908.47948718801 }, { "content": "/// Sample points inside a unit circle by sampling a square of length 2 and\n\n/// rejecting points outside the circle.\n\n///\n\n/// * `rng` - Random number generator.\n\npub fn rejection_sample_disk(rng: &mut RNG) -> Point2f {\n\n loop {\n\n let rx = rng.uniform_float();\n\n let ry = rng.uniform_float();\n\n\n\n let x = 1.0 - 2.0 * rx;\n\n let y = 1.0 - 2.0 * ry;\n\n\n\n if x * x + y * y > 1.0 {\n\n continue;\n\n }\n\n\n\n return Point2f::new(x, y);\n\n }\n\n}\n\n\n", "file_path": "core/src/sampling/common.rs", "rank": 7, "score": 219983.7114996559 }, { "content": "/// Uniformly sample a direction from a cone of directions about the `(0, 0, 1)`\n\n/// axis.\n\n///\n\n/// * `u` - The random sample point.\n\n/// * `cos_theta_max` - Cosine of the maximum angle of the beam.\n\npub fn uniform_sample_cone(u: &Point2f, cos_theta_max: Float) -> Vector3f {\n\n let cos_theta = (1.0 - u[0]) + u[0] * cos_theta_max;\n\n let sin_theta = (1.0 - cos_theta * cos_theta).sqrt();\n\n let phi = u[1] * 
TWO_PI;\n\n Vector3f::new(cos(phi) * sin_theta, sin(phi) * sin_theta, cos_theta)\n\n}\n\n\n", "file_path": "core/src/sampling/common.rs", "rank": 8, "score": 219001.4968228718 }, { "content": "/// Sample a point on a unit disk by mapping from a unit square to the unit\n\n/// circle. The concentric mapping takes points in [-1, 1]^2 to unit disk by\n\n/// uniformly mapping concentric squares to concentric circles.\n\n///\n\n/// * `u` - The random sample point.\n\npub fn concentric_sample_disk(u: &Point2f) -> Point2f {\n\n // Map uniform random numbers to [-1,1]^2.\n\n let u_offset = 2.0 * u - Vector2f::new(1.0, 1.0);\n\n\n\n // Handle degeneracy at the origin.\n\n if u_offset.x == 0.0 && u_offset.y == 0.0 {\n\n return Point2f::zero();\n\n }\n\n\n\n // Apply concentric mapping to point\n\n let (r, theta) = if abs(u_offset.x) > abs(u_offset.y) {\n\n (u_offset.x, PI_OVER_FOUR * (u_offset.y / u_offset.x))\n\n } else {\n\n (\n\n u_offset.y,\n\n PI_OVER_TWO - PI_OVER_FOUR * (u_offset.x / u_offset.y),\n\n )\n\n };\n\n\n\n r * Point2f::new(cos(theta), sin(theta))\n\n}\n\n\n", "file_path": "core/src/sampling/common.rs", "rank": 9, "score": 214304.26605646324 }, { "content": "/// Uniformly sample a point on an isosceles triangle.\n\n///\n\n/// * `u` - The random sample point.\n\npub fn uniform_sample_triangle(u: &Point2f) -> Point2f {\n\n let su0 = u[0].sqrt();\n\n Point2f::new(1.0 - su0, u[1] * su0)\n\n}\n\n\n\n/// Sample a direction on a hemisphere using cosine-weighted sampling.\n\n///\n\n/// * `u` - The random sample point.\n", "file_path": "core/src/sampling/common.rs", "rank": 10, "score": 214304.26605646324 }, { "content": "/// Uniformly sample a point on the disk.\n\n///\n\n/// * `u` - The random sample point.\n\npub fn uniform_sample_disk(u: &Point2f) -> Point2f {\n\n let r = u[0].sqrt();\n\n let theta = TWO_PI * u[1];\n\n Point2f::new(r * cos(theta), r * sin(theta))\n\n}\n\n\n", "file_path": "core/src/sampling/common.rs", "rank": 11, "score": 214304.26605646324 }, { 
"content": "/// Returns the average value across segments that are partially or fully\n\n/// within the range of wavelengths.\n\n///\n\n/// * `samples` - The sample values.\n\n/// * `lambda_start` - Starting wavelength.\n\n/// * `lambda_end` - Ending wavelength.\n\npub fn average_spectrum_samples(\n\n samples: &[Sample],\n\n lambda_start: Float,\n\n lambda_end: Float,\n\n) -> Float {\n\n assert!(lambda_start < lambda_end);\n\n\n\n let n = samples.len();\n\n\n\n // Handle cases with out-of-bounds range or single sample only.\n\n if lambda_end <= samples[0].lambda {\n\n return samples[0].value;\n\n }\n\n\n\n if lambda_start >= samples[n - 1].lambda {\n\n return samples[n - 1].value;\n\n }\n\n\n\n if n == 1 {\n\n return samples[0].value;\n", "file_path": "core/src/spectrum/common.rs", "rank": 12, "score": 198163.5374173213 }, { "content": "/// Evaluates the Perlin Noise function for a 2-D point. Evaluates `noise_3d()`\n\n/// at z = 0.5.\n\n///\n\n/// * `p` - The 2-D point.\n\npub fn noise_point2f(p: Point2f) -> Float {\n\n noise_2d(p.x, p.y)\n\n}\n\n\n", "file_path": "core/src/texture/common.rs", "rank": 13, "score": 192715.25280050104 }, { "content": "#[inline]\n\npub fn same_hemisphere(w: &Vector3f, wp: &Vector3f) -> bool {\n\n w.z * wp.z > 0.0\n\n}\n\n\n", "file_path": "core/src/reflection/common.rs", "rank": 14, "score": 189277.59033342163 }, { "content": "/// Generates the 2D samples using a generator matrices.\n\n///\n\n/// * `c` - Generator matrices for the 2 dimensions.\n\n/// * `n` - Number of samples to generate\n\n/// * `scramble` - Starting set of bits used for scrambling in each dimension.\n\n/// * `p` - Samples to update.\n\npub fn gray_code_sample_2d(c: &[[u32; 32]; 2], n: usize, scramble: &[u32; 2], p: &mut [Point2f]) {\n\n let mut v = [scramble[0], scramble[1]];\n\n for (i, pi) in p.iter_mut().enumerate().take(n) {\n\n let t = (i + 1).trailing_zeros() as usize;\n\n\n\n for j in 0..2 {\n\n (*pi)[j] = min(\n\n v[j] as Float * hexf32!(\"0x1.0p-32\") as 
Float,\n\n ONE_MINUS_EPSILON,\n\n );\n\n v[j] ^= c[j][t];\n\n }\n\n }\n\n}\n\n\n\n/// Define VanDerCorput Generator Matrix.\n\n#[rustfmt::skip]\n\nconst C_VANDER_CORPUT: [u32; 32] = [\n\n 0b10000000000000000000000000000000,\n\n 0b1000000000000000000000000000000,\n", "file_path": "core/src/low_discrepency.rs", "rank": 15, "score": 187568.81757034577 }, { "content": "/// Returns the cosine of the angle ΔΦ between two vector's Φ values in the\n\n/// shading coordinate system.\n\n///\n\n/// * `wa` - First direction vector.\n\n/// * `wb` - Second direction vector.\n\npub fn cos_d_phi(wa: &Vector3f, wb: &Vector3f) -> Float {\n\n let waxy = wa.x * wa.x + wa.y * wa.y;\n\n let wbxy = wb.x * wb.x + wb.y * wb.y;\n\n if waxy == 0.0 || wbxy == 0.0 {\n\n 1.0\n\n } else {\n\n clamp(\n\n (wa.x * wb.x + wa.y * wb.y) / (waxy * wbxy).sqrt(),\n\n -1.0,\n\n 1.0,\n\n )\n\n }\n\n}\n\n\n\n/// Returns `true` if two vectors are in the same hemisphere.\n\n///\n\n/// * `w` - First vector.\n\n/// * `wp` - Second vector.\n", "file_path": "core/src/reflection/common.rs", "rank": 16, "score": 183164.40587679017 }, { "content": "#[inline]\n\npub fn spherical_theta(v: &Vector3f) -> Float {\n\n clamp(v.z, -1.0, 1.0).acos()\n\n}\n\n\n\n/// Return the spherical angle Ø for a given vector.\n\n///\n\n/// * `v` - The vector.\n", "file_path": "core/src/geometry/util.rs", "rank": 17, "score": 179454.36617649504 }, { "content": "#[inline]\n\npub fn tan_2_theta(w: &Vector3f) -> Float {\n\n sin_2_theta(w) / cos_2_theta(w)\n\n}\n\n\n\n/// Returns the cosine of the angle Φ measured from the given direction to the\n\n/// x-axis after projection to the xy plane.\n\n///\n\n/// * `w` - The direction vector.\n", "file_path": "core/src/reflection/common.rs", "rank": 18, "score": 179454.36617649504 }, { "content": "#[inline]\n\npub fn sin_theta(w: &Vector3f) -> Float {\n\n sin_2_theta(w).sqrt()\n\n}\n\n\n\n/// Returns the tangent of the angle θ measured from the given direction to the\n\n/// z-axis.\n\n///\n\n/// * 
`w` - The direction vector.\n", "file_path": "core/src/reflection/common.rs", "rank": 19, "score": 179454.36617649504 }, { "content": "#[inline]\n\npub fn spherical_phi(v: &Vector3f) -> Float {\n\n let p = atan2(v.y, v.x);\n\n if p < 0.0 {\n\n p + TWO_PI\n\n } else {\n\n p\n\n }\n\n}\n", "file_path": "core/src/geometry/util.rs", "rank": 20, "score": 179454.36617649504 }, { "content": "#[inline]\n\npub fn cos_phi(w: &Vector3f) -> Float {\n\n let s = sin_theta(w);\n\n if s == 0.0 {\n\n 1.0\n\n } else {\n\n clamp(w.x / s, -1.0, 1.0)\n\n }\n\n}\n\n\n\n/// Returns the square of the cosine of the angle Φ measured from the given\n\n/// direction to the x-axis after projection to the xy plane.\n\n///\n\n/// * `w` - The direction vector.\n", "file_path": "core/src/reflection/common.rs", "rank": 21, "score": 179454.36617649504 }, { "content": "#[inline]\n\npub fn sin_2_phi(w: &Vector3f) -> Float {\n\n let c = sin_phi(w);\n\n c * c\n\n}\n\n\n", "file_path": "core/src/reflection/common.rs", "rank": 22, "score": 179454.36617649504 }, { "content": "#[inline]\n\npub fn tan_theta(w: &Vector3f) -> Float {\n\n sin_theta(w) / cos_theta(w)\n\n}\n\n\n\n/// Returns the square of the tangent of the angle θ measured from the given\n\n/// direction to the z-axis.\n\n///\n\n/// * `w` - The direction vector.\n", "file_path": "core/src/reflection/common.rs", "rank": 23, "score": 179454.36617649504 }, { "content": "#[inline]\n\npub fn cos_2_phi(w: &Vector3f) -> Float {\n\n let c = cos_phi(w);\n\n c * c\n\n}\n\n\n\n/// Returns the sine of the angle Φ measured from the given direction to the\n\n/// x-axis after projection to the xy plane.\n\n///\n\n/// * `w` - The direction vector.\n", "file_path": "core/src/reflection/common.rs", "rank": 24, "score": 179454.36617649504 }, { "content": "#[inline]\n\npub fn sin_2_theta(w: &Vector3f) -> Float {\n\n max(0.0, 1.0 - cos_2_theta(w))\n\n}\n\n\n\n/// Returns the sine of the angle θ measured from the given direction to the\n\n/// z-axis.\n\n///\n\n/// * 
`w` - The direction vector.\n", "file_path": "core/src/reflection/common.rs", "rank": 25, "score": 179454.36617649504 }, { "content": "#[inline]\n\npub fn cos_2_theta(w: &Vector3f) -> Float {\n\n w.z * w.z\n\n}\n\n\n\n/// Returns the absolute value of the cosine of the angle θ measured from the\n\n/// given direction to the z-axis.\n\n///\n\n/// * `w` - The direction vector.\n", "file_path": "core/src/reflection/common.rs", "rank": 26, "score": 179454.36617649504 }, { "content": "/// Take a 3D coordinate value where each component is a floating-point value\n\n/// between 0 and 2^10 and convert these values to integers and then computes the\n\n/// Morton code by expanding the three 10-bit quantized values so that their i^th\n\n/// bits are at position `3i`, then shifting the bits over one more, the z bits\n\n/// over two more, and ORing together the result.\n\n///\n\n/// * `v` - The coordinate value.\n\npub fn encode_morton_3(v: &Vector3f) -> u32 {\n\n debug_assert!(v.x > 0.0);\n\n debug_assert!(v.y > 0.0);\n\n debug_assert!(v.z > 0.0);\n\n\n\n (left_shift_3(float_to_bits(v.z)) << 2)\n\n | (left_shift_3(float_to_bits(v.y)) << 1)\n\n | left_shift_3(float_to_bits(v.x))\n\n}\n\n\n\npub const N_BITS: usize = 30;\n\nconst BITS_PER_PASS: usize = 6;\n\nconst N_PASSES: usize = N_BITS / BITS_PER_PASS; // This should divide evenly.\n\nconst N_BUCKETS: usize = 1 << BITS_PER_PASS;\n\nconst BIT_MASK: usize = (1 << BITS_PER_PASS) - 1;\n\n\n", "file_path": "accelerators/src/bvh/morton.rs", "rank": 27, "score": 179454.36617649504 }, { "content": "#[inline]\n\npub fn sin_phi(w: &Vector3f) -> Float {\n\n let s = sin_theta(w);\n\n if s == 0.0 {\n\n 0.0\n\n } else {\n\n clamp(w.y / s, -1.0, 1.0)\n\n }\n\n}\n\n\n\n/// Returns the square of the sine of the angle Φ measured from the given\n\n/// direction to the x-axis after projection to the xy plane.\n\n///\n\n/// * `w` - The direction vector.\n", "file_path": "core/src/reflection/common.rs", "rank": 28, "score": 179454.36617649504 }, { 
"content": "#[inline]\n\npub fn cos_theta(w: &Vector3f) -> Float {\n\n w.z\n\n}\n\n\n\n/// Returns the square of the cosine of the angle θ measured from the given\n\n/// direction to the z-axis.\n\n///\n\n/// * `w` - The direction vector.\n", "file_path": "core/src/reflection/common.rs", "rank": 29, "score": 179454.36617649504 }, { "content": "/// Returns a smart pointer to a new `LightDistribution` implementation.\n\n///\n\n/// * `strategy` - The strategy to use for light sampling.\n\n/// * `scene` - The scene.\n\npub fn create_light_sample_distribution(\n\n strategy: LightSampleStategy,\n\n scene: &Scene,\n\n) -> ArcLightDistribution {\n\n let strategy = if scene.lights.len() == 1 {\n\n LightSampleStategy::Uniform\n\n } else {\n\n strategy\n\n };\n\n match strategy {\n\n LightSampleStategy::Uniform => Arc::new(UniformLightDistribution::new(scene)),\n\n LightSampleStategy::Power => Arc::new(PowerLightDistribution::new(scene)),\n\n LightSampleStategy::Spatial => Arc::new(SpatialLightDistribution::new(scene, 64)),\n\n }\n\n}\n", "file_path": "core/src/light_distrib/mod.rs", "rank": 30, "score": 178253.52084671712 }, { "content": "/// Decomposes a transformation matrix into its translation, rotation and\n\n/// scaling components.\n\n///\n\n/// The transformation decomposition is: M = TRS.\n\n///\n\n/// * `m` - The matrix to decompose.\n\n/// * `t` - The translation component.\n\n/// * `r_quat` - The rotation component.\n\n/// * `s` - The scaling component.\n\nfn decompose(m: &Matrix4x4, t: &mut Vector3f, r_quat: &mut Quaternion, s: &mut Matrix4x4) {\n\n // Extract translation T from transformation matrix\n\n *t = Vector3f::new(m[0][3], m[1][3], m[2][3]);\n\n\n\n // Compute new transformation matrix M without translation\n\n let mut m1 = *m;\n\n for i in 0..3 {\n\n m1.m[i][3] = 0.0;\n\n m1.m[3][i] = 0.0;\n\n }\n\n m1.m[3][3] = 1.0;\n\n\n\n // Extract rotation R from transformation matrix\n\n let mut r = m1;\n\n let mut norm = 0.0;\n\n let mut count = 0;\n\n loop {\n\n 
// Compute the next matrix R_next in series\n\n let mut r_next = Matrix4x4::IDENTITY;\n\n let r_it = r.transpose().inverse();\n", "file_path": "core/src/geometry/animated_transform.rs", "rank": 31, "score": 176278.80864394334 }, { "content": "#[inline]\n\npub fn abs_cos_theta(w: &Vector3f) -> Float {\n\n abs(w.z)\n\n}\n\n\n\n/// Returns the square of the sine of the angle θ measured from the given\n\n/// direction to the z-axis.\n\n///\n\n/// * `w` - The direction vector.\n", "file_path": "core/src/reflection/common.rs", "rank": 32, "score": 176001.81609158497 }, { "content": "/// Generate 2D samples.\n\n///\n\n/// * `rng` - Random number generator.\n\n/// * `nx` - Number of samples in x-direction.\n\n/// * `ny` - Number of samples in y-direction.\n\n/// * `jitter` - Jitter the samples.\n\npub fn stratified_sample_2d(rng: &mut RNG, nx: usize, ny: usize, jitter: bool) -> Vec<Point2f> {\n\n let dx = 1.0 / nx as Float;\n\n let dy = 1.0 / ny as Float;\n\n\n\n iproduct!(0..ny, 0..nx)\n\n .map(|(y, x)| {\n\n let jx = if jitter { rng.uniform_float() } else { 0.5 };\n\n let jy = if jitter { rng.uniform_float() } else { 0.5 };\n\n Point2f::new(\n\n min((x as Float + jx) * dx, ONE_MINUS_EPSILON),\n\n min((y as Float + jy) * dy, ONE_MINUS_EPSILON),\n\n )\n\n })\n\n .collect::<Vec<Point2f>>()\n\n}\n\n\n", "file_path": "core/src/sampling/common.rs", "rank": 33, "score": 175782.2088034028 }, { "content": "/// Determines if given vector containing wavelengths is sorted.\n\n///\n\n/// * `lambda` - Vector containing wavelengths.\n\npub fn are_spectrum_samples_sorted(samples: &[Sample]) -> bool {\n\n let n = samples.len();\n\n match n {\n\n 0 => true,\n\n 1 => true,\n\n _ => !samples\n\n .iter()\n\n .zip(samples[1..].iter())\n\n .any(|(s1, s2)| s1.lambda > s2.lambda),\n\n }\n\n}\n\n\n", "file_path": "core/src/spectrum/common.rs", "rank": 34, "score": 171187.84243828754 }, { "content": "/// Evaluate second moment of the Fresnel reflectance function.\n\n///\n\n/// * `eta` - Index of 
refraction of the scattering medium.\n\npub fn fresnel_moment_2(eta: Float) -> Float {\n\n let eta2 = eta * eta;\n\n let eta3 = eta2 * eta;\n\n let eta4 = eta3 * eta;\n\n let eta5 = eta4 * eta;\n\n if eta < 1.0 {\n\n 0.27614 - 0.87350 * eta + 1.12077 * eta2 - 0.65095 * eta3 + 0.07883 * eta4 + 0.04860 * eta5\n\n } else {\n\n let r_eta = 1.0 / eta;\n\n let r_eta2 = r_eta * r_eta;\n\n let r_eta3 = r_eta2 * r_eta;\n\n -547.033 + 45.3087 * r_eta3 - 218.725 * r_eta2 + 458.843 * r_eta + 404.557 * eta\n\n - 189.519 * eta2\n\n + 54.9327 * eta3\n\n - 9.00603 * eta4\n\n + 0.63942 * eta5\n\n }\n\n}\n", "file_path": "core/src/bssrdf.rs", "rank": 35, "score": 164517.10755096676 }, { "content": "/// Evaluate first moment of the Fresnel reflectance function.\n\n///\n\n/// * `eta` - Index of refraction of the scattering medium.\n\npub fn fresnel_moment_1(eta: Float) -> Float {\n\n let eta2 = eta * eta;\n\n let eta3 = eta2 * eta;\n\n let eta4 = eta3 * eta;\n\n let eta5 = eta4 * eta;\n\n if eta < 1.0 {\n\n 0.45966 - 1.73965 * eta + 3.37668 * eta2 - 3.904945 * eta3 + 2.49277 * eta4 - 0.68441 * eta5\n\n } else {\n\n -4.61686 + 11.1136 * eta - 10.4646 * eta2 + 5.11455 * eta3 - 1.27198 * eta4 + 0.12746 * eta5\n\n }\n\n}\n\n\n", "file_path": "core/src/bssrdf.rs", "rank": 36, "score": 164517.10755096676 }, { "content": "/// Returns the value of an SPD at a given wavelength by interpolating a possibly\n\n/// irregularly sampled wavelengths/values by linearly interpolating between two\n\n/// sample values that bracket the given wavelength.\n\n///\n\n/// * `samples` - The sample values.\n\n/// * `l` - Wavelength at which to interpolate SPD.\n\npub fn interpolate_spectrum_samples(samples: &[Sample], l: Float) -> Float {\n\n let n = samples.len();\n\n\n\n if l <= samples[0].lambda {\n\n return samples[0].value;\n\n }\n\n if l >= samples[n - 1].lambda {\n\n return samples[n - 1].value;\n\n }\n\n\n\n let offset = find_interval(n, |index| samples[index].lambda <= l);\n\n\n\n assert!(l >= 
samples[offset].lambda && l <= samples[offset + 1].lambda);\n\n\n\n let t = (l - samples[offset].lambda) / (samples[offset + 1].lambda - samples[offset].lambda);\n\n lerp(t, samples[offset].value, samples[offset + 1].value)\n\n}\n\n\n\n/// Converts the given XYZ coefficients to RGB coefficients using RGB spectra\n\n/// defined for high-definition TVs.\n\n///\n\n/// * `xyz` - The XYZ coefficients.\n", "file_path": "core/src/spectrum/common.rs", "rank": 37, "score": 162395.84747022804 }, { "content": "/// Sorts a list of morton primitives in place using Radix sort.\n\n///\n\n/// * `v` - A `RefCell` containing the morton primitives vector.\n\npub fn radix_sort(v: &mut RefCell<Vec<MortonPrimitive>>) {\n\n let n = { v.borrow().len() };\n\n let mut temp_vector = RefCell::new(vec![MortonPrimitive::default(); n]);\n\n\n\n for pass in 0..N_PASSES {\n\n // Perform one pass of radix sort, sorting BITS_PER_PASS bits.\n\n let low_bit = pass * BITS_PER_PASS;\n\n\n\n // Set in and out vector pointers for radix sort pass.\n\n let (v_in, v_out) = if pass & 1 == 1 {\n\n (temp_vector.borrow(), v.get_mut())\n\n } else {\n\n (v.borrow(), temp_vector.get_mut())\n\n };\n\n\n\n // Count number of zero bits in array for current radix sort bit.\n\n let mut bucket_count = [0; N_BUCKETS];\n\n for mp in v_in.iter() {\n\n let bucket = ((mp.morton_code >> low_bit) as usize) & BIT_MASK;\n\n debug_assert!(bucket < N_BUCKETS);\n", "file_path": "accelerators/src/bvh/morton.rs", "rank": 38, "score": 157918.28068496648 }, { "content": "#[rustfmt::skip]\n\npub fn xyz_to_rgb(xyz: &[Float; 3]) -> [Float; 3] {\n\n [\n\n 3.240479 * xyz[0] - 1.537150 * xyz[1] - 0.498535 * xyz[2],\n\n -0.969256 * xyz[0] + 1.875991 * xyz[1] + 0.041556 * xyz[2],\n\n 0.055648 * xyz[0] - 0.204043 * xyz[1] + 1.057311 * xyz[2],\n\n ]\n\n}\n\n\n\n/// Converts the given RGB coefficients to XYZ coefficients using RGB spectra\n\n/// defined for high-definition TVs.\n\n///\n\n/// * `rgb` - The RGB coefficients.\n", "file_path": 
"core/src/spectrum/common.rs", "rank": 39, "score": 155144.4379182881 }, { "content": "#[rustfmt::skip]\n\npub fn rgb_to_xyz(rgb: &[Float; 3]) -> [Float; 3] {\n\n [\n\n 0.412453 * rgb[0] + 0.357580 * rgb[1] + 0.180423 * rgb[2],\n\n 0.212671 * rgb[0] + 0.715160 * rgb[1] + 0.072169 * rgb[2],\n\n 0.019334 * rgb[0] + 0.119193 * rgb[1] + 0.950227 * rgb[2],\n\n ]\n\n}\n\n\n", "file_path": "core/src/spectrum/common.rs", "rank": 40, "score": 155144.4379182881 }, { "content": "/// Compute random permutation tables.\n\n///\n\n/// * `rng` - The random number generator.\n\npub fn compute_radical_inverse_permutations(rng: &mut RNG) -> Vec<u16> {\n\n // Allocate space for radical inverse permutations.\n\n let perm_array_size = (0..PRIME_TABLE_SIZE).fold(0, |a, i| a + PRIMES[i]);\n\n\n\n let mut perms = vec![0_u16; perm_array_size];\n\n let mut p = 0;\n\n for i in 0..PRIME_TABLE_SIZE {\n\n // Generate random permutation for i^th prime base.\n\n for j in 0..PRIMES[i] {\n\n perms[p + j] = j as u16;\n\n }\n\n\n\n rng.shuffle(&mut perms[p..p + PRIMES[i]], PRIMES[i], 1);\n\n p += PRIMES[i];\n\n }\n\n perms\n\n}\n\n\n", "file_path": "core/src/low_discrepency.rs", "rank": 41, "score": 155077.50749212396 }, { "content": "/// Helper function for sampling visible area of normals.\n\n///\n\n/// * `wi` - Incident direction.\n\n/// * `alpha_x` - For microfacets oriented perpendicular to the x-axis and where\n\n/// α = sqrt(2) * σ and σ is the RMS slope of microfacets.\n\n/// * `alpha_y` - For microfacets oriented perpendicular to the y-axis and where\n\n/// α = sqrt(2) * σ and σ is the RMS slope of microfacets.\n\n/// * `u1` - The uniform random value.\n\n/// * `u2` - The uniform random value.\n\nfn trowbridge_reitz_sample(wi: &Vector3f, alpha_x: Float, alpha_y: Float, u1: Float, u2: Float) -> Vector3f {\n\n // 1. Stretch wi.\n\n let wi_stretched = Vector3f::new(alpha_x * wi.x, alpha_y * wi.y, wi.z).normalize();\n\n\n\n // 2. 
Simulate P22_{wi}(x_slope, y_slope, 1, 1)\n\n let (mut slope_x, mut slope_y) = trowbridge_reitz_sample_11(cos_theta(&wi_stretched), u1, u2);\n\n\n\n // 3. Rotate.\n\n let tmp = cos_phi(&wi_stretched) * slope_x - sin_phi(&wi_stretched) * slope_y;\n\n slope_y = sin_phi(&wi_stretched) * slope_x + cos_phi(&wi_stretched) * slope_y;\n\n slope_x = tmp;\n\n\n\n // 4. Unstretch.\n\n slope_x *= alpha_x;\n\n slope_y *= alpha_y;\n\n\n\n // 5. Compute normal.\n\n Vector3f::new(-slope_x, -slope_y, 1.0).normalize()\n\n}", "file_path": "core/src/microfacet/trowbridge_reitz.rs", "rank": 42, "score": 151747.97351468133 }, { "content": "/// Returns the emitted radiance at a given temperature and wavelengths for a\n\n/// blackbody (perfect emitter).\n\n///\n\n/// * `lambda` - Wavelengths in nanometers.\n\n/// * `t` - Temperature in Kelvin.\n\npub fn blackbody(lambda: &[Float], t: Float) -> Vec<Float> {\n\n if t <= 0.0 {\n\n return vec![0.0; lambda.len()];\n\n }\n\n\n\n const C: Float = 299792458.0;\n\n const H: Float = 6.62606957e-34;\n\n const KB: Float = 1.3806488e-23;\n\n\n\n let n = lambda.len();\n\n let mut le = vec![0.0; n];\n\n for i in 0..n {\n\n // Compute emitted radiance for blackbody at wavelength `lambda[i]`.\n\n let l = lambda[i] * 1e-9; // Convert nanometers -> meters.\n\n let lambda5 = (l * l) * (l * l) * l;\n\n le[i] = (2.0 * H * C * C) / (lambda5 * (((H * C) / (l * KB * t)).exp() - 1.0));\n\n assert!(!le[i].is_nan());\n\n }\n\n le\n\n}\n\n\n", "file_path": "core/src/spectrum/common.rs", "rank": 43, "score": 149744.67308051328 }, { "content": "/// Partition a subset of items between start and end inclusive such that:\n\n/// - k^th element will be in its sorted order\n\n/// - elements e in v[start, k - 1] will satisfy f(e, ek) == Ordering::Less\n\n/// - elements e in v[k + 1, end] will satisfy f(e, ek) == Ordering::Greater\n\n/// and the k^th element is returned.\n\n///\n\n/// * `v` - Vector to partition.\n\n/// * `start` - Starting index.\n\n/// * `end` - Ending index 
+ 1.\n\n/// * `f` - Predicate used for partitioning.\n\nfn kth_element_by<F, T>(v: &mut Vec<T>, start: usize, k: usize, end: usize, f: F) -> Option<T>\n\nwhere\n\n F: Fn(&T, &T) -> Ordering,\n\n T: Copy,\n\n{\n\n if start >= end || end > v.len() || k < start || k >= end {\n\n return None;\n\n }\n\n\n\n let w = v[start..end].as_mut();\n\n let i = *kth_by(w, k - start, |&x, &y| f(&x, &y));\n\n\n\n Some(i)\n\n}\n\n\n", "file_path": "accelerators/src/bvh/sah.rs", "rank": 44, "score": 148914.62365516246 }, { "content": "/// Returns the normalized emitted radiance at a given temperature and wavelengths\n\n/// for a blackbody (perfect emitter) based on maximum blackbody radiance.\n\n///\n\n/// * `lambda` - Wavelengths in nanometers.\n\n/// * `t` - Temperature in Kelvin.\n\npub fn blackbody_normalized(lambda: &[Float], t: Float) -> Vec<Float> {\n\n let mut le = blackbody(lambda, t);\n\n\n\n // Normalize `Le` values based on maximum blackbody radiance.\n\n let lambda_max = 2.8977721e-3 / t * 1e9; // Convert to meters -> nanometers.\n\n let max_l = blackbody(&[lambda_max], t);\n\n\n\n for v in &mut le {\n\n *v /= max_l[0];\n\n }\n\n le\n\n}\n", "file_path": "core/src/spectrum/common.rs", "rank": 45, "score": 146626.5758569683 }, { "content": "/// Returns the fresnel reflection for dielectric materials and unpolarized light.\n\n///\n\n/// * `cos_theta_i` - cos(θi) for angle between incident direction and geometric\n\n/// surface normal.\n\n/// * `eta_i` - index of refraction for medium that incident ray is in.\n\n/// * `eta_t` - index of refraction for medium that incident ray is entering.\n\npub fn fr_dielectric(cos_theta_i: Float, eta_i: Float, eta_t: Float) -> Float {\n\n let mut cos_theta_i = clamp(cos_theta_i, -1.0, 1.0);\n\n let mut eta_i = eta_i;\n\n let mut eta_t = eta_t;\n\n\n\n // Potentially swap indices of refraction.\n\n let entering = cos_theta_i > 0.0;\n\n if !entering {\n\n swap(&mut eta_i, &mut eta_t);\n\n cos_theta_i = abs(cos_theta_i);\n\n }\n\n\n\n // 
Compute _cosThetaT_ using Snell's law.\n\n let sin_theta_i = (0.0 as Float).max(1.0 - cos_theta_i * cos_theta_i).sqrt();\n\n let sin_theta_t = eta_i / eta_t * sin_theta_i;\n\n\n\n // Handle total internal reflection.\n\n if sin_theta_t >= 1.0 {\n\n 1.0\n\n } else {\n\n let cos_theta_t = (0.0 as Float).max(1.0 - sin_theta_t * sin_theta_t).sqrt();\n\n let r_parl = ((eta_t * cos_theta_i) - (eta_i * cos_theta_t))\n\n / ((eta_t * cos_theta_i) + (eta_i * cos_theta_t));\n\n let r_perp = ((eta_i * cos_theta_i) - (eta_t * cos_theta_t))\n\n / ((eta_i * cos_theta_i) + (eta_t * cos_theta_t));\n\n (r_parl * r_parl + r_perp * r_perp) / 2.0\n\n }\n\n}\n\n\n", "file_path": "core/src/reflection/fresnel.rs", "rank": 46, "score": 146489.2048392355 }, { "content": "/// Returns the light power distribution in a scene.\n\n///\n\n/// * `scene` - The scene.\n\npub fn compute_light_power_distribution(scene: &Scene) -> Option<Distribution1D> {\n\n if scene.lights.is_empty() {\n\n None\n\n } else {\n\n let light_power: Vec<Float> = scene.lights.iter().map(|light| light.power().y()).collect();\n\n Some(Distribution1D::new(light_power))\n\n }\n\n}\n", "file_path": "core/src/integrator/common.rs", "rank": 47, "score": 145659.4860518205 }, { "content": "/// Generates the 1D samples using a generator matrix.\n\n///\n\n/// * `c` - A generator matrix.\n\n/// * `n` - Number of samples to generate\n\n/// * `scramble` - Starting set of bits used for scrambling.\n\n/// * `p` - Samples to update.\n\npub fn gray_code_sample_1d(c: &[u32], n: usize, scramble: u32, p: &mut [Float]) {\n\n let mut v = scramble;\n\n for i in 0..n {\n\n p[i] = min(\n\n v as Float * hexf32!(\"0x1.0p-32\") as Float, // 1 / (2^32)\n\n ONE_MINUS_EPSILON,\n\n );\n\n v ^= c[(i + 1).trailing_zeros() as usize];\n\n }\n\n}\n\n\n", "file_path": "core/src/low_discrepency.rs", "rank": 48, "score": 145000.63070727134 }, { "content": "/// Build the BVH structure using HLBVH algorithm.\n\n///\n\n/// * `arena` - Shared arena for memory 
allocations.\n\n/// * `primitives` - The primitives in the node.\n\n/// * `max_prims_in_node` - Maximum number of primitives in the node.\n\n/// * `primitive_info` - Primitive information.\n\n/// * `total_nodes` - Used to return total number of nodes.\n\n/// * `ordered_prims` - Used to return a list of primitives ordered such that\n\n/// primitives in leaf nodes occupy contiguous ranges in\n\n/// the vector.\n\npub fn build(\n\n arena: &SharedArena<BVHBuildNode>,\n\n primitives: &[ArcPrimitive],\n\n max_prims_in_node: u8,\n\n primitive_info: &mut Vec<BVHPrimitiveInfo>,\n\n total_nodes: &mut usize,\n\n ordered_prims: Arc<Mutex<Vec<ArcPrimitive>>>,\n\n) -> ArenaArc<BVHBuildNode> {\n\n // Compute bounds of all primitives in BVH node.\n\n let bounds = primitive_info\n\n .iter()\n\n .fold(Bounds3f::EMPTY, |b, pi| b.union(&pi.bounds));\n\n\n\n // Compute Morton indices of primitives.\n\n let morton_prims: Vec<MortonPrimitive> = primitive_info\n\n .par_iter()\n\n .map(|&pi| {\n\n let centroid_offset = bounds.offset(&pi.centroid);\n\n let v = centroid_offset * MORTON_SCALE;\n\n MortonPrimitive::new(pi.primitive_number, encode_morton_3(&v))\n", "file_path": "accelerators/src/bvh/hlbvh.rs", "rank": 49, "score": 144220.05481804913 }, { "content": "/// Recursively build the BVH structure for either Middle, EqualCounts or SAH\n\n/// algorithm.\n\n///\n\n/// * `arena` - Shared arena for memory allocations.\n\n/// * `primitives` - The primitives in the node.\n\n/// * `split_method` - Middle|EqualCounts|SAH\n\n/// * `max_prims_in_node` - Maximum number of primitives in the node.\n\n/// * `primitive_info` - Primitive information.\n\n/// * `start` - Starting index. For first call it should be 0.\n\n/// * `end` - Ending index + 1. 
For first call it should be number\n\n/// of primitives.\n\n/// * `total_nodes` - Used to return total number of nodes.\n\n/// * `ordered_prims` - Used to return a list of primitives ordered such that\n\n/// primitives in leaf nodes occupy contiguous ranges in\n\n/// the vector.\n\npub fn build(\n\n arena: &SharedArena<BVHBuildNode>,\n\n primitives: &[ArcPrimitive],\n\n split_method: SplitMethod,\n\n max_prims_in_node: u8,\n\n primitive_info: &mut Vec<BVHPrimitiveInfo>,\n\n start: usize,\n\n end: usize,\n\n total_nodes: &mut usize,\n\n ordered_prims: Arc<Mutex<Vec<ArcPrimitive>>>,\n\n) -> ArenaArc<BVHBuildNode> {\n\n // Compute bounds of all primitives in BVH node.\n\n let mut bounds = Bounds3f::EMPTY;\n\n for info in primitive_info.iter().take(end).skip(start) {\n\n bounds = bounds.union(&info.bounds);\n\n }\n\n\n\n let mut dim = Axis::default(); // Will be set if we need to make interior node.\n\n\n\n let n_primitives = end - start;\n", "file_path": "accelerators/src/bvh/sah.rs", "rank": 50, "score": 144218.10629141156 }, { "content": "/// Uses Perlin Noise to generate turbulence function.\n\n///\n\n/// * `p` - The point.\n\n/// * `dpdx` - Partial derivative at point p with respect to x.\n\n/// * `dpdy` - Partial derivative at point p with respect to y.\n\n/// * `omega` - Smoothness falloff value in [0, 1].\n\n/// * `max_octaves` - Maximum number of octaves of noise to use for the sum.\n\npub fn turbulence(\n\n p: &Point3f,\n\n dpdx: &Vector3f,\n\n dpdy: &Vector3f,\n\n omega: Float,\n\n max_octaves: usize,\n\n) -> Float {\n\n // Compute number of octaves for antialiased FBm\n\n let len2 = max(dpdx.length_squared(), dpdy.length_squared());\n\n let n = clamp(-1.0 - 0.5 * len2.log2(), 0.0, max_octaves as Float);\n\n let n_int = n.floor() as usize;\n\n\n\n // Compute sum of octaves of noise for turbulence\n\n let mut sum = 0.0;\n\n let mut lambda = 1.0;\n\n let mut o = 1.0;\n\n for _i in 0..n_int {\n\n sum += o * abs(noise_point3f(lambda * p));\n\n lambda *= 
1.99;\n\n o *= omega;\n", "file_path": "core/src/texture/common.rs", "rank": 51, "score": 144209.1853822284 }, { "content": "/// Uses Perlin Noise to generate values based on Fractional Brownian motion.\n\n///\n\n/// * `p` - The point.\n\n/// * `dpdx` - Partial derivative at point p with respect to x.\n\n/// * `dpdy` - Partial derivative at point p with respect to y.\n\n/// * `omega` - Smoothness falloff value in [0, 1].\n\n/// * `max_octaves` - Maximum number of octaves of noise to use for the sum.\n\npub fn fbm(\n\n p: &Point3f,\n\n dpdx: &Vector3f,\n\n dpdy: &Vector3f,\n\n omega: Float,\n\n max_octaves: usize,\n\n) -> Float {\n\n // Compute number of octaves for antialiased FBm.\n\n let len2 = max(dpdx.length_squared(), dpdy.length_squared());\n\n let n = clamp(-1.0 - 0.5 * len2.log2(), 0.0, max_octaves as Float);\n\n let n_int = n.floor() as usize;\n\n\n\n // Compute sum of octaves of noise for FBm\n\n let mut sum = 0.0;\n\n let mut lambda = 1.0;\n\n let mut o = 1.0;\n\n for _i in 0..n_int {\n\n sum += o * noise_point3f(lambda * p);\n\n lambda *= 1.99;\n\n o *= omega;\n\n }\n\n let n_partial = n - n_int as Float;\n\n sum += o * smooth_step(0.3, 0.7, n_partial) * noise_point3f(lambda * p);\n\n sum\n\n}\n\n\n", "file_path": "core/src/texture/common.rs", "rank": 52, "score": 144209.1157475672 }, { "content": "/// Generate a number of scrambled 2D sample values using the Gray code-based\n\n/// sampling and the Sobol generator matrices.\n\n///\n\n/// * `n_samples_per_pixel_sample` - Number of samples to generate for every\n\n/// sample for a pixel.\n\n/// * `n_pixel_samples` - Number of samples for a pixel.\n\n/// * `samples` - Sample values to update.\n\n/// * `rng` - Random number generator.\n\npub fn sobol_2d(\n\n n_samples_per_pixel_sample: usize,\n\n n_pixel_samples: usize,\n\n samples: &mut [Point2f],\n\n rng: &mut RNG,\n\n) {\n\n let scramble = [rng.uniform_u32(), rng.uniform_u32()];\n\n\n\n gray_code_sample_2d(\n\n &C_SOBOL,\n\n n_samples_per_pixel_sample * 
n_pixel_samples,\n\n &scramble,\n\n samples,\n\n );\n\n\n\n for i in 0..n_pixel_samples {\n\n let start = i * n_samples_per_pixel_sample;\n\n let end = start + n_samples_per_pixel_sample;\n\n rng.shuffle(&mut samples[start..end], n_samples_per_pixel_sample, 1);\n\n }\n\n\n\n let start = 0;\n\n let end = start + n_pixel_samples;\n\n rng.shuffle(\n\n &mut samples[start..end],\n\n n_pixel_samples,\n\n n_samples_per_pixel_sample,\n\n );\n\n}\n\n\n", "file_path": "core/src/low_discrepency.rs", "rank": 53, "score": 144207.79667344072 }, { "content": "#[inline]\n\npub fn spherical_direction(sin_theta: Float, cos_theta: Float, phi: Float) -> Vector3f {\n\n Vector3f::new(sin_theta * cos(phi), sin_theta * sin(phi), cos_theta)\n\n}\n\n\n\n/// Returns a direction (x, y, z) for spherical coordinates (θ, Ø) with respect\n\n/// to a coordinate frame.\n\n///\n\n/// * `sin_theta` - sin(θ).\n\n/// * `cos_theta` - cos(θ).\n\n/// * `phi` - Ø.\n\n/// * `x` - Basis vector representing x-axis.\n\n/// * `y` - Basis vector representing y-axis.\n\n/// * `z` - Basis vector representing z-axis.\n", "file_path": "core/src/geometry/util.rs", "rank": 54, "score": 143883.8523381316 }, { "content": "#[allow(non_snake_case)]\n\npub fn sample_catmull_rom(\n\n x: &[Float],\n\n f: &[Float],\n\n F: &[Float],\n\n u: Float,\n\n) -> (Float, Float, Float) {\n\n // Get number of samples.\n\n let n = x.len();\n\n\n\n // Map `u` to a spline interval by inverting `F`.\n\n let u = u * F[n - 1];\n\n let i = find_interval(n, |j| F[j] <= u);\n\n\n\n // Look up `x_i` and function values of spline segment `i`.\n\n let x0 = x[i];\n\n let x1 = x[i + 1];\n\n let f0 = f[i];\n\n let f1 = f[i + 1];\n\n let width = x1 - x0;\n\n\n", "file_path": "core/src/interpolation.rs", "rank": 55, "score": 141501.23007104374 }, { "content": "/// Compute a direct lighting estimate for a light source sample by applying\n\n/// multiple importance sampling.\n\n///\n\n/// * `it` - The intersection information.\n\n/// * `bsdf` - The BSDF at 
the intersection.\n\n/// * `u_scattering` - Scattering sample.\n\n/// * `light` - The light.\n\n/// * `u_light` - Light sample.\n\n/// * `scene` - The scene.\n\n/// * `sampler` - The sampler.\n\n/// * `handle_media` - Indicates whether effects of volumetric attenuation\n\n/// should be considered (default to false).\n\n/// * `specular` - Indicates whether perfectly specular lobes should be\n\n/// considered (default to false).\n\npub fn estimate_direct(\n\n it: &Interaction,\n\n bsdf: &Option<&mut BSDF>,\n\n u_scattering: &Point2f,\n\n light: ArcLight,\n\n u_light: &Point2f,\n\n scene: &Scene,\n\n sampler: &mut ArcSampler,\n\n handle_media: bool,\n\n specular: bool,\n\n) -> Spectrum {\n\n let bsdf_flags = if specular {\n\n BxDFType::all()\n\n } else {\n\n BxDFType::all() & !BxDFType::BSDF_SPECULAR\n\n };\n\n let mut ld = Spectrum::ZERO;\n\n let hit = it.get_hit();\n\n let mut scattering_pdf = 0.0;\n\n\n", "file_path": "core/src/integrator/common.rs", "rank": 56, "score": 141501.23007104374 }, { "content": "#[allow(non_snake_case)]\n\npub fn sample_catmull_rom_2d(\n\n nodes1: &[Float],\n\n nodes2: &[Float],\n\n values: &[Float],\n\n cdf: &[Float],\n\n alpha: Float,\n\n u: Float,\n\n) -> (Float, Float, Float) {\n\n // Get number of nodes.\n\n let size2 = nodes2.len();\n\n\n\n // Determine offset and coefficients for the `alpha` parameter.\n\n let (weights, offset) = if let Some((w, o)) = catmull_rom_weights(nodes1, alpha) {\n\n (w, o)\n\n } else {\n\n return (0.0, 0.0, 0.0);\n\n };\n\n\n\n // Define a lambda function to interpolate table entries.\n\n let interpolate = |array: &[Float], idx: usize| -> Float {\n", "file_path": "core/src/interpolation.rs", "rank": 57, "score": 141501.23007104374 }, { "content": "/// Reads a PBRT file format and calls the API wrapper functions.\n\n///\n\n/// * `abs_path` - The absolute path to scene file.\n\n/// * `api` - The PBRT API interface.\n\npub fn parse(abs_path: &str, api: &mut Api) -> std::result::Result<(), String> {\n\n // 
Get path to scene file's folder for resolving relative paths to includes,\n\n // images, etc.\n\n let scene_path = parent_path(abs_path).unwrap();\n\n\n\n // Load input file.\n\n let unparsed_file = file_to_string(&abs_path)?;\n\n\n\n // Parse the input file into `Nodes`.\n\n match PbrtParser::parse_with_userdata(Rule::pbrt, &unparsed_file, scene_path.to_owned())\n\n // There should be a single root node in the parsed tree.\n\n .and_then(|inputs| inputs.single())\n\n // Consume the `Node` recursively into the final value.\n\n .and_then(PbrtParser::pbrt)\n\n // Process the and call API.\n\n .and_then(|pbrt| {\n\n pbrt.process(api);\n\n Ok(())\n\n }) {\n\n Ok(()) => Ok(()),\n\n Err(e) => Err(format!(\"{:}\", e)),\n\n }\n\n}\n\n\n", "file_path": "api/src/parser/mod.rs", "rank": 58, "score": 139700.9049028678 }, { "content": "/// Generate Latin Hypercube samples.\n\n///\n\n/// * `rng` - Random number generator.\n\n/// * `n_samples` - Number of samples.\n\n/// * `n_dim` - Number of dimensions.\n\npub fn latin_hypercube(rng: &mut RNG, n_samples: usize, n_dim: usize) -> Vec<Float> {\n\n let mut samples = vec![0.0; n_samples];\n\n let inv_n_samples = 1.0 / n_samples as Float;\n\n\n\n // Generate LHS samples along diagonal.\n\n for i in 0..n_samples {\n\n for j in 0..n_dim {\n\n let r = rng.uniform_float();\n\n let sj = (i as Float + r) * inv_n_samples;\n\n samples[n_dim * i + j] = min(sj, ONE_MINUS_EPSILON);\n\n }\n\n }\n\n\n\n // Permute LHS samples in each dimension.\n\n for i in 0..n_dim {\n\n for j in 0..n_samples {\n\n let other = j + rng.bounded_uniform_u32(0, (n_samples - j) as u32) as usize;\n\n samples.swap(n_dim * j + i, n_dim * other + i);\n\n }\n\n }\n\n\n\n samples\n\n}\n\n\n", "file_path": "core/src/sampling/common.rs", "rank": 59, "score": 139700.9049028678 }, { "content": "/// Generate 1D samples.\n\n///\n\n/// * `rng` - Random number generator.\n\n/// * `n_samples` - Number of samples.\n\n/// * `jitter` - Jitter the samples.\n\npub fn 
stratified_sample_1d(rng: &mut RNG, n_samples: usize, jitter: bool) -> Vec<Float> {\n\n let inv_n_samples = 1.0 / n_samples as Float;\n\n\n\n (0..n_samples)\n\n .map(|i| {\n\n let delta = if jitter { rng.uniform_float() } else { 0.5 };\n\n min((i as Float + delta) * inv_n_samples, ONE_MINUS_EPSILON)\n\n })\n\n .collect::<Vec<Float>>()\n\n}\n\n\n", "file_path": "core/src/sampling/common.rs", "rank": 60, "score": 139700.9049028678 }, { "content": "/// Generate a number of scrambled 1D sample values using the Gray code-based\n\n/// sampling and the VanDerCorput generator matrix.\n\n///\n\n/// * `n_samples_per_pixel_sample` - Number of samples to generate for every\n\n/// sample for a pixel.\n\n/// * `n_pixel_samples` - Number of samples for a pixel.\n\n/// * `samples` - Sample values to update.\n\n/// * `rng` - Random number generator.\n\npub fn van_der_corput(\n\n n_samples_per_pixel_sample: usize,\n\n n_pixel_samples: usize,\n\n samples: &mut [Float],\n\n rng: &mut RNG,\n\n) {\n\n let scramble = rng.uniform_u32();\n\n let total_samples = n_samples_per_pixel_sample * n_pixel_samples;\n\n\n\n gray_code_sample_1d(&C_VANDER_CORPUT, total_samples, scramble, samples);\n\n\n\n // Randomly shuffle 1D sample points.\n\n for i in 0..n_pixel_samples {\n\n let start = i * n_samples_per_pixel_sample;\n\n let end = start + n_samples_per_pixel_sample;\n\n rng.shuffle(&mut samples[start..end], n_samples_per_pixel_sample, 1);\n\n }\n\n\n\n // Randomly shuffle last set of 1D sample points.\n\n let start = 0;\n", "file_path": "core/src/low_discrepency.rs", "rank": 61, "score": 138949.9464703758 }, { "content": "/// Uniformly sample all lights in the scene for direct lighting.\n\n///\n\n/// * `it` - The intersection information.\n\n/// * `bsdf` - The BSDF at the intersection.\n\n/// * `scene` - The scene.\n\n/// * `sampler` - The sampler.\n\n/// * `n_light_samples` - The number of samples to take for each light.\n\n/// * `handle_media` - Indicates whether effects of volumetric 
attenuation\n\n/// should be considered (default to false).\n\npub fn uniform_sample_all_lights(\n\n it: &Interaction,\n\n bsdf: &Option<&mut BSDF>,\n\n scene: &Scene,\n\n sampler: &mut ArcSampler,\n\n n_light_samples: &[usize],\n\n handle_media: bool,\n\n) -> Spectrum {\n\n let mut l = Spectrum::ZERO;\n\n\n\n for (j, light) in scene.lights.iter().enumerate() {\n\n let sampler_mut = Arc::get_mut(sampler).unwrap();\n\n\n\n // Accumulate contribution of j^th light to `l`.\n\n let n_samples = n_light_samples[j];\n\n\n\n let u_light_array = sampler_mut.get_2d_array(n_samples);\n\n let u_scattering_array = sampler_mut.get_2d_array(n_samples);\n\n\n\n let nl = u_light_array.len();\n", "file_path": "core/src/integrator/common.rs", "rank": 62, "score": 138945.128539631 }, { "content": "/// Clears all MIPMap caches.\n\npub fn clear_mipmap_caches() {\n\n let mut mipmaps = RGB_SPECTRUM_MIPMAPS\n\n .lock()\n\n .expect(\"Unable to access RGB_SPECTRUM_MIPMAPS mutex\");\n\n mipmaps.clear();\n\n\n\n let mut mipmaps = FLOAT_MIPMAPS\n\n .lock()\n\n .expect(\"Unable to access FLOAT_MIPMAPS mutex\");\n\n mipmaps.clear();\n\n}\n\n\n", "file_path": "core/src/mipmap/cache.rs", "rank": 63, "score": 138945.128539631 }, { "content": "/// Uniformly sample from one random light in the scene for direct lighting and\n\n/// multiply result by number of lights to compensate.\n\n///\n\n/// * `it` - The intersection information.\n\n/// * `bsdf` - The BSDF at the intersection.\n\n/// * `scene` - The scene.\n\n/// * `sampler` - The sampler.\n\n/// * `handle_media` - Indicates whether effects of volumetric attenuation\n\n/// should be considered (default to false).\n\n/// * `light_distrib` - PDF for the light's distribution. 
(default to None).\n\npub fn uniform_sample_one_light(\n\n it: &Interaction,\n\n bsdf: &Option<&mut BSDF>,\n\n scene: &Scene,\n\n sampler: &mut ArcSampler,\n\n handle_media: bool,\n\n light_distrib: Option<Arc<Distribution1D>>,\n\n) -> Spectrum {\n\n // Randomly choose a single light to sample, `light`.\n\n let n_lights = scene.lights.len();\n\n if n_lights == 0 {\n\n return Spectrum::ZERO;\n\n }\n\n\n\n let sampler_mut = Arc::get_mut(sampler).unwrap();\n\n\n\n let (light_num, light_pdf) = if let Some(ld) = light_distrib {\n\n let sample = sampler_mut.get_1d();\n\n let (ln, pdf, _) = ld.sample_discrete(sample);\n\n if pdf == 0.0 {\n", "file_path": "core/src/integrator/common.rs", "rank": 64, "score": 136528.0472973021 }, { "content": "#[inline]\n\npub fn spherical_direction_in_coord_frame(\n\n sin_theta: Float,\n\n cos_theta: Float,\n\n phi: Float,\n\n x: &Vector3f,\n\n y: &Vector3f,\n\n z: &Vector3f,\n\n) -> Vector3f {\n\n sin_theta * cos(phi) * x + sin_theta * sin(phi) * y + cos_theta * z\n\n}\n\n\n\n/// Return the spherical angle θ for a given vector.\n\n///\n\n/// * `v` - The vector.\n", "file_path": "core/src/geometry/util.rs", "rank": 65, "score": 136523.15813553767 }, { "content": "/// Convert a 32-bit floating point value to its constituent bits and\n\n/// return the representation as 32-bit unsigned integer.\n\n///\n\n/// * `f` - The 32-bit floating point number.\n\npub fn float_to_bits(f: f32) -> u32 {\n\n // SAFETY: f32 and u32 have same size (32-bits).\n\n let result: u32;\n\n unsafe {\n\n let i: u32 = std::mem::transmute_copy(&f);\n\n result = i;\n\n }\n\n result\n\n}\n\n\n", "file_path": "core/src/pbrt/common.rs", "rank": 66, "score": 134329.50354954443 }, { "content": "/// Uniformly sample a direction from a cone of directions about the z-axis in a\n\n/// given coordinate system.\n\n///\n\n/// * `u` - The random sample point.\n\n/// * `x` - The x-axis basis vector.\n\n/// * `y` - The x-axis basis vector.\n\n/// * `z` - The x-axis basis vector.\n\n/// 
* `cos_theta_max` - Cosine of the maximum angle of the beam.\n\npub fn uniform_sample_cone_coordinate_system(\n\n u: &Point2f,\n\n cos_theta_max: Float,\n\n x: &Vector3f,\n\n y: &Vector3f,\n\n z: &Vector3f,\n\n) -> Vector3f {\n\n let cos_theta = lerp(u[0], cos_theta_max, 1.0);\n\n let sin_theta = (1.0 - cos_theta * cos_theta).sqrt();\n\n let phi = u[1] * TWO_PI;\n\n cos(phi) * sin_theta * x + sin(phi) * sin_theta * y + cos_theta * z\n\n}\n\n\n\n/// Returns the PDF for sampling a direction from a cone of directions.\n", "file_path": "core/src/sampling/common.rs", "rank": 67, "score": 134225.0310128142 }, { "content": "/// Interface and helper functions for SPDs.\n\npub trait CoefficientSpectrum:\n\n Sized\n\n + Add\n\n + AddAssign\n\n + Sub\n\n + SubAssign\n\n + Mul<Self>\n\n + MulAssign<Self>\n\n + MulAssign<Float>\n\n + Div<Self>\n\n + DivAssign<Self>\n\n + DivAssign<Float>\n\n + Neg\n\n + Index<usize>\n\n + IndexMut<usize>\n\n + Clamp<Float>\n\n{\n\n /// Returns the stored samples.\n\n fn samples(&self) -> &[Float];\n\n\n", "file_path": "core/src/spectrum/common.rs", "rank": 68, "score": 131073.78473088864 }, { "content": "/// Evaluates the Perlin Noise function for a 1-D point. 
Evaluates `noise_1d()`\n\n/// at y = z = 0.5.\n\n///\n\n/// * `x` - x-coordinate of point.\n\npub fn noise_1d(x: Float) -> Float {\n\n noise_3d(x, 0.5, 0.5)\n\n}\n\n\n", "file_path": "core/src/texture/common.rs", "rank": 69, "score": 121786.89115596408 }, { "content": "#[inline(always)]\n\npub fn gamma(n: Int) -> Float {\n\n (n as Float * MACHINE_EPSILON) / (1.0 - n as Float * MACHINE_EPSILON)\n\n}\n\n\n\n/// Returns gamma corrected values for use in 8-bit images.\n\n///\n\n/// * `value` - Value to correct.\n", "file_path": "core/src/pbrt/common.rs", "rank": 70, "score": 121786.89115596408 }, { "content": "#[inline(always)]\n\npub fn erf(x: Float) -> Float {\n\n // constants\n\n let a1 = 0.254829592;\n\n let a2 = -0.284496736;\n\n let a3 = 1.421413741;\n\n let a4 = -1.453152027;\n\n let a5 = 1.061405429;\n\n let p = 0.3275911;\n\n\n\n // Save the sign of x\n\n let sign = if x < 0.0 { -1.0 } else { 1.0 };\n\n let x = abs(x);\n\n\n\n // A&S formula 7.1.26.\n\n let t = 1.0 / (1.0 + p * x);\n\n let y = 1.0 - (((((a5 * t + a4) * t) + a3) * t + a2) * t + a1) * t * (-x * x).exp();\n\n\n\n sign * y\n\n}\n\n\n\n/// Returns the inverse of the error function for a given floating point value.\n\n///\n\n/// * `x` - The floating point value.\n", "file_path": "core/src/pbrt/common.rs", "rank": 71, "score": 121786.89115596408 }, { "content": "/// Evaluate a Bézier curve at given parameter and return the point and derivative\n\n/// at the point.\n\n///\n\n/// * `cp` - The control points.\n\n/// * `u` - The parameter to evaluate.\n\nfn eval_bezier(cp: &[Point3f; 4], u: Float) -> (Point3f, Vector3f) {\n\n let cp1 = [\n\n lerp(u, cp[0], cp[1]),\n\n lerp(u, cp[1], cp[2]),\n\n lerp(u, cp[2], cp[3]),\n\n ];\n\n let cp2 = [lerp(u, cp1[0], cp1[1]), lerp(u, cp1[1], cp1[2])];\n\n\n\n let deriv = if (cp2[1] - cp2[0]).length_squared() > 0.0 {\n\n 3.0 * (cp2[1] - cp2[0])\n\n } else {\n\n // For a cubic Bezier, if the first three control points (say) are\n\n // coincident, then the derivative of 
the curve is legitimately (0,0,0)\n\n // at u=0. This is problematic for us, though, since we'd like to be\n\n // able to compute a surface normal there. In that case, just punt and\n\n // take the difference between the first and last control points, which\n\n // ain't great, but will hopefully do.\n\n cp[3] - cp[0]\n\n };\n\n\n\n (lerp(u, cp2[0], cp2[1]), deriv)\n\n}\n", "file_path": "shapes/src/curve.rs", "rank": 72, "score": 120378.49497051995 }, { "content": "/// Bump a floating point value up to the next greater representable floating\n\n/// point value.\n\n///\n\n/// * `v` - Floating point value.\n\npub fn next_float_up(v: Float) -> Float {\n\n // Handle infinity and negative zero for next_float_up\n\n if v.is_infinite() && v > 0.0 {\n\n return v;\n\n }\n\n\n\n let nv = if v == -0.0 { 0.0 } else { v };\n\n\n\n // Advance v to next higher float\n\n let mut ui = float_to_bits(nv);\n\n if nv >= 0.0 {\n\n ui += 1;\n\n } else {\n\n ui -= 1;\n\n }\n\n\n\n bits_to_float(ui)\n\n}\n\n\n", "file_path": "core/src/pbrt/common.rs", "rank": 73, "score": 119370.6648534929 }, { "content": "/// Bump a floating point value up to the next lower representable floating\n\n/// point value.\n\n///\n\n/// * `v` - Floating point value.\n\npub fn next_float_down(v: Float) -> Float {\n\n // Handle infinity and positive zero for next_float_down\n\n if v.is_infinite() && v < 0.0 {\n\n return v;\n\n }\n\n\n\n // Advance v to next lower float\n\n let nv = if v == 0.0 { -0.0 } else { v };\n\n let mut ui = float_to_bits(nv);\n\n if nv > 0.0 {\n\n ui -= 1;\n\n } else {\n\n ui += 1;\n\n }\n\n\n\n bits_to_float(ui)\n\n}\n\n\n", "file_path": "core/src/pbrt/common.rs", "rank": 74, "score": 119370.6648534929 }, { "content": "/// Returns `true` if given path is a relative path; otherwise returns `false`.\n\n///\n\n/// * `path` - The path.\n\npub fn is_relative_path(path: &str) -> bool {\n\n PathBuf::from(path).is_relative()\n\n}\n\n\n", "file_path": "core/src/fileutil.rs", "rank": 75, "score": 
119364.92075187076 }, { "content": "/// Evaluates the Perlin Noise function for a 3-D point.\n\n///\n\n/// * `p` - The 3-D point.\n\npub fn noise_point3f(p: Point3f) -> Float {\n\n noise_3d(p.x, p.y, p.z)\n\n}\n\n\n", "file_path": "core/src/texture/common.rs", "rank": 76, "score": 119364.92075187076 }, { "content": "#[inline(always)]\n\npub fn erf_inv(x: Float) -> Float {\n\n let x = clamp(x, -0.99999, 0.99999);\n\n let mut w = -((1.0 - x) * (1.0 + x)).ln();\n\n if w < 5.0 {\n\n w -= 2.5;\n\n\n\n let mut p = 2.81022636e-08;\n\n p = 3.43273939e-07 + p * w;\n\n p = -3.5233877e-06 + p * w;\n\n p = -4.39150654e-06 + p * w;\n\n p = 0.00021858087 + p * w;\n\n p = -0.00125372503 + p * w;\n\n p = -0.00417768164 + p * w;\n\n p = 0.246640727 + p * w;\n\n p = 1.50140941 + p * w;\n\n p * x\n\n } else {\n\n w = w.sqrt() - 3.0;\n\n\n\n let mut p = -0.000200214257;\n", "file_path": "core/src/pbrt/common.rs", "rank": 77, "score": 119364.92075187076 }, { "content": "#[inline]\n\npub fn reverse_bits_32(n: u32) -> u32 {\n\n let mut n = (n << 16) | (n >> 16);\n\n n = ((n & 0x00ff00ff) << 8) | ((n & 0xff00ff00) >> 8);\n\n n = ((n & 0x0f0f0f0f) << 4) | ((n & 0xf0f0f0f0) >> 4);\n\n n = ((n & 0x33333333) << 2) | ((n & 0xcccccccc) >> 2);\n\n n = ((n & 0x55555555) << 1) | ((n & 0xaaaaaaaa) >> 1);\n\n n\n\n}\n\n\n\n/// Reverse the bits of a 64-bit integer.\n\n///\n\n/// * `n` - 64-bit integer.\n", "file_path": "core/src/low_discrepency.rs", "rank": 78, "score": 119364.92075187076 }, { "content": "/// Returns `true` if given path is a absolute path; otherwise returns `false`.\n\n///\n\n/// * `path` - The path.\n\npub fn is_absolute_path(path: &str) -> bool {\n\n PathBuf::from(path).is_absolute()\n\n}\n", "file_path": "core/src/fileutil.rs", "rank": 79, "score": 119364.92075187076 }, { "content": "#[inline(always)]\n\npub fn tan(theta: Float) -> Float {\n\n theta.tan()\n\n}\n\n\n\n/// Return the arccosine of an angle.\n\n///\n\n/// * `theta` - The angle in radians.\n", "file_path": 
"core/src/pbrt/common.rs", "rank": 80, "score": 119364.92075187076 }, { "content": "/// Convert the bits of a 32-bit unsigned interger value and return the\n\n/// representation as a 32-bit floating point value.\n\n///\n\n/// * `i` - The 32-bit unsigned interger.\n\npub fn bits_to_float(i: u32) -> f32 {\n\n // SAFETY: f32 and u32 have same size (32-bits).\n\n let result: f32;\n\n unsafe {\n\n let f: f32 = std::mem::transmute_copy(&i);\n\n result = f;\n\n }\n\n result\n\n}\n\n\n", "file_path": "core/src/pbrt/common.rs", "rank": 81, "score": 119364.92075187076 }, { "content": "#[inline(always)]\n\npub fn sin(theta: Float) -> Float {\n\n theta.sin()\n\n}\n\n\n\n/// Return the tangent of an angle.\n\n///\n\n/// * `theta` - The angle in radians.\n", "file_path": "core/src/pbrt/common.rs", "rank": 82, "score": 119364.92075187076 }, { "content": "#[inline(always)]\n\npub fn atan(theta: Float) -> Float {\n\n theta.atan()\n\n}\n\n\n\n/// Computes the four quadrant arctangent of `y/x`.\n\n///\n\n/// Return values are in the following ranges based on `y` and `x`:\n\n/// * x = 0, y = 0 => 0\n\n/// * x >= 0 => arctan(y/x) -> [-π/2, π/2]\n\n/// * y >= 0 => arctan(y/x) + π -> (π/2, π]\n\n/// * y < 0 => arctan(y/x) - π -> (-π, -π/2)\n\n///\n\n/// * `y` - Proportion of y-coordinate.\n\n/// * `x` - Proportion of x-coordinate.\n", "file_path": "core/src/pbrt/common.rs", "rank": 83, "score": 119364.92075187076 }, { "content": "#[inline(always)]\n\npub fn cos(theta: Float) -> Float {\n\n theta.cos()\n\n}\n\n\n\n/// Return the sine of an angle.\n\n///\n\n/// * `theta` - The angle in radians.\n", "file_path": "core/src/pbrt/common.rs", "rank": 84, "score": 119364.92075187076 }, { "content": "#[inline(always)]\n\npub fn acos(theta: Float) -> Float {\n\n theta.acos()\n\n}\n\n\n\n/// Return the arcsine of an angle.\n\n///\n\n/// * `theta` - The angle in radians.\n", "file_path": "core/src/pbrt/common.rs", "rank": 85, "score": 119364.92075187076 }, { "content": "#[inline(always)]\n\npub fn 
asin(theta: Float) -> Float {\n\n theta.asin()\n\n}\n\n\n\n/// Computes the arctangent of a number. Return value is in radians in the range\n\n/// [-π/2, π/2];\n\n///\n\n/// * `theta` - The angle in radians.\n", "file_path": "core/src/pbrt/common.rs", "rank": 86, "score": 119364.92075187076 }, { "content": "#[inline(always)]\n\npub fn gamma_correct(value: Float) -> Float {\n\n if value <= 0.0031308 {\n\n 12.92 * value\n\n } else {\n\n 1.055 * value.powf(1.0 / 2.4) - 0.055\n\n }\n\n}\n\n\n\n/// Returns inverse of a gamma corrected value.\n\n///\n\n/// * `value` - The value.\n", "file_path": "core/src/pbrt/common.rs", "rank": 87, "score": 117066.79362914729 }, { "content": "#[inline]\n\npub fn gray_code_sample(n: u32) -> u32 {\n\n (n >> 1) ^ n\n\n}\n\n\n", "file_path": "core/src/low_discrepency.rs", "rank": 88, "score": 117066.79362914729 }, { "content": "#[inline(always)]\n\npub fn abs<T>(n: T) -> T\n\nwhere\n\n T: Num + Neg<Output = T> + PartialOrd + Copy,\n\n{\n\n if n < T::zero() {\n\n -n\n\n } else {\n\n n\n\n }\n\n}\n\n\n\n/// Returns the minimum of 2 numbers.\n\n///\n\n/// * `a` - First number.\n\n/// * `b` - Second number.\n", "file_path": "core/src/pbrt/common.rs", "rank": 89, "score": 116144.73702390504 }, { "content": "/// Interface of light distribution implementations that provide probability\n\n/// distributions for sampling light sources at a given point in space.\n\npub trait LightDistribution {\n\n /// Given a point |p| in space, this method returns a (hopefully effective)\n\n /// sampling distribution for light sources at that point.\n\n fn lookup(&self, p: &Point3f) -> Option<Arc<Distribution1D>>;\n\n}\n\n\n\n/// Atomic reference counted `LightDistribution `.\n\npub type ArcLightDistribution = Arc<dyn LightDistribution + Send + Sync>;\n\n\n", "file_path": "core/src/light_distrib/mod.rs", "rank": 90, "score": 115933.08042682118 }, { "content": "#[inline(always)]\n\npub fn inv_gamma_correct(value: Float) -> Float {\n\n if value <= 0.04045 {\n\n value 
* 1.0 / 12.92\n\n } else {\n\n ((value + 0.055) * 1.0 / 1.055).powf(2.4)\n\n }\n\n}\n\n\n\n/// Linearly interpolate between two points for parameters in [0, 1] and\n\n/// extrapolate for parameters outside that interval.\n\n///\n\n/// * `t` - Parameter.\n\n/// * `p0` - Point at t=0.\n\n/// * `p1` - Point at t=1.\n", "file_path": "core/src/pbrt/common.rs", "rank": 91, "score": 114883.24781622007 }, { "content": "/// Converts a named parameter to a vector of some type that stores\n\n/// 3 values.\n\n///\n\n/// * `v` - Slice containing floating point values.\n\n/// * `new` - Function used to construct elements of resulting vector.\n\nfn float_list_to_vec3<T, F>(v: &[Float], new: F) -> (Vec<T>, Option<String>)\n\nwhere\n\n F: Fn(Float, Float, Float) -> T,\n\n{\n\n let mut msg: Option<String> = None;\n\n\n\n let n = v.len();\n\n if n % 3 != 0 {\n\n msg = Some(\"length is not divisible by 3\".to_owned());\n\n }\n\n let res = (0..n)\n\n .step_by(3)\n\n .map(|i| new(v[i], v[i + 1], v[i + 2]))\n\n .collect();\n\n (res, msg)\n\n}\n\n\n", "file_path": "api/src/parser/mod.rs", "rank": 92, "score": 114077.94740038669 }, { "content": "/// Converts a named parameter to a vector of some type that stores\n\n/// 2 values.\n\n///\n\n/// * `v` - Slice containing floating point values.\n\n/// * `new` - Function used to construct elements of resulting vector.\n\nfn float_list_to_vec2<T, F>(v: &[Float], new: F) -> (Vec<T>, Option<String>)\n\nwhere\n\n F: Fn(Float, Float) -> T,\n\n{\n\n let mut msg: Option<String> = None;\n\n\n\n let n = v.len();\n\n if n % 2 != 0 {\n\n msg = Some(\"length is not divisible by 2\".to_owned());\n\n }\n\n let res = (0..n).step_by(2).map(|i| new(v[i], v[i + 1])).collect();\n\n (res, msg)\n\n}\n", "file_path": "api/src/parser/mod.rs", "rank": 93, "score": 114077.94740038669 }, { "content": "/// Returns the path to the parent folder; or `None` if path is root.\n\n///\n\n/// * `path` - The path.\n\npub fn parent_path(path: &str) -> Option<String> {\n\n 
PathBuf::from(path)\n\n .parent()\n\n .and_then(|p| p.to_str().map(String::from))\n\n}\n\n\n", "file_path": "core/src/fileutil.rs", "rank": 94, "score": 113846.60990118158 }, { "content": "#[inline(always)]\n\npub fn atan2(y: Float, x: Float) -> Float {\n\n y.atan2(x)\n\n}\n\n\n\n/// Returns `v^5`.\n\n///\n\n/// * `v` - The value.\n", "file_path": "core/src/pbrt/common.rs", "rank": 95, "score": 113558.82954662276 }, { "content": "/// Evaluates the Perlin Noise function for a 2-D point. Evaluates `noise_3d()`\n\n/// at z = 0.5.\n\n///\n\n/// * `x` - x-coordinate of point.\n\n/// * `y` - y-coordinate of point.\n\npub fn noise_2d(x: Float, y: Float) -> Float {\n\n noise_3d(x, y, 0.5)\n\n}\n\n\n", "file_path": "core/src/texture/common.rs", "rank": 96, "score": 113558.82954662276 }, { "content": "#[allow(non_snake_case)]\n\npub fn invert_catmull_rom(x: &[Float], values: &[Float], u: Float) -> Float {\n\n let n = values.len();\n\n\n\n // Stop when `u` is out of bounds.\n\n if !(u > values[0]) {\n\n return x[0];\n\n } else if !(u < values[n - 1]) {\n\n return x[n - 1];\n\n }\n\n\n\n // Map `u` to a spline interval by inverting `values`.\n\n let i = find_interval(n, |i| values[i] <= u);\n\n\n\n // Look up `x_i` and function values of spline segment `i`.\n\n let x0 = x[i];\n\n let x1 = x[i + 1];\n\n let f0 = values[i];\n\n let f1 = values[i + 1];\n\n let width = x1 - x0;\n\n\n", "file_path": "core/src/interpolation.rs", "rank": 97, "score": 113138.9005535611 }, { "content": "#[inline]\n\npub fn cosine_hemisphere_pdf(cos_theta: Float) -> Float {\n\n cos_theta * INV_PI\n\n}\n\n\n\n/// Weight samples using the balance heuristic.\n\n///\n\n/// * `nf` - Number of samples taken from sampling distribution `pf`.\n\n/// * `f_pdf` - PDF value from sampling distribution `pf`.\n\n/// * `ng` - Number of samples taken from sampling distribution `pg`.\n\n/// * `g_pdf` - PDF value from sampling distribution `pg`.\n", "file_path": "core/src/sampling/common.rs", "rank": 98, "score": 
112805.92245455307 }, { "content": "fn bump_int(x: Float) -> Float {\n\n (x / 2.0).floor() + 2.0 * max((x / 2.0) - (x / 2.0).floor() - 0.5, 0.0)\n\n}\n\n\n\nmacro_rules! from_params {\n\n ($t: ty, $get_texture_or_else_func: ident) => {\n\n impl From<(&TextureParams, ArcTransform)> for CheckerboardTexture2D<$t> {\n\n /// Create a `CheckerboardTexture2D<$t>` from given parameter set and\n\n /// transformation from texture space to world space.\n\n ///\n\n /// * `p` - Tuple containing texture parameters and texture space\n\n /// to world space transform.\n\n fn from(p: (&TextureParams, ArcTransform)) -> Self {\n\n let (tp, tex2world) = p;\n\n\n\n // Check texture dimensions.\n\n let dim = tp.find_int(\"dimension\", 2);\n\n if dim != 2 {\n\n panic!(\"Cannot create CheckerboardTexture2D for dim = {}\", dim);\n\n }\n", "file_path": "textures/src/checkerboard_2d.rs", "rank": 99, "score": 111869.39102230938 } ]
Rust
src/lib.rs
rkanati/podchamp
84488b1e92532052f1c1176150a8afa95779de65
#[macro_use] extern crate diesel; #[macro_use] extern crate diesel_migrations; use { chrono::{DateTime, Utc, NaiveDateTime}, thiserror::Error, url::Url, }; embed_migrations!(); pub mod models; pub mod schema; pub fn run_migrations(db: &diesel::sqlite::SqliteConnection) -> anyhow::Result<()> { embedded_migrations::run(db)?; Ok(()) } pub struct Database { conn: diesel::sqlite::SqliteConnection, } #[derive(Debug, Error)] pub enum OpenDatabaseError { #[error("invalid database path")] InvalidPath, #[error("creating database directory")] CreateDirectory(std::io::Error), #[error(transparent)] Diesel(#[from] diesel::result::ConnectionError), } impl Database { pub fn open(path: &std::path::Path) -> Result<Database, OpenDatabaseError> { let dir = path.parent().ok_or(OpenDatabaseError::InvalidPath)?; std::fs::create_dir_all(dir).map_err(OpenDatabaseError::CreateDirectory)?; let path = path.to_str().ok_or(OpenDatabaseError::InvalidPath)?; use diesel::prelude::*; let conn = SqliteConnection::establish(path)?; let db = Database{conn}; Ok(db) } } impl std::ops::Deref for Database { type Target = diesel::sqlite::SqliteConnection; fn deref(&self) -> &Self::Target { &self.conn } } #[derive(Debug, Error)] pub enum AddFeedError { #[error("feed named {0} already in database")] NameTaken(String), #[error(transparent)] Database(#[from] diesel::result::Error), } impl Database { pub fn add_feed(&mut self, name: &str, link: &Url, backlog: std::num::NonZeroU32, ) -> Result<(), AddFeedError> { let feed = models::NewFeed { name, uri: link.as_str(), backlog: backlog.get() as i32, fetch_since: None }; use diesel::{prelude::*, result::{Error, DatabaseErrorKind}}; diesel::insert_into(schema::feeds::table) .values(&feed) .execute(&self.conn) .map_err(|e| match e { Error::DatabaseError(DatabaseErrorKind::UniqueViolation, _) => AddFeedError::NameTaken(name.to_owned()), e => e.into(), })?; Ok(()) } } #[derive(Debug, Error)] pub enum RemoveFeedError { #[error("no feed named {0}")] NoSuchFeed(String), 
#[error(transparent)] Database(#[from] diesel::result::Error), } impl Database { pub fn remove_feed(&mut self, name: &str) -> Result<(), RemoveFeedError> { use{diesel::prelude::*, schema::feeds::dsl as dsl}; let n = diesel::delete(dsl::feeds.filter(dsl::name.eq(name))) .execute(&self.conn)?; if n == 0 { return Err(RemoveFeedError::NoSuchFeed(name.into())); } Ok(()) } } #[derive(Debug, Error)] pub enum GetFeedsError { #[error(transparent)] Database(#[from] diesel::result::Error), } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum GetFeeds<'n> { All, One(&'n str), } impl Database { pub fn get_feeds(&self, which: GetFeeds<'_>) -> Result<Vec<models::Feed>, GetFeedsError> { use{diesel::prelude::*, schema::feeds::dsl as feeds}; let query = match which { GetFeeds::All => feeds::feeds.into_boxed(), GetFeeds::One(name) => feeds::feeds.filter(feeds::name.eq(name)).into_boxed() }; query.load::<models::Feed>(&self.conn) .map_err(GetFeedsError::Database) } } #[derive(Debug, Error)] pub enum SetColumnError { #[error("no feed named {0}")] NoSuchFeed(String), #[error(transparent)] Database(#[from] diesel::result::Error), } impl Database { pub fn set_link(&mut self, feed: &str, link: &Url) -> Result<(), SetColumnError> { use{diesel::prelude::*, schema::feeds::dsl as dsl}; let n = diesel::update(dsl::feeds.filter(dsl::name.eq(feed))) .set(dsl::uri.eq(link.as_str())) .execute(&self.conn)?; if n == 0 { return Err(SetColumnError::NoSuchFeed(feed.into())); } Ok(()) } pub fn set_backlog(&mut self, feed: &str, backlog: std::num::NonZeroU32) -> Result<(), SetColumnError> { use{diesel::prelude::*, schema::feeds::dsl as dsl}; let n = diesel::update(dsl::feeds.filter(dsl::name.eq(feed))) .set(dsl::backlog.eq(backlog.get() as i32)) .execute(&self.conn)?; if n == 0 { return Err(SetColumnError::NoSuchFeed(feed.into())); } Ok(()) } pub fn set_fetch_since(&mut self, feed: &str, since: &DateTime<Utc>) -> Result<(), SetColumnError> { use{diesel::prelude::*, schema::feeds::dsl as dsl}; let n = 
diesel::update(dsl::feeds.filter(dsl::name.eq(feed))) .set(dsl::fetch_since.eq(since.naive_utc())) .execute(&self.conn)?; if n == 0 { return Err(SetColumnError::NoSuchFeed(feed.into())); } Ok(()) } } #[derive(Debug, Error)] pub enum ResetRegisterError { #[error(transparent)] Database(#[from] diesel::result::Error), } impl Database { pub fn reset_register(&mut self, feed: &str) -> Result<(), ResetRegisterError> { use diesel::prelude::*; use schema::{register::dsl as register, feeds::dsl as feeds}; diesel::delete(register::register.filter(register::feed.eq(feed))) .execute(&self.conn)?; diesel::update(feeds::feeds.filter(feeds::name.eq(feed))) .set(feeds::fetch_since.eq::<Option<NaiveDateTime>>(None)) .execute(&self.conn)?; Ok(()) } } #[derive(Debug, Error)] pub enum IsEpisodeRegisteredError { #[error(transparent)] Database(#[from] diesel::result::Error), } impl Database { pub fn is_episode_registered(&self, feed: &str, guid: &str) -> Result<bool, IsEpisodeRegisteredError> { use {diesel::prelude::*, schema::register::dsl as register}; let n: i64 = register::register .filter(register::feed.eq(feed)) .filter(register::guid.eq(guid)) .count() .get_result(&self.conn)?; Ok(n != 0) } } #[derive(Debug, Error)] pub enum RegisterEpisodeError { #[error(transparent)] Database(#[from] diesel::result::Error), } impl Database { pub fn register_episode(&mut self, feed: &str, guid: &str) -> Result<(), RegisterEpisodeError> { let registration = models::NewRegistration{feed, guid}; use diesel::prelude::*; diesel::insert_into(schema::register::table) .values(&registration) .execute(&self.conn)?; Ok(()) } }
#[macro_use] extern crate diesel; #[macro_use] extern crate diesel_migrations; use { chrono::{DateTime, Utc, NaiveDateTime}, thiserror::Error, url::Url, }; embed_migrations!(); pub mod models; pub mod schema; pub fn run_migrations(db: &diesel::sqlite::SqliteConnection) -> anyhow::Result<()> { embedded_migrations::run(db)?; Ok(()) } pub struct Database { conn: diesel::sqlite::SqliteConnection, } #[derive(Debug, Error)] pub enum OpenDatabaseError { #[error("invalid database path")] InvalidPath, #[error("creating database directory")] CreateDirectory(std::io::Error), #[error(transparent)] Diesel(#[from] diesel::result::ConnectionError), } impl Database { pub fn open(path: &std::path::Path) -> Result<Database, OpenDatabaseError> { let dir = path.parent().ok_or(OpenDatabaseError::InvalidPath)?; std::fs::create_dir_all(dir).map_err(OpenDatabaseError::CreateDirectory)?; let path = path.to_str().ok_or(OpenDatabaseError::InvalidPath)?; use diesel::prelude::*; let conn = SqliteConnection::establish(path)?; let db = Database{conn}; Ok(db) } } impl std::ops::Deref for Database { type Target = diesel::sqlite::SqliteConnection; fn deref(&self) -> &Self::Target { &self.conn } } #[derive(Debug, Error)] pub enum AddFeedError { #[error("feed named {0} already in database")] NameTaken(String), #[error(transparent)] Database(#[from] diesel::result::Error), } impl Database { pub fn add_feed(&mut self, name: &str, link: &Url, backlog: std::num::NonZeroU32, ) -> Result<(), AddFeedError> { let feed = models::NewFeed { name, uri: link.as_str(), backlog: backlog.get() as i32, fetch_since: None }; use diesel::{prelude::*, result::{Error, DatabaseErrorKind}}; diesel::insert_into(schema::feeds::table) .values(&feed) .execute(&self.conn) .map_err(|e| match e { Error::DatabaseError(DatabaseErrorKind::UniqueViolation, _) => AddFeedError::NameTaken(name.to_owned()), e => e.into(), })?; Ok(()) } } #[derive(Debug, Error)] pub enum RemoveFeedError { #[error("no feed named {0}")] NoSuchFeed(String), 
#[error(transparent)] Database(#[from] diesel::result::Error), } impl Database { pub fn remove_feed(&mut self, name: &str) -> Result<(), RemoveFeedError> { use{diesel::prelude::*, schema::feeds::dsl as dsl}; let n = diesel::delete(dsl::feeds.filter(dsl::name.eq(name))) .execute(&self.conn)?; if n == 0 { return Err(RemoveFeedError::NoSuchFeed(name.into())); } Ok(()) } } #[derive(Debug, Error)] pub enum GetFeedsError { #[error(transparent)] Database(#[from] diesel::result::Error), } #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub enum GetFeeds<'n> { All, One(&'n str), } impl Database { pub fn get_feeds(&self, which: GetFeeds<'_>) -> Result<Vec<models::Feed>, GetFeedsError> { use{diesel::prelude::*, schema::feeds::dsl as feeds}; let query = match which { GetFeeds::All => feeds::feeds.into_boxed(), GetFeeds::One(name) => feeds::feeds.filter(feeds::name.eq(name)).into_boxed() }; query.load::<models::Feed>(&self.conn) .map_err(GetFeedsError::Database) } } #[derive(Debug, Error)] pub enum SetColumnError { #[error("no feed named {0}")] NoSuchFeed(String), #[error(transparent)] Database(#[from] diesel::result::Error), } impl Database { pub fn set_link(&mut self, feed: &str, link: &Url) -> Result<(), SetColumnError> { use{diesel::prelude::*, schema::feeds::dsl as dsl}; let n = diesel::update(dsl::feeds.filter(dsl::name.eq(feed))) .set(dsl::uri.eq(link.as_str())) .execute(&self.conn)?; if n == 0 { return Err(SetColumnError::NoSuchFeed(feed.into())); } Ok(()) } pub fn set_backlog(&mut self, feed: &str, backlog: std::num::NonZeroU32) -> Result<(), SetColumnError> { use{diesel::prelude::*, schema::feeds::dsl as dsl}; let n
0) } } #[derive(Debug, Error)] pub enum RegisterEpisodeError { #[error(transparent)] Database(#[from] diesel::result::Error), } impl Database { pub fn register_episode(&mut self, feed: &str, guid: &str) -> Result<(), RegisterEpisodeError> { let registration = models::NewRegistration{feed, guid}; use diesel::prelude::*; diesel::insert_into(schema::register::table) .values(&registration) .execute(&self.conn)?; Ok(()) } }
= diesel::update(dsl::feeds.filter(dsl::name.eq(feed))) .set(dsl::backlog.eq(backlog.get() as i32)) .execute(&self.conn)?; if n == 0 { return Err(SetColumnError::NoSuchFeed(feed.into())); } Ok(()) } pub fn set_fetch_since(&mut self, feed: &str, since: &DateTime<Utc>) -> Result<(), SetColumnError> { use{diesel::prelude::*, schema::feeds::dsl as dsl}; let n = diesel::update(dsl::feeds.filter(dsl::name.eq(feed))) .set(dsl::fetch_since.eq(since.naive_utc())) .execute(&self.conn)?; if n == 0 { return Err(SetColumnError::NoSuchFeed(feed.into())); } Ok(()) } } #[derive(Debug, Error)] pub enum ResetRegisterError { #[error(transparent)] Database(#[from] diesel::result::Error), } impl Database { pub fn reset_register(&mut self, feed: &str) -> Result<(), ResetRegisterError> { use diesel::prelude::*; use schema::{register::dsl as register, feeds::dsl as feeds}; diesel::delete(register::register.filter(register::feed.eq(feed))) .execute(&self.conn)?; diesel::update(feeds::feeds.filter(feeds::name.eq(feed))) .set(feeds::fetch_since.eq::<Option<NaiveDateTime>>(None)) .execute(&self.conn)?; Ok(()) } } #[derive(Debug, Error)] pub enum IsEpisodeRegisteredError { #[error(transparent)] Database(#[from] diesel::result::Error), } impl Database { pub fn is_episode_registered(&self, feed: &str, guid: &str) -> Result<bool, IsEpisodeRegisteredError> { use {diesel::prelude::*, schema::register::dsl as register}; let n: i64 = register::register .filter(register::feed.eq(feed)) .filter(register::guid.eq(guid)) .count() .get_result(&self.conn)?; Ok(n !=
random
[ { "content": "fn collect_recent_episodes<'c> (channel: &'c feed_rs::model::Feed, now: &DateTime<Utc>)\n\n -> Vec<(&'c feed_rs::model::Entry, &'c Url, DateTime<Utc>)>\n\n{\n\n let mut recents: Vec<_> = channel.entries.iter()\n\n // ignore items with no date, or no actual episode to download\n\n .filter_map(|item| {\n\n let date = item.published?;\n\n // TODO sort this out. as of feed-rs 0.6, rss enclosures are emulated with\n\n // mediarss media objects, but this is very janky and not really consistent\n\n // with podcasts as they are normally understood. file a bug? not sure.\n\n let link = item.media.iter()\n\n .flat_map(|media_obj| media_obj.content.iter())\n\n .find_map(|content| {\n\n let mime = content.content_type.as_ref()?;\n\n if mime.type_() != \"audio\" { return None; }\n\n content.url.as_ref()\n\n })?;\n\n Some((item, link, date))\n\n })\n\n // ignore time-travellers\n", "file_path": "src/fetch.rs", "rank": 1, "score": 82743.95699580816 }, { "content": "#[derive(Clone)]\n\nstruct SingleInstance(std::sync::Arc<std::sync::Mutex<Option<std::path::PathBuf>>>);\n\n\n\nimpl SingleInstance {\n\n fn new(rt_path: &std::path::Path) -> Anyhow<Self> {\n\n std::fs::create_dir_all(rt_path)?;\n\n\n\n let lockdir_path = rt_path.join(\"podchamp.lock.d\");\n\n std::fs::create_dir(&lockdir_path)?;\n\n\n\n let pid = format!(\"{}\", std::process::id());\n\n std::fs::write(lockdir_path.join(\"pid\"), &pid)?;\n\n\n\n let utx = std::sync::Mutex::new(Some(lockdir_path));\n\n Ok(SingleInstance(std::sync::Arc::new(utx)))\n\n }\n\n\n\n fn done(&self) {\n\n if let Ok(mut lock) = self.0.lock() {\n\n if let Some(path) = lock.take() {\n\n let _ = std::fs::remove_dir_all(&path);\n", "file_path": "src/main.rs", "rank": 2, "score": 43432.17878057042 }, { "content": "create table feeds(\n\n name text not null primary key,\n\n uri text not null,\n\n backlog int not null,\n\n fetch_since datetime\n\n);\n\n\n", "file_path": "migrations/2020-10-27-144918_initial_schema/up.sql", "rank": 3, 
"score": 36292.79906290347 }, { "content": "drop table feeds;\n", "file_path": "migrations/2020-10-27-144918_initial_schema/down.sql", "rank": 4, "score": 36292.79906290347 }, { "content": "// }\n\n//}\n\n\n\n#[derive(Queryable)]\n\npub struct Feed {\n\n pub name: String,\n\n pub uri: String,\n\n pub backlog: i32,\n\n pub fetch_since: Option<NaiveDateTime>,\n\n}\n\n\n\n#[derive(Insertable)]\n\n#[table_name=\"feeds\"]\n\npub struct NewFeed<'a> {\n\n pub name: &'a str,\n\n pub uri: &'a str,\n\n pub backlog: i32,\n\n pub fetch_since: Option<NaiveDateTime>,\n\n}\n\n\n", "file_path": "src/models.rs", "rank": 5, "score": 21892.790693580988 }, { "content": "\n\nuse {\n\n crate::schema::*,\n\n chrono::prelude::*,\n\n};\n\n\n\n//struct StoredTimestamp(DateTime<Utc>);\n\n//\n\n//impl Into<DateTime<Utc>> for StoredTimestamp {\n\n// fn into(self) -> DateTime<Utc> { self.0 }\n\n//}\n\n//\n\n//impl Queryable<S, B> for StoredTimestamp where\n\n// B: Backend,\n\n// String: Queryable<S, B>\n\n//{\n\n// type Row = <String as Queryable<S, B>>::Row;\n\n//\n\n// fn build(row: Self::Row) -> Self {\n\n//\n", "file_path": "src/models.rs", "rank": 6, "score": 21888.791658243328 }, { "content": "#[derive(Queryable)]\n\npub struct Registration {\n\n pub feed: String,\n\n pub guid: String,\n\n}\n\n\n\n#[derive(Insertable)]\n\n#[table_name=\"register\"]\n\npub struct NewRegistration<'a> {\n\n pub feed: &'a str,\n\n pub guid: &'a str,\n\n}\n\n\n", "file_path": "src/models.rs", "rank": 7, "score": 21885.835766288335 }, { "content": "table! {\n\n feeds (name) {\n\n name -> Text,\n\n uri -> Text,\n\n backlog -> Integer,\n\n fetch_since -> Nullable<Timestamp>,\n\n }\n\n}\n\n\n\ntable! 
{\n\n register (feed, guid) {\n\n feed -> Text,\n\n guid -> Text,\n\n }\n\n}\n\n\n\njoinable!(register -> feeds (feed));\n\n\n\nallow_tables_to_appear_in_same_query!(\n\n feeds,\n\n register,\n\n);\n", "file_path": "src/schema.rs", "rank": 8, "score": 21649.340586596252 }, { "content": "pragma foreign_keys = on;\n\n\n", "file_path": "migrations/2020-10-27-144918_initial_schema/up.sql", "rank": 9, "score": 20272.54171079363 }, { "content": "create table register(\n\n feed text not null references feeds(name) on delete cascade,\n\n guid text not null,\n\n primary key(feed, guid)\n\n);\n\n\n", "file_path": "migrations/2020-10-27-144918_initial_schema/up.sql", "rank": 10, "score": 17998.985631078456 }, { "content": "drop table register;\n\n\n", "file_path": "migrations/2020-10-27-144918_initial_schema/down.sql", "rank": 11, "score": 17998.985631078456 }, { "content": "\n\nuse url::Url;\n\n\n\nuse clap::Clap;\n\n\n\n#[derive(Clone, Debug)]\n\npub struct DatabasePath(std::path::PathBuf);\n\n\n\nimpl std::ops::Deref for DatabasePath {\n\n type Target = std::path::Path;\n\n fn deref(&self) -> &std::path::Path {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl Default for DatabasePath {\n\n fn default() -> Self {\n\n let dirs = directories::ProjectDirs::from(\"\", \"\", \"podchamp\").unwrap();\n\n let path = dirs.data_dir().join(\"podchamp.sqlite\");\n\n DatabasePath(path)\n", "file_path": "src/options.rs", "rank": 24, "score": 19.51447784041717 }, { "content": "\n\n let mut db = podchamp::Database::open(&opts.database_path)?;\n\n\n\n match &opts.command {\n\n Command::Add{name, link, backlog} => {\n\n let backlog = backlog.or(std::num::NonZeroU32::new(1)).unwrap();\n\n db.add_feed(name, link, backlog)?;\n\n eprintln!(\"Added {}\", name);\n\n }\n\n\n\n Command::Rm{name} => {\n\n db.remove_feed(name)?;\n\n }\n\n\n\n Command::Ls => {\n\n let results = db.get_feeds(podchamp::GetFeeds::All)?;\n\n if results.is_empty() { eprintln!(\"No feeds. 
You can add one with `podchamp add`.\"); }\n\n for feed in results {\n\n // TODO tabulate\n\n println!(\"{:16} {}\", feed.name, feed.uri);\n", "file_path": "src/main.rs", "rank": 25, "score": 17.77796273108393 }, { "content": "\n\nuse {\n\n crate::{Anyhow, Options},\n\n podchamp::{Database, GetFeeds, models::Feed},\n\n anyhow::bail,\n\n chrono::prelude::*,\n\n futures::{\n\n stream::FuturesUnordered,\n\n StreamExt as _,\n\n },\n\n url::Url,\n\n};\n\n\n\npub(crate)\n\nasync fn fetch<'a> (\n\n db: &'a mut Database,\n\n feed: Option<&'_ str>,\n\n opts: &'a Options,\n\n now: DateTime<Utc>,\n\n) -> Anyhow<()> {\n", "file_path": "src/fetch.rs", "rank": 26, "score": 16.434816773669677 }, { "content": "}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct RuntimeDirPath(std::path::PathBuf);\n\n\n\nimpl std::ops::Deref for RuntimeDirPath {\n\n type Target = std::path::Path;\n\n fn deref(&self) -> &std::path::Path {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl Default for RuntimeDirPath {\n\n fn default() -> Self {\n\n let dirs = directories::ProjectDirs::from(\"\", \"\", \"podchamp\").unwrap();\n\n let path = dirs.runtime_dir().unwrap();\n\n RuntimeDirPath(path.into())\n\n }\n\n}\n\n\n", "file_path": "src/options.rs", "rank": 27, "score": 16.229647040056644 }, { "content": "\n\n #[clap(subcommand)]\n\n pub command: Command\n\n}\n\n\n\nimpl Options {\n\n pub fn load() -> Self {\n\n Self::parse()\n\n }\n\n}\n\n\n\n#[derive(Clap)]\n\npub enum Command {\n\n /// Add a feed\n\n Add {\n\n /// A name for the feed\n\n name: String,\n\n\n\n /// The feed's link\n\n link: Url,\n", "file_path": "src/options.rs", "rank": 28, "score": 15.633456666165968 }, { "content": "impl std::str::FromStr for RuntimeDirPath {\n\n type Err = <std::path::PathBuf as std::str::FromStr>::Err;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n std::str::FromStr::from_str(s).map(RuntimeDirPath)\n\n }\n\n}\n\n\n\n// XXX see Display for DatabasePath\n\nimpl std::fmt::Display for RuntimeDirPath {\n\n fn fmt(&self, f: 
&mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"{}\", self.0.to_str().unwrap())\n\n }\n\n}\n\n\n\n\n\n// TODO replace clap with argh once it's more mature\n\n#[derive(Clap)]\n\n#[clap(about, author, version)]\n\npub struct Options {\n\n /// Path to Podchamp's database file\n", "file_path": "src/options.rs", "rank": 29, "score": 14.804649788827195 }, { "content": " }\n\n }\n\n\n\n Command::Mod{feed, how} => {\n\n match how {\n\n Modification::Link{link} => {\n\n db.set_link(feed, link)?;\n\n eprintln!(\"Changed {} feed link to {}\", feed, link);\n\n }\n\n\n\n Modification::Backlog{n} => {\n\n db.set_backlog(feed, *n)?;\n\n eprintln!(\"Changed {} backlog to {}\", feed, n);\n\n }\n\n }\n\n }\n\n\n\n Command::Reset{feed} => {\n\n db.reset_register(&feed)?;\n\n eprintln!(\"Progress reset for {}\", feed);\n", "file_path": "src/main.rs", "rank": 30, "score": 14.130980559930377 }, { "content": "\n\n#![forbid(unsafe_code)]\n\n#![feature(try_blocks)]\n\n\n\nmod fetch;\n\nmod options;\n\n\n\nuse {\n\n crate::{fetch::fetch, options::*},\n\n chrono::prelude::*,\n\n};\n\n\n\npub(crate) use anyhow::Result as Anyhow;\n\n\n\n#[derive(Clone)]\n", "file_path": "src/main.rs", "rank": 31, "score": 13.31020459611165 }, { "content": " }\n\n}\n\n\n\nimpl std::str::FromStr for DatabasePath {\n\n type Err = <std::path::PathBuf as std::str::FromStr>::Err;\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n std::str::FromStr::from_str(s).map(DatabasePath)\n\n }\n\n}\n\n\n\n// XXX currently, clap uses Default to construct a default value...\n\n// ... 
and then immediately ToString=>Displays it, and then FromStrs it straight back\n\n// what the _fuck_\n\n// see https://github.com/clap-rs/clap/issues/1694\n\n// i doubt this will ever be fixed without turning clap on its head and making it typed all the way\n\n// through\n\nimpl std::fmt::Display for DatabasePath {\n\n fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(f, \"{}\", self.0.to_str().unwrap())\n\n }\n", "file_path": "src/options.rs", "rank": 32, "score": 11.874065402817168 }, { "content": " let feeds = db.get_feeds(match feed {\n\n None => GetFeeds::All,\n\n Some(feed) => GetFeeds::One(feed)\n\n })?;\n\n\n\n if feeds.is_empty() {\n\n eprintln!(\"No feeds. You can add one with `podchamp add`.\");\n\n return Ok(())\n\n }\n\n\n\n {\n\n eprint!(\"Fetching {}\", &feeds[0].name);\n\n for feed in &feeds[1..] {\n\n eprint!(\", {}\", &feed.name);\n\n }\n\n eprintln!();\n\n }\n\n\n\n let web_client = reqwest::Client::new();\n\n let mut channels = feeds.into_iter()\n", "file_path": "src/fetch.rs", "rank": 33, "score": 11.119660751470178 }, { "content": " since\n\n }\n\n else {\n\n // new feed, or backlog increased back past since-date - fetch from start of\n\n // backlog\n\n db.set_fetch_since(&feed.name, &backlog_start_date)?;\n\n backlog_start_date\n\n };\n\n\n\n for (item, link, date) in recents.iter()\n\n .take_while(|(_, _, date)| date >= &threshold)\n\n {\n\n // TODO do this in one go for all newest items\n\n if !db.is_episode_registered(&feed.name, &item.id)? 
{\n\n nothing_to_do = false;\n\n start_download(&opts, &feed, &item, &link, &date).await?;\n\n db.register_episode(&feed.name, &item.id)?;\n\n }\n\n }\n\n };\n", "file_path": "src/fetch.rs", "rank": 34, "score": 11.030164300869147 }, { "content": "#[derive(Clap)]\n\npub enum Modification {\n\n /// Set the feed's link\n\n Link {\n\n /// The new link\n\n link: Url,\n\n },\n\n\n\n /// Set the number of most-recent episodes to fetch\n\n Backlog {\n\n n: std::num::NonZeroU32,\n\n },\n\n}\n\n\n", "file_path": "src/options.rs", "rank": 35, "score": 10.265977159187479 }, { "content": " .filter(|(_, _, date)| date < &now)\n\n .collect();\n\n\n\n // sort the list by descending date\n\n recents.sort_unstable_by_key(|(_, _, date)| std::cmp::Reverse(*date));\n\n\n\n recents\n\n}\n\n\n\nasync fn start_download(\n\n opts: &Options,\n\n feed: &Feed,\n\n item: &feed_rs::model::Entry,\n\n link: &Url,\n\n date: &DateTime<Utc>)\n\n -> Anyhow<()>\n\n{\n\n let mut command = tokio::process::Command::new(&opts.downloader);\n\n command.arg(link.as_str());\n\n\n", "file_path": "src/fetch.rs", "rank": 36, "score": 10.020062437925937 }, { "content": " #[clap(long, default_value, env = \"PODCHAMP_DATABASE_PATH\")]\n\n pub database_path: DatabasePath,\n\n\n\n /// Path to a temporary folder\n\n #[clap(long, default_value, env = \"PODCHAMP_RUNTIME_DIR\")]\n\n pub runtime_dir_path: RuntimeDirPath,\n\n\n\n /// Command to invoke when downloading episodes\n\n ///\n\n /// This command is invoked with the URI of the file to be downloaded as its lone argument.\n\n /// Various feed and episode metadata is injected into its environment, in variables with names\n\n /// starting with `PODCHAMP_`\n\n #[clap(long, default_value = \"wget\", env = \"PODCHAMP_DOWNLOADER\")]\n\n pub downloader: String,\n\n\n\n /// The format for the episode's date passed to the downloader in `PODCHAMP_DATE`\n\n ///\n\n /// See `strftime(3)` for how to specify this\n\n #[clap(long, default_value = \"%F\", env = 
\"PODCHAMP_DATE_FORMAT\")]\n\n pub date_format: String,\n", "file_path": "src/options.rs", "rank": 37, "score": 9.924763515393638 }, { "content": "\n\n if let Err(e) = result { eprintln!(\"Fetch error: {}\", e); }\n\n }\n\n\n\n if nothing_to_do {\n\n eprintln!(\"Already up-to-date\");\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/fetch.rs", "rank": 38, "score": 9.363962001657484 }, { "content": " .map(|feed| {\n\n let request = web_client.get(&feed.uri).build().unwrap();\n\n let web_client = web_client.clone();\n\n tokio::spawn(async move {\n\n let result: Anyhow<_> = try {\n\n web_client.execute(request).await?\n\n .bytes().await?\n\n };\n\n (feed, result)\n\n })\n\n })\n\n .collect::<FuturesUnordered<_>>();\n\n\n\n let mut nothing_to_do = true;\n\n\n\n while let Some(join_result) = channels.next().await {\n\n let result: Anyhow<_> = try {\n\n let (feed, fetch_result) = join_result?;\n\n let bytes = fetch_result?;\n\n\n", "file_path": "src/fetch.rs", "rank": 39, "score": 7.877462951147674 }, { "content": "\n\n /// Number of most-recent episodes to fetch. 
Defaults to 1.\n\n #[clap(short = 'n', long = \"backlog\")]\n\n backlog: Option<std::num::NonZeroU32>,\n\n },\n\n\n\n /// Remove a feed\n\n #[clap(alias = \"remove\")]\n\n Rm {\n\n /// The feed to remove\n\n name: String,\n\n },\n\n\n\n /// List feeds\n\n #[clap(alias = \"list\")]\n\n Ls,\n\n\n\n /// Modify a feed's settings\n\n #[clap(alias = \"modify\")]\n\n Mod {\n", "file_path": "src/options.rs", "rank": 40, "score": 7.6171562954988445 }, { "content": " let channel = feed_rs::parser::parse(&bytes[..])?;\n\n\n\n // build a list of most-recent episodes\n\n let recents = collect_recent_episodes(&channel, &now);\n\n if recents.is_empty() {\n\n eprintln!(\"{} contains no recognizable episodes\", &feed.name);\n\n continue;\n\n }\n\n\n\n // find date of first episode within backlog\n\n let backlog_start_index = (feed.backlog as usize).max(1).min(recents.len()) - 1;\n\n let (_, _, backlog_start_date) = recents[backlog_start_index];\n\n\n\n // figure out what date to fetch back to\n\n let threshold = if let Some(since) = feed\n\n .fetch_since\n\n .map(|naive| DateTime::from_utc(naive, Utc))\n\n .filter(|since| since <= &backlog_start_date)\n\n {\n\n // mature feed - keep fetching from the established date\n", "file_path": "src/fetch.rs", "rank": 41, "score": 7.6027983867577476 }, { "content": "```\n\n\n\nNormally, when you add a new feed, it has a _backlog_ of 1. This means it will download only the\n\nmost recent episode the first time you fetch, and every episode newer than it subsequently. 
If you\n\nwant more to be going on with, you can set a larger backlog:\n\n\n\n```sh\n\n$ podchamp add -n 10 streetfight 'http://feeds.feedburner.com/streetfightradio'\n\n```\n\nor, if you added it already (or reset the feed) you can change the backlog:\n\n\n\n```sh\n\n$ podchamp mod streetfight backlog 10\n\n```\n\n\n\nThis will download the 10 most recent episodes the first time you fetch, and every episode newer\n\nthan the oldest of those subequently.\n\n\n\nCurrently there's no way to download particular episodes, but I'll implement it eventually.\n\n\n\nIf you decide you don't like a podcast and want podchamp to stop fetching it, you can\n\nremove its feed:\n\n\n\n```sh\n", "file_path": "README.md", "rank": 42, "score": 6.7299013546139586 }, { "content": " }\n\n\n\n Command::Fetch{feed} => fetch(&mut db, feed.as_deref(), &opts, now).await?,\n\n }\n\n\n\n instance.done();\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 43, "score": 6.591512675586415 }, { "content": " }\n\n }\n\n }\n\n}\n\n\n\n#[tokio::main(flavor = \"current_thread\")]\n\nasync fn main() -> Anyhow<()> {\n\n let now = Utc::now();\n\n\n\n let opts = options::Options::load();\n\n\n\n let instance = SingleInstance::new(&opts.runtime_dir_path)?;\n\n std::panic::set_hook({\n\n let hook = std::panic::take_hook();\n\n let instance = instance.clone();\n\n Box::new(move |info| {\n\n instance.done();\n\n (hook)(info)\n\n })\n\n });\n", "file_path": "src/main.rs", "rank": 44, "score": 6.579133389786957 }, { "content": "\n\n# Podchamp\n\n## what\n\nA no-frills tool for fetching your podcasts.\n\n\n\n**Work in progress,** but I've been using it exclusively for six months without any major issues.\n\n\n\n## why\n\nI've found very few podcast tools that aren't either horribly bloated (I don't want my downloader to\n\nalso be a half-assed media player), decent-but-fragile (shell scripts; tries too hard to make\n\nsense of hopelessly broken feeds), or inflexible (insists on performing the download 
itself; wants\n\nto know filename patterns and directories).\n\n\n\nI've used [greg](https://github.com/manolomartinez/greg) for years now, but that project is\n\nmore-or-less dead, and I don't like a few of its design choices, so this is a replacement of sorts.\n\n\n\n## who\n\nIt's for me. This will not get lots of extraneous features like playback or tagging; don't @ me.\n\n\n\n## how\n\nPodchamp keeps a record of feeds and previously-downloaded episodes in a small database. When\n\nchecking for new episodes, podchamp downloads the feed xml, parses it, and launches downloads for\n\nany episodes it doesn't remember downloading previously.\n\n\n\nYou add a feed like this:\n\n\n\n```sh\n\n$ podchamp add mbmbam 'https://feeds.simplecast.com/wjQvYtdl'\n\n```\n\n\n\nand then fetch new episodes like so:\n\n\n\n```sh\n\n$ podchamp fetch\n\nFetching mbmbam\n\n```\n\n\n\nIf there are new episodes of MBMBaM, podchamp launches `PODCHAMP_DOWNLOADER` with the download link\n\nas an argument. By default this is just wget, which is not super-useful; it's intended that you\n\nwrite your own script that does whatever you feel is appropriate. 
For example, here's a simplified\n\nversion of mine:\n\n\n\n```fish\n\n#!/usr/bin/fish\n\nset dir \"$HOME/podcasts/$PODCHAMP_FEED\"\n\nmkdir -p \"$dir\"\n\ncd \"$dir\"\n\nwget -q \"$PODCAST_URI\" -O - | \\\n\n nice ffmpeg -y \\\n\n -i pipe:0 -c:a libopus -b:a 64k \\\n\n -metadata title=\"$PODCHAMP_DATE - $PODCHAMP_TITLE\" \\\n\n -metadata artist=\"$PODCHAMP_FEED\" \\\n\n \"$PODCHAMP_DATE - $PODCHAMP_TITLE.opus\"\n", "file_path": "README.md", "rank": 45, "score": 6.517355632161014 }, { "content": "# to hell with bean dad\n\n$ podchamp rm roderickontheline\n\n```\n\n\n\nIn case you want to re-download previous episodes of a podcast (say, you lost the files, or want\n\nfresh copies to transcode them differently), you can reset the feed:\n\n\n\n```sh\n\n$ podchamp reset guaranteedaudio\n\n```\n\n\n\n## when\n\n\n\nThis will be considered done (i.e. 1.0) when I'm happy with it.\n\n\n\n## where\n\n\n\nOn linux, definitely. On windows or other platforms, possibly, but you're on your own; please don't\n\nask for support.\n\n\n\n## todo\n\n### yes\n\n\n\n- Config file - pretty minimal, probably.\n\n- More metadata - only a few pieces of feed and episode metadata are available to download scripts,\n\n and it would be trivial to export more.\n\n- Tests - duh.\n\n- Better docs - the above is incomplete; I need to explain environment variables, command-line\n\n options, and `fetch-since` dates.\n\n\n\n### maybe\n\n\n\n- Self-downloading - this could be added without too much hassle or bloat, but really it's\n\n out-of-scope.\n\n- More robust ordering and episode tracking - currently, podchamp ignores any feed item that has no\n\n `pubDate` or `guid`; this might be improvable without undue work.\n\n- Feeds listed in a text file - every time I think about this, it seems inherently brittle and\n\n error-prone; an \"import\" feature might be useful, though.\n\n\n\n### nope\n\n\n\n- Playback - is what media players are for.\n\n- Tagging - an absolute tar-pit of complexity; can 
already be accomplished more reliably by existing\n\n tools\n\n- Support for broken feeds - we may be stuck with the distaster that is RSS, but I have no interest\n\n in making podchamp try to work around _totally_ mangled feeds.\n\n\n\n## whence\n\n\n\n[Rachel Knight](https://automorphi.city/).\n\n\n\n## whither\n\n\n\nAll code here is MIT licensed.\n\n\n", "file_path": "README.md", "rank": 46, "score": 6.11982768202982 }, { "content": "# Changelog\n\nAll notable changes to this project will be documented in this file.\n\n\n\nThe format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),\n\nand this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).\n\n\n\n## [Unreleased]\n\n\n\n## [0.4.0] - 2021-04-29\n\n### Changed\n\n- Updated dependencies.\n\n### Added\n\n- Feeds are now fetched concurrently.\n\n- A single running instance is ensured with a lock directory.\n\n\n\n## [0.3.0] - 2021-01-06\n\n### Changed\n\n- Updated dependencies, including Tokio to 1.0.\n\n### Removed\n\n- dotenv support.\n\n### Fixed\n\n- `mod ... 
backlog` no longer lies in its success message.\n\n\n\n## [0.2.1] - 2020-11-08\n\n### Changed\n\n- Updated dependencies.\n\n\n\n## [0.2.0] - 2020-10-30\n\n### Added\n\n- Support for Atom and JSON feeds.\n\n\n\n## [0.1.0] - 2020-10-28\n\nInitial release.\n\n\n\n[Unreleased]: https://github.com/rkanati/podchamp/tree/master\n\n[0.4.0]: https://github.com/rkanati/podchamp/releases/tag/0.4.0\n\n[0.3.0]: https://github.com/rkanati/podchamp/releases/tag/0.3.0\n\n[0.2.1]: https://github.com/rkanati/podchamp/releases/tag/0.2.1\n\n[0.2.0]: https://github.com/rkanati/podchamp/tree/662f12ec382167d0f458272c26102d38d50f1577\n\n[0.1.0]: https://github.com/rkanati/podchamp/tree/06aeee5a1b5d37ba537c5295c9e2c35f0c873e2a\n\n\n", "file_path": "CHANGELOG.md", "rank": 47, "score": 4.1706039051331985 }, { "content": " let date = date.format(&opts.date_format)\n\n .to_string();\n\n\n\n let envs = [\n\n (\"PODCHAMP_FEED\", Some(&feed.name[..])),\n\n (\"PODCHAMP_DATE\", Some(&date[..])),\n\n (\"PODCHAMP_TITLE\", item.title.as_ref().map(|title| &title.content[..])),\n\n // (\"PODCHAMP_AUTHOR\", item.author()),\n\n // (\"PODCHAMP_DESCRIPTION\", item.summary),\n\n ];\n\n\n\n for (var, value) in envs.iter() {\n\n if let Some(value) = value {\n\n command.env(var, value);\n\n }\n\n }\n\n\n\n let child = command.spawn()?;\n\n let output = child.wait_with_output().await?;\n\n if !output.status.success() {\n\n bail!(\"Download command failed with code {:?}\", output.status.code());\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/fetch.rs", "rank": 48, "score": 4.124485587674967 }, { "content": " /// The name of the feed to modify\n\n feed: String,\n\n\n\n #[clap(subcommand)]\n\n how: Modification,\n\n },\n\n\n\n /// Fetch latest episodes\n\n Fetch {\n\n /// A particular feed to fetch\n\n feed: Option<String>,\n\n },\n\n\n\n /// Forget about episodes fetched previously\n\n Reset {\n\n /// The feed whose progress should be forgotten\n\n feed: String,\n\n },\n\n}\n\n\n", "file_path": 
"src/options.rs", "rank": 49, "score": 4.048915342478214 }, { "content": "# Release procedure\n\n- bump version number\n\n - follow semver; does this release contain a breaking change?\n\n- `cargo update`\n\n- test\n\n - fix problems from dep update if necessary\n\n- `git commit` as usual\n\n- update `CHANGELOG.md`\n\n - chicken-and-egg with `git tag`; figure out the release link url\n\n- `git commit` with shortlog for release\n\n- `git tag -s $VERSION`\n\n- `git push`\n\n- `git push --tags`\n\n- release on github?\n\n - probably not; tags are fine by themselves\n\n- `cargo publish`\n\n\n", "file_path": "release-procedure.md", "rank": 50, "score": 2.975888229679994 } ]
Rust
src/from.rs
Eijebong/derive_more
591918e68bb695cb90842ae3a1d70551ade64be2
use std::collections::HashMap; use std::ops::Index; use quote::{ToTokens, Tokens}; use syn::{Data, DataEnum, DeriveInput, Field, Fields}; use utils::{field_idents, get_field_types, named_to_vec, number_idents, unnamed_to_vec}; pub fn expand(input: &DeriveInput, trait_name: &str) -> Tokens { match input.data { Data::Struct(ref data_struct) => match data_struct.fields { Fields::Unnamed(ref fields) => tuple_from(input, &unnamed_to_vec(fields)), Fields::Named(ref fields) => struct_from(input, &named_to_vec(fields)), Fields::Unit => struct_from(input, &[]), }, Data::Enum(ref data_enum) => enum_from(input, data_enum), _ => panic!(format!( "Only structs and enums can use derive({})", trait_name )), } } pub fn from_impl<T: ToTokens>(input: &DeriveInput, fields: &[&Field], body: T) -> Tokens { let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let input_type = &input.ident; let original_types = &get_field_types(fields); quote!{ impl#impl_generics ::std::convert::From<(#(#original_types),*)> for #input_type#ty_generics #where_clause { #[allow(unused_variables)] #[inline] fn from(original: (#(#original_types),*)) -> #input_type#ty_generics { #body } } } } fn tuple_from(input: &DeriveInput, fields: &[&Field]) -> Tokens { let input_type = &input.ident; let body = tuple_body(input_type, fields); from_impl(input, fields, body) } fn tuple_body<T: ToTokens>(return_type: T, fields: &[&Field]) -> Tokens { if fields.len() == 1 { quote!(#return_type(original)) } else { let field_names = &number_idents(fields.len()); quote!(#return_type(#(original.#field_names),*)) } } fn struct_from(input: &DeriveInput, fields: &[&Field]) -> Tokens { let input_type = &input.ident; let body = struct_body(input_type, fields); from_impl(input, fields, body) } fn struct_body<T: ToTokens>(return_type: T, fields: &[&Field]) -> Tokens { if fields.len() == 1 { let field_name = &fields[0].ident; quote!(#return_type{#field_name: original}) } else { let argument_field_names = 
&number_idents(fields.len()); let field_names = &field_idents(fields); quote!(#return_type{#(#field_names: original.#argument_field_names),*}) } } fn enum_from(input: &DeriveInput, data_enum: &DataEnum) -> Tokens { let mut type_signature_counts = HashMap::new(); let input_type = &input.ident; for variant in &data_enum.variants { match variant.fields { Fields::Unnamed(ref fields) => { let original_types = unnamed_to_vec(fields).iter().map(|f| &f.ty).collect(); let counter = type_signature_counts.entry(original_types).or_insert(0); *counter += 1; } Fields::Named(ref fields) => { let original_types = named_to_vec(fields).iter().map(|f| &f.ty).collect(); let counter = type_signature_counts.entry(original_types).or_insert(0); *counter += 1; } Fields::Unit => { let counter = type_signature_counts.entry(vec![]).or_insert(0); *counter += 1; } } } let mut tokens = Tokens::new(); for variant in &data_enum.variants { match variant.fields { Fields::Unnamed(ref fields) => { let field_vec = &unnamed_to_vec(fields); let original_types = get_field_types(field_vec); if *type_signature_counts.index(&original_types) == 1 { let variant_ident = &variant.ident; let body = tuple_body(quote!(#input_type::#variant_ident), field_vec); from_impl(input, field_vec, body).to_tokens(&mut tokens) } } Fields::Named(ref fields) => { let field_vec = &named_to_vec(fields); let original_types = get_field_types(field_vec); if *type_signature_counts.index(&original_types) == 1 { let variant_ident = &variant.ident; let body = struct_body(quote!(#input_type::#variant_ident), field_vec); from_impl(input, field_vec, body).to_tokens(&mut tokens) } } Fields::Unit => { if *type_signature_counts.index(&vec![]) == 1 { let variant_ident = &variant.ident; let body = struct_body(quote!(#input_type::#variant_ident), &[]); from_impl(input, &[], body).to_tokens(&mut tokens) } } } } tokens }
use std::collections::HashMap; use std::ops::Index; use quote::{ToTokens, Tokens}; use syn::{Data, DataEnum, DeriveInput, Field, Fields}; use utils::{field_idents, get_field_types, named_to_vec, number_idents, unnamed_to_vec}; pub fn expand(input: &DeriveInput, trait_name: &str) -> Tokens { match input.data { Data::Struct(ref data_struct) => match data_struct.fields { Fields::Unnamed(ref fields) => tuple_from(input, &unnamed_to_vec(fields)), Fields::Named(ref fields) => struct_from(input, &named_to_vec(fields)), Fields::Unit => struct_from(input, &[]), }, Data::Enum(ref data_enum) => enum_from(input, data_enum), _ => panic!(format!( "Only structs and enums can use derive({})", trait_name )), } } pub fn from_impl<T: ToTokens>(input: &DeriveInput, fields: &[&Field], body: T) -> Tokens { let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl(); let input_type = &input.ident; let original_types = &get_field_types(fields); quote!{ impl#impl_generics ::std::convert::From<(#(#original_types),*)> for #input_type#ty_generics #where_clause { #[allow(unused_variables)] #[inline] fn from(original: (#(#original_types),*)) -> #input_type#ty_generics { #body } } } } fn tuple_from(input: &DeriveInput, fields: &[&Field]) -> Tokens { let input_type = &input.ident; let body = tuple_body(input_type, fields); from_impl(input, fields, body) } fn tuple_body<T: ToTokens>(return_type: T, fields: &[&Field]) -> Tokens { if fields.len() == 1 { quote!(#return_type(original)) } else { let field_names = &number_idents(fields.len()); quote!(#return_type(#(original.#field_names),*)) } } fn struct_from(input: &DeriveInput, fields: &[&Field]) -> Tokens { let input_type = &input.ident; let body = struct_body(input_type, fields); from_impl(input, fields, body) } fn struct_body<T: ToTokens>(return_type: T, fields: &[&Field]) -> Tokens { if fields.len() == 1 { let field_name = &
*counter += 1; } Fields::Named(ref fields) => { let original_types = named_to_vec(fields).iter().map(|f| &f.ty).collect(); let counter = type_signature_counts.entry(original_types).or_insert(0); *counter += 1; } Fields::Unit => { let counter = type_signature_counts.entry(vec![]).or_insert(0); *counter += 1; } } } let mut tokens = Tokens::new(); for variant in &data_enum.variants { match variant.fields { Fields::Unnamed(ref fields) => { let field_vec = &unnamed_to_vec(fields); let original_types = get_field_types(field_vec); if *type_signature_counts.index(&original_types) == 1 { let variant_ident = &variant.ident; let body = tuple_body(quote!(#input_type::#variant_ident), field_vec); from_impl(input, field_vec, body).to_tokens(&mut tokens) } } Fields::Named(ref fields) => { let field_vec = &named_to_vec(fields); let original_types = get_field_types(field_vec); if *type_signature_counts.index(&original_types) == 1 { let variant_ident = &variant.ident; let body = struct_body(quote!(#input_type::#variant_ident), field_vec); from_impl(input, field_vec, body).to_tokens(&mut tokens) } } Fields::Unit => { if *type_signature_counts.index(&vec![]) == 1 { let variant_ident = &variant.ident; let body = struct_body(quote!(#input_type::#variant_ident), &[]); from_impl(input, &[], body).to_tokens(&mut tokens) } } } } tokens }
fields[0].ident; quote!(#return_type{#field_name: original}) } else { let argument_field_names = &number_idents(fields.len()); let field_names = &field_idents(fields); quote!(#return_type{#(#field_names: original.#argument_field_names),*}) } } fn enum_from(input: &DeriveInput, data_enum: &DataEnum) -> Tokens { let mut type_signature_counts = HashMap::new(); let input_type = &input.ident; for variant in &data_enum.variants { match variant.fields { Fields::Unnamed(ref fields) => { let original_types = unnamed_to_vec(fields).iter().map(|f| &f.ty).collect(); let counter = type_signature_counts.entry(original_types).or_insert(0);
random
[ { "content": "/// Provides the hook to expand `#[derive(Into)]` into an implementation of `Into`\n\npub fn expand(input: &DeriveInput, _: &str) -> Tokens {\n\n let input_type = &input.ident;\n\n let field_vec: Vec<_>;\n\n let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();\n\n let (field_names, fields) = match input.data {\n\n Data::Struct(ref data_struct) => match data_struct.fields {\n\n Fields::Unnamed(ref fields) => {\n\n field_vec = unnamed_to_vec(fields);\n\n (tuple_field_names(&field_vec), field_vec)\n\n }\n\n Fields::Named(ref fields) => {\n\n field_vec = named_to_vec(fields);\n\n (struct_field_names(&field_vec), field_vec)\n\n }\n\n Fields::Unit => (vec![], vec![]),\n\n },\n\n _ => panic!(\"Only structs can derive Into\"),\n\n };\n\n\n\n let original_types = &get_field_types(&fields);\n", "file_path": "src/into.rs", "rank": 0, "score": 234312.4995418685 }, { "content": "/// Provides the hook to expand `#[derive(Constructor)]` into an implementation of `Constructor`\n\npub fn expand(input: &DeriveInput, _: &str) -> Tokens {\n\n let input_type = &input.ident;\n\n let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();\n\n let ((body, vars), fields) = match input.data {\n\n Data::Struct(ref data_struct) => match data_struct.fields {\n\n Fields::Unnamed(ref fields) => {\n\n let field_vec = unnamed_to_vec(fields);\n\n (tuple_body(input_type, &field_vec), field_vec)\n\n }\n\n Fields::Named(ref fields) => {\n\n let field_vec = named_to_vec(fields);\n\n (struct_body(input_type, &field_vec), field_vec)\n\n }\n\n Fields::Unit => (struct_body(input_type, &[]), vec![]),\n\n },\n\n _ => panic!(\"Only structs can derive a constructor\"),\n\n };\n\n let original_types = &get_field_types(&fields);\n\n quote!{\n\n #[allow(missing_docs)]\n\n impl#impl_generics #input_type#ty_generics #where_clause {\n\n #[inline]\n\n pub fn new(#(#vars: #original_types),*) -> #input_type#ty_generics {\n\n #body\n\n }\n\n }\n\n 
}\n\n}\n\n\n", "file_path": "src/constructor.rs", "rank": 1, "score": 229738.71005918778 }, { "content": "/// Provides the hook to expand `#[derive(TryInto)]` into an implementation of `TryInto`\n\npub fn expand(input: &DeriveInput, _: &str) -> Tokens {\n\n match input.data {\n\n Data::Enum(ref data_enum) => enum_try_into(input, data_enum),\n\n _ => panic!(\"Only enums can derive TryInto\"),\n\n }\n\n}\n\n\n", "file_path": "src/try_into.rs", "rank": 2, "score": 229738.71005918778 }, { "content": "/// Provides the hook to expand `#[derive(FromStr)]` into an implementation of `From`\n\npub fn expand(input: &DeriveInput, trait_name: &str) -> Tokens {\n\n let trait_path = &quote!(::std::str::FromStr);\n\n let generics = add_extra_ty_param_bound(&input.generics, trait_path);\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n let input_type = &input.ident;\n\n let (result, field_type) = match input.data {\n\n Data::Struct(ref data_struct) => match data_struct.fields {\n\n Fields::Unnamed(ref fields) => {\n\n tuple_from_str(input_type, trait_name, &unnamed_to_vec(fields))\n\n }\n\n Fields::Named(ref fields) => {\n\n struct_from_str(input_type, trait_name, &named_to_vec(fields))\n\n }\n\n Fields::Unit => panic_one_field(trait_name),\n\n },\n\n _ => panic_one_field(trait_name),\n\n };\n\n quote!{\n\n impl#impl_generics #trait_path for #input_type#ty_generics #where_clause\n\n {\n\n type Err = <#field_type as #trait_path>::Err;\n\n #[inline]\n\n fn from_str(src: &str) -> ::std::result::Result<Self, Self::Err> {\n\n return ::std::result::Result::Ok(#result)\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/from_str.rs", "rank": 4, "score": 208772.5005448555 }, { "content": "/// Provides the hook to expand `#[derive(Index)]` into an implementation of `From`\n\npub fn expand(input: &DeriveInput, trait_name: &str) -> Tokens {\n\n let trait_ident = Ident::from(trait_name);\n\n let index_type = &Ident::from(\"__IdxT\");\n\n let trait_path = 
&quote!(::std::ops::#trait_ident<#index_type>);\n\n let input_type = &input.ident;\n\n let field_vec: Vec<&Field>;\n\n let member = match input.data {\n\n Data::Struct(ref data_struct) => match data_struct.fields {\n\n Fields::Unnamed(ref fields) => {\n\n field_vec = unnamed_to_vec(fields);\n\n tuple_from_str(trait_name, &field_vec)\n\n }\n\n Fields::Named(ref fields) => {\n\n field_vec = named_to_vec(fields);\n\n struct_from_str(trait_name, &field_vec)\n\n }\n\n Fields::Unit => panic_one_field(trait_name),\n\n },\n\n _ => panic_one_field(trait_name),\n\n };\n", "file_path": "src/index.rs", "rank": 8, "score": 199580.7598387329 }, { "content": "/// Provides the hook to expand `#[derive(Display)]` into an implementation of `From`\n\npub fn expand(input: &DeriveInput, trait_name: &str) -> Tokens {\n\n let trait_ident = Ident::from(trait_name);\n\n let trait_path = &quote!(::std::fmt::#trait_ident);\n\n let generics = add_extra_ty_param_bound(&input.generics, trait_path);\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n let input_type = &input.ident;\n\n let (member, field_type) = match input.data {\n\n Data::Struct(ref data_struct) => match data_struct.fields {\n\n Fields::Unnamed(ref fields) => tuple_from_str(trait_name, &unnamed_to_vec(fields)),\n\n Fields::Named(ref fields) => struct_from_str(trait_name, &named_to_vec(fields)),\n\n Fields::Unit => panic_one_field(trait_name),\n\n },\n\n _ => panic_one_field(trait_name),\n\n };\n\n quote!{\n\n impl#impl_generics #trait_path for #input_type#ty_generics #where_clause\n\n {\n\n #[inline]\n\n fn fmt(&self, formatter: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {\n\n <#field_type as #trait_path>::fmt(&#member, formatter)\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/display.rs", "rank": 9, "score": 199580.75983873286 }, { "content": "/// Provides the hook to expand `#[derive(Index)]` into an implementation of `From`\n\npub fn expand(input: &DeriveInput, trait_name: &str) -> 
Tokens {\n\n let trait_ident = Ident::from(trait_name);\n\n let trait_path = &quote!(::std::ops::#trait_ident);\n\n let input_type = &input.ident;\n\n let field_vec: Vec<&Field>;\n\n let member = match input.data {\n\n Data::Struct(ref data_struct) => match data_struct.fields {\n\n Fields::Unnamed(ref fields) => {\n\n field_vec = unnamed_to_vec(fields);\n\n tuple_from_str(trait_name, &field_vec)\n\n }\n\n Fields::Named(ref fields) => {\n\n field_vec = named_to_vec(fields);\n\n struct_from_str(trait_name, &field_vec)\n\n }\n\n Fields::Unit => panic_one_field(trait_name),\n\n },\n\n _ => panic_one_field(trait_name),\n\n };\n\n let field_type = &field_vec[0].ty;\n", "file_path": "src/deref.rs", "rank": 10, "score": 199580.7598387329 }, { "content": "pub fn expand(input: &DeriveInput, trait_name: &str) -> Tokens {\n\n let trait_ident = Ident::from(trait_name);\n\n let method_name = trait_name.to_lowercase();\n\n let method_ident = &Ident::from(method_name);\n\n let input_type = &input.ident;\n\n\n\n let generics = add_extra_type_param_bound_op_output(&input.generics, &trait_ident);\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n\n\n let (output_type, block) = match input.data {\n\n Data::Struct(ref data_struct) => match data_struct.fields {\n\n Fields::Unnamed(ref fields) => (\n\n quote!(#input_type#ty_generics),\n\n tuple_content(input_type, &unnamed_to_vec(fields), method_ident),\n\n ),\n\n Fields::Named(ref fields) => (\n\n quote!(#input_type#ty_generics),\n\n struct_content(input_type, &named_to_vec(fields), method_ident),\n\n ),\n\n _ => panic!(format!(\"Unit structs cannot use derive({})\", trait_name)),\n", "file_path": "src/not_like.rs", "rank": 11, "score": 199578.33890381386 }, { "content": "fn struct_from_str<'a>(trait_name: &str, fields: &[&'a Field]) -> Tokens {\n\n if fields.len() != 1 {\n\n panic_one_field(trait_name)\n\n };\n\n let field = &fields[0];\n\n let field_ident = &field.ident;\n\n 
quote!(self.#field_ident)\n\n}\n", "file_path": "src/deref.rs", "rank": 12, "score": 196687.0072550426 }, { "content": "fn struct_from_str<'a>(trait_name: &str, fields: &[&'a Field]) -> Tokens {\n\n if fields.len() != 1 {\n\n panic_one_field(trait_name)\n\n };\n\n let field = &fields[0];\n\n let field_ident = &field.ident;\n\n quote!(self.#field_ident)\n\n}\n", "file_path": "src/index.rs", "rank": 13, "score": 196687.0072550426 }, { "content": "/// Provides the hook to expand `#[derive(Index)]` into an implementation of `From`\n\npub fn expand(input: &DeriveInput, trait_name: &str) -> Tokens {\n\n let trait_ident = Ident::from(trait_name);\n\n let trait_path = &quote!(::std::ops::#trait_ident);\n\n let input_type = &input.ident;\n\n let field_vec: Vec<&Field>;\n\n let member = match input.data {\n\n Data::Struct(ref data_struct) => match data_struct.fields {\n\n Fields::Unnamed(ref fields) => {\n\n field_vec = unnamed_to_vec(fields);\n\n tuple_from_str(trait_name, &field_vec)\n\n }\n\n Fields::Named(ref fields) => {\n\n field_vec = named_to_vec(fields);\n\n struct_from_str(trait_name, &field_vec)\n\n }\n\n Fields::Unit => panic_one_field(trait_name),\n\n },\n\n _ => panic_one_field(trait_name),\n\n };\n\n let field_type = &field_vec[0].ty;\n", "file_path": "src/deref_mut.rs", "rank": 14, "score": 195851.04596060427 }, { "content": "/// Provides the hook to expand `#[derive(IndexMut)]` into an implementation of `From`\n\npub fn expand(input: &DeriveInput, trait_name: &str) -> Tokens {\n\n let trait_ident = Ident::from(trait_name);\n\n let index_type = &Ident::from(\"__IdxT\");\n\n let trait_path = &quote!(::std::ops::#trait_ident<#index_type>);\n\n let input_type = &input.ident;\n\n let field_vec: Vec<&Field>;\n\n let member = match input.data {\n\n Data::Struct(ref data_struct) => match data_struct.fields {\n\n Fields::Unnamed(ref fields) => {\n\n field_vec = unnamed_to_vec(fields);\n\n tuple_from_str(trait_name, &field_vec)\n\n }\n\n Fields::Named(ref fields) => 
{\n\n field_vec = named_to_vec(fields);\n\n struct_from_str(trait_name, &field_vec)\n\n }\n\n Fields::Unit => panic_one_field(trait_name),\n\n },\n\n _ => panic_one_field(trait_name),\n\n };\n", "file_path": "src/index_mut.rs", "rank": 15, "score": 195850.99522350222 }, { "content": "pub fn expand(input: &DeriveInput, trait_name: &str) -> Tokens {\n\n let trait_ident = Ident::from(trait_name);\n\n let trait_path = &quote!(::std::ops::#trait_ident);\n\n let method_name = trait_name.to_lowercase();\n\n let method_ident = &Ident::from(method_name);\n\n let input_type = &input.ident;\n\n\n\n let (block, fields) = match input.data {\n\n Data::Struct(ref data_struct) => match data_struct.fields {\n\n Fields::Unnamed(ref fields) => {\n\n let field_vec = unnamed_to_vec(fields);\n\n (\n\n tuple_content(input_type, &field_vec, method_ident),\n\n field_vec,\n\n )\n\n }\n\n Fields::Named(ref fields) => {\n\n let field_vec = named_to_vec(fields);\n\n (\n\n struct_content(input_type, &field_vec, method_ident),\n", "file_path": "src/mul_like.rs", "rank": 16, "score": 195848.6250256853 }, { "content": "pub fn expand(input: &DeriveInput, trait_name: &str) -> Tokens {\n\n let trait_ident = Ident::from(trait_name);\n\n let method_name = trait_name.to_lowercase();\n\n let method_ident = Ident::from(method_name);\n\n let input_type = &input.ident;\n\n\n\n let generics = add_extra_type_param_bound_op_output(&input.generics, &trait_ident);\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n\n\n let (output_type, block) = match input.data {\n\n Data::Struct(ref data_struct) => match data_struct.fields {\n\n Fields::Unnamed(ref fields) => (\n\n quote!(#input_type#ty_generics),\n\n tuple_content(input_type, &unnamed_to_vec(fields), &method_ident),\n\n ),\n\n Fields::Named(ref fields) => (\n\n quote!(#input_type#ty_generics),\n\n struct_content(input_type, &named_to_vec(fields), &method_ident),\n\n ),\n\n _ => panic!(format!(\"Unit structs cannot use 
derive({})\", trait_name)),\n", "file_path": "src/add_like.rs", "rank": 17, "score": 195848.6250256853 }, { "content": "fn enum_try_into(input: &DeriveInput, data_enum: &DataEnum) -> Tokens {\n\n let mut variants_per_types = HashMap::new();\n\n let (impl_generics, ty_generics, where_clause) = input.generics.split_for_impl();\n\n let input_type = &input.ident;\n\n\n\n for variant in &data_enum.variants {\n\n let original_types = match variant.fields {\n\n Fields::Unnamed(ref fields) => unnamed_to_vec(fields).iter().map(|f| &f.ty).collect(),\n\n Fields::Named(ref fields) => named_to_vec(fields).iter().map(|f| &f.ty).collect(),\n\n Fields::Unit => vec![],\n\n };\n\n variants_per_types\n\n .entry(original_types)\n\n .or_insert_with(Vec::new)\n\n .push(variant);\n\n }\n\n\n\n let mut tokens = Tokens::new();\n\n\n\n for (ref original_types, ref variants) in variants_per_types {\n", "file_path": "src/try_into.rs", "rank": 18, "score": 194470.76104845892 }, { "content": "fn struct_from_str<'a>(trait_name: &str, fields: &[&'a Field]) -> Tokens {\n\n if fields.len() != 1 {\n\n panic_one_field(trait_name)\n\n };\n\n let field = &fields[0];\n\n let field_ident = &field.ident;\n\n quote!(self.#field_ident)\n\n}\n", "file_path": "src/index_mut.rs", "rank": 19, "score": 192803.29417234924 }, { "content": "fn struct_from_str<'a>(trait_name: &str, fields: &[&'a Field]) -> Tokens {\n\n if fields.len() != 1 {\n\n panic_one_field(trait_name)\n\n };\n\n let field = &fields[0];\n\n let field_ident = &field.ident;\n\n quote!(self.#field_ident)\n\n}\n", "file_path": "src/deref_mut.rs", "rank": 20, "score": 192803.29417234924 }, { "content": "pub fn expand(input: &DeriveInput, trait_name: &str) -> Tokens {\n\n let trait_ident = Ident::from(trait_name);\n\n let method_name = trait_name.to_string();\n\n let method_name = method_name.trim_right_matches(\"Assign\");\n\n let method_name = method_name.to_lowercase();\n\n let method_ident = Ident::from(method_name.to_string() + \"_assign\");\n\n 
let input_type = &input.ident;\n\n\n\n let generics = add_extra_ty_param_bound_op(&input.generics, &trait_ident);\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n\n\n let exprs = match input.data {\n\n Data::Struct(ref data_struct) => match data_struct.fields {\n\n Fields::Unnamed(ref fields) => tuple_exprs(&unnamed_to_vec(fields), &method_ident),\n\n Fields::Named(ref fields) => struct_exprs(&named_to_vec(fields), &method_ident),\n\n _ => panic!(format!(\"Unit structs cannot use derive({})\", trait_name)),\n\n },\n\n\n\n _ => panic!(format!(\"Only structs can use derive({})\", trait_name)),\n\n };\n", "file_path": "src/add_assign_like.rs", "rank": 21, "score": 192348.8032337064 }, { "content": "pub fn expand(input: &DeriveInput, trait_name: &str) -> Tokens {\n\n let trait_ident = Ident::from(trait_name);\n\n let trait_path = &quote!(::std::ops::#trait_ident);\n\n let method_name = trait_name.to_string();\n\n let method_name = method_name.trim_right_matches(\"Assign\");\n\n let method_name = method_name.to_lowercase();\n\n let method_ident = Ident::from(method_name.to_string() + \"_assign\");\n\n let input_type = &input.ident;\n\n\n\n let (exprs, fields) = match input.data {\n\n Data::Struct(ref data_struct) => match data_struct.fields {\n\n Fields::Unnamed(ref fields) => {\n\n let field_vec = unnamed_to_vec(fields);\n\n (tuple_exprs(&field_vec, &method_ident), field_vec)\n\n }\n\n Fields::Named(ref fields) => {\n\n let field_vec = named_to_vec(fields);\n\n (struct_exprs(&field_vec, &method_ident), field_vec)\n\n }\n\n _ => panic!(format!(\"Unit structs cannot use derive({})\", trait_name)),\n", "file_path": "src/mul_assign_like.rs", "rank": 22, "score": 192348.8032337064 }, { "content": "fn struct_from_str<'a>(trait_name: &str, fields: &[&'a Field]) -> (Tokens, &'a Type) {\n\n if fields.len() != 1 {\n\n panic_one_field(trait_name)\n\n };\n\n let field = &fields[0];\n\n let field_ident = &field.ident;\n\n let field_type = 
&field.ty;\n\n (quote!(self.#field_ident), field_type)\n\n}\n", "file_path": "src/display.rs", "rank": 23, "score": 187455.19885556988 }, { "content": "pub fn struct_exprs(fields: &[&Field], method_ident: &Ident) -> Vec<Tokens> {\n\n let mut exprs = vec![];\n\n\n\n for field in fields {\n\n // It's safe to unwrap because struct fields always have an identifier\n\n let field_id = field.ident.as_ref().unwrap();\n\n // generates `x: self.x.add(rhs.x)`\n\n let expr = quote!(self.#field_id.#method_ident(rhs.#field_id));\n\n exprs.push(expr)\n\n }\n\n exprs\n\n}\n\n\n", "file_path": "src/add_like.rs", "rank": 25, "score": 176534.80148487858 }, { "content": "pub fn struct_exprs(fields: &[&Field], method_ident: &Ident) -> Vec<Tokens> {\n\n field_idents(fields)\n\n .iter()\n\n .map(|f| quote!(self.#f.#method_ident(rhs)))\n\n .collect()\n\n}\n", "file_path": "src/mul_like.rs", "rank": 26, "score": 176534.80148487858 }, { "content": "pub fn named_to_vec(fields: &FieldsNamed) -> Vec<&Field> {\n\n fields.named.iter().collect()\n\n}\n", "file_path": "src/utils.rs", "rank": 27, "score": 174342.2838372335 }, { "content": "pub fn unnamed_to_vec(fields: &FieldsUnnamed) -> Vec<&Field> {\n\n fields.unnamed.iter().collect()\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 28, "score": 174342.2838372335 }, { "content": "fn tuple_from_str<'a>(trait_name: &str, fields: &[&'a Field]) -> (Tokens) {\n\n if fields.len() != 1 {\n\n panic_one_field(trait_name)\n\n };\n\n quote!(self.0)\n\n}\n\n\n", "file_path": "src/index.rs", "rank": 30, "score": 173070.04916235656 }, { "content": "fn tuple_from_str<'a>(trait_name: &str, fields: &[&'a Field]) -> (Tokens) {\n\n if fields.len() != 1 {\n\n panic_one_field(trait_name)\n\n };\n\n quote!(self.0)\n\n}\n\n\n", "file_path": "src/deref.rs", "rank": 31, "score": 173070.04916235656 }, { "content": "fn tuple_from_str<'a>(trait_name: &str, fields: &[&'a Field]) -> (Tokens) {\n\n if fields.len() != 1 {\n\n panic_one_field(trait_name)\n\n };\n\n 
quote!(self.0)\n\n}\n\n\n", "file_path": "src/deref_mut.rs", "rank": 32, "score": 169961.99725430942 }, { "content": "fn tuple_from_str<'a>(trait_name: &str, fields: &[&'a Field]) -> (Tokens) {\n\n if fields.len() != 1 {\n\n panic_one_field(trait_name)\n\n };\n\n quote!(self.0)\n\n}\n\n\n", "file_path": "src/index_mut.rs", "rank": 33, "score": 169961.99725430942 }, { "content": "fn tuple_from_str<'a>(trait_name: &str, fields: &[&'a Field]) -> (Tokens, &'a Type) {\n\n if fields.len() != 1 {\n\n panic_one_field(trait_name)\n\n };\n\n let field = &fields[0];\n\n let field_type = &field.ty;\n\n (quote!(self.0), field_type)\n\n}\n\n\n", "file_path": "src/display.rs", "rank": 34, "score": 164613.90193753003 }, { "content": "fn struct_field_names(fields: &[&Field]) -> Vec<Tokens> {\n\n field_idents(fields)\n\n .iter()\n\n .map(|f| (*f).into_tokens())\n\n .collect()\n\n}\n", "file_path": "src/into.rs", "rank": 35, "score": 161451.35101215073 }, { "content": "fn struct_body(return_type: &Ident, fields: &[&Field]) -> (Tokens, Vec<Ident>) {\n\n let field_names: &Vec<Ident> = &field_idents(fields).iter().map(|f| **f).collect();\n\n let vars = field_names;\n\n (quote!(#return_type{#(#field_names: #vars),*}), vars.clone())\n\n}\n", "file_path": "src/constructor.rs", "rank": 36, "score": 160679.68108696726 }, { "content": "pub fn tuple_exprs(fields: &[&Field], method_ident: &Ident) -> Vec<Tokens> {\n\n number_idents(fields.len())\n\n .iter()\n\n .map(|i| quote!(self.#i.#method_ident(rhs)))\n\n .collect()\n\n}\n\n\n", "file_path": "src/mul_like.rs", "rank": 37, "score": 155101.396316832 }, { "content": "pub fn tuple_exprs(fields: &[&Field], method_ident: &Ident) -> Vec<Tokens> {\n\n let mut exprs = vec![];\n\n\n\n for i in 0..fields.len() {\n\n let i = Index::from(i);\n\n // generates `self.0.add(rhs.0)`\n\n let expr = quote!(self.#i.#method_ident(rhs.#i));\n\n exprs.push(expr);\n\n }\n\n exprs\n\n}\n\n\n", "file_path": "src/add_like.rs", "rank": 38, "score": 155101.396316832 }, { 
"content": "pub fn field_idents<'a>(fields: &'a [&'a Field]) -> Vec<&'a Ident> {\n\n fields\n\n .iter()\n\n .map(|f| {\n\n f.ident\n\n .as_ref()\n\n .expect(\"Tried to get field names of a tuple struct\")\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 40, "score": 141610.42947653474 }, { "content": "pub fn get_field_types<'a>(fields: &'a [&'a Field]) -> Vec<&'a Type> {\n\n get_field_types_iter(fields).collect()\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 41, "score": 139217.28065141238 }, { "content": "fn tuple_body(return_type: &Ident, fields: &[&Field]) -> (Tokens, Vec<Ident>) {\n\n let vars = &numbered_vars(fields.len(), \"\");\n\n (quote!(#return_type(#(#vars),*)), vars.clone())\n\n}\n\n\n", "file_path": "src/constructor.rs", "rank": 42, "score": 138564.71482672673 }, { "content": "fn panic_one_field(trait_name: &str) -> ! {\n\n panic!(format!(\n\n \"Only structs with one field can derive({})\",\n\n trait_name\n\n ))\n\n}\n\n\n", "file_path": "src/from_str.rs", "rank": 43, "score": 137927.59686517614 }, { "content": "fn tuple_field_names(fields: &[&Field]) -> Vec<Tokens> {\n\n number_idents(fields.len())\n\n .iter()\n\n .map(|f| f.into_tokens())\n\n .collect()\n\n}\n\n\n", "file_path": "src/into.rs", "rank": 44, "score": 136113.51181680395 }, { "content": "fn enum_content(input_type: &Ident, data_enum: &DataEnum, method_ident: &Ident) -> Tokens {\n\n let mut matches = vec![];\n\n let mut method_iter = iter::repeat(method_ident);\n\n\n\n for variant in &data_enum.variants {\n\n let subtype = &variant.ident;\n\n let subtype = quote!(#input_type::#subtype);\n\n\n\n match variant.fields {\n\n Fields::Unnamed(ref fields) => {\n\n // The patern that is outputted should look like this:\n\n // (Subtype(left_vars), TypePath(right_vars)) => Ok(TypePath(exprs))\n\n let size = unnamed_to_vec(fields).len();\n\n let l_vars = &numbered_vars(size, \"l_\");\n\n let r_vars = &numbered_vars(size, \"r_\");\n\n let method_iter = 
method_iter.by_ref();\n\n let matcher = quote!{\n\n (#subtype(#(#l_vars),*),\n\n #subtype(#(#r_vars),*)) => {\n\n ::std::result::Result::Ok(#subtype(#(#l_vars.#method_iter(#r_vars)),*))\n", "file_path": "src/add_like.rs", "rank": 45, "score": 134908.63685472446 }, { "content": "fn struct_content(input_type: &Ident, fields: &[&Field], method_ident: &Ident) -> Tokens {\n\n let mut exprs = vec![];\n\n\n\n for field in fields {\n\n // It's safe to unwrap because struct fields always have an identifier\n\n let field_id = field.ident.as_ref();\n\n // generates `x: self.x.not()`\n\n let expr = quote!(#field_id: self.#field_id.#method_ident());\n\n exprs.push(expr)\n\n }\n\n\n\n quote!(#input_type{#(#exprs),*})\n\n}\n\n\n", "file_path": "src/not_like.rs", "rank": 46, "score": 134364.85072835855 }, { "content": "fn struct_content(input_type: &Ident, fields: &[&Field], method_ident: &Ident) -> Tokens {\n\n // It's safe to unwrap because struct fields always have an identifier\n\n let exprs = struct_exprs(fields, method_ident);\n\n let field_names = field_idents(fields);\n\n\n\n quote!(#input_type{#(#field_names: #exprs),*})\n\n}\n\n\n", "file_path": "src/add_like.rs", "rank": 47, "score": 131752.33971693923 }, { "content": "fn panic_one_field(trait_name: &str) -> ! {\n\n panic!(format!(\n\n \"Only structs with one field can derive({})\",\n\n trait_name\n\n ))\n\n}\n\n\n", "file_path": "src/deref.rs", "rank": 48, "score": 128914.54488509681 }, { "content": "fn panic_one_field(trait_name: &str) -> ! {\n\n panic!(format!(\n\n \"Only structs with one field can derive({})\",\n\n trait_name\n\n ))\n\n}\n\n\n", "file_path": "src/display.rs", "rank": 49, "score": 128914.54488509678 }, { "content": "fn panic_one_field(trait_name: &str) -> ! 
{\n\n panic!(format!(\n\n \"Only structs with one field can derive({})\",\n\n trait_name\n\n ))\n\n}\n\n\n", "file_path": "src/index.rs", "rank": 50, "score": 128914.54488509681 }, { "content": "pub fn numbered_vars(count: usize, prefix: &str) -> Vec<Ident> {\n\n (0..count)\n\n .map(|i| Ident::from(format!(\"__{}{}\", prefix, i)))\n\n .collect()\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 51, "score": 128200.5413778095 }, { "content": "pub fn get_field_types_iter<'a>(fields: &'a [&'a Field]) -> Box<Iterator<Item = &'a Type> + 'a> {\n\n Box::new(fields.iter().map(|f| &f.ty))\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 52, "score": 126416.45705619283 }, { "content": "fn panic_one_field(trait_name: &str) -> ! {\n\n panic!(format!(\n\n \"Only structs with one field can derive({})\",\n\n trait_name\n\n ))\n\n}\n\n\n", "file_path": "src/index_mut.rs", "rank": 53, "score": 126173.88178879957 }, { "content": "fn panic_one_field(trait_name: &str) -> ! {\n\n panic!(format!(\n\n \"Only structs with one field can derive({})\",\n\n trait_name\n\n ))\n\n}\n\n\n", "file_path": "src/deref_mut.rs", "rank": 54, "score": 126173.88178879957 }, { "content": "fn struct_from_str<'a>(\n\n input_type: &Ident,\n\n trait_name: &str,\n\n fields: &[&'a Field],\n\n) -> (Tokens, &'a Type) {\n\n if fields.len() != 1 {\n\n panic_one_field(trait_name)\n\n };\n\n let field = &fields[0];\n\n let field_type = &field.ty;\n\n let field_ident = &field.ident;\n\n (\n\n quote!(#input_type{#field_ident: #field_type::from_str(src)?}),\n\n field_type,\n\n )\n\n}\n", "file_path": "src/from_str.rs", "rank": 55, "score": 124580.20427478188 }, { "content": "fn tuple_content<T: ToTokens>(input_type: &T, fields: &[&Field], method_ident: &Ident) -> Tokens {\n\n let mut exprs = vec![];\n\n\n\n for i in 0..fields.len() {\n\n let i = Index::from(i);\n\n // generates `self.0.add()`\n\n let expr = quote!(self.#i.#method_ident());\n\n exprs.push(expr);\n\n }\n\n\n\n 
quote!(#input_type(#(#exprs),*))\n\n}\n\n\n", "file_path": "src/not_like.rs", "rank": 56, "score": 114682.37336667474 }, { "content": "fn tuple_content<T: ToTokens>(input_type: &T, fields: &[&Field], method_ident: &Ident) -> Tokens {\n\n let exprs = tuple_exprs(fields, method_ident);\n\n quote!(#input_type(#(#exprs),*))\n\n}\n\n\n", "file_path": "src/add_like.rs", "rank": 57, "score": 112883.95136652596 }, { "content": "pub fn add_extra_where_clauses(generics: &Generics, type_where_clauses: Tokens) -> Generics {\n\n let mut type_where_clauses: WhereClause = parse_str(&type_where_clauses.to_string()).unwrap();\n\n let mut new_generics = generics.clone();\n\n if let Some(old_where) = new_generics.where_clause {\n\n type_where_clauses.predicates.extend(old_where.predicates)\n\n }\n\n new_generics.where_clause = Some(type_where_clauses);\n\n\n\n new_generics\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 58, "score": 107897.30742087359 }, { "content": "pub fn add_extra_generic_param(generics: &Generics, generic_param: Tokens) -> Generics {\n\n let generic_param: GenericParam = parse_str(&generic_param.to_string()).unwrap();\n\n let mut generics = generics.clone();\n\n generics.params.push(generic_param);\n\n\n\n generics\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 59, "score": 105804.02768279758 }, { "content": "pub fn add_where_clauses_for_new_ident<'a>(\n\n generics: &'a Generics,\n\n fields: &[&'a Field],\n\n type_ident: &Ident,\n\n type_where_clauses: Tokens,\n\n) -> Generics {\n\n let generic_param = if fields.len() > 1 {\n\n quote!(#type_ident: ::std::marker::Copy)\n\n } else {\n\n quote!(#type_ident)\n\n };\n\n\n\n let generics = add_extra_where_clauses(generics, type_where_clauses);\n\n add_extra_generic_param(&generics, generic_param)\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 60, "score": 104676.67663492818 }, { "content": "fn struct_content<'a, T: ToTokens>(\n\n input_type: &T,\n\n fields: &[&'a Field],\n\n method_ident: &Ident,\n\n) -> 
Tokens {\n\n let exprs = struct_exprs(fields, method_ident);\n\n let field_names = field_idents(fields);\n\n quote!(#input_type{#(#field_names: #exprs),*})\n\n}\n\n\n", "file_path": "src/mul_like.rs", "rank": 61, "score": 102392.85419282221 }, { "content": "pub fn add_extra_ty_param_bound<'a>(generics: &'a Generics, bound: &'a Tokens) -> Generics {\n\n let mut generics = generics.clone();\n\n let bound: TypeParamBound = parse_str(&bound.to_string()).unwrap();\n\n for type_param in &mut generics.type_params_mut() {\n\n type_param.bounds.push(bound.clone())\n\n }\n\n\n\n generics\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 62, "score": 100260.74440697306 }, { "content": "pub fn add_extra_type_param_bound_op_output<'a>(\n\n generics: &'a Generics,\n\n trait_ident: &'a Ident,\n\n) -> Generics {\n\n let mut generics = generics.clone();\n\n for type_param in &mut generics.type_params_mut() {\n\n let type_ident = &type_param.ident;\n\n let bound: TypeParamBound =\n\n parse_str(&quote!(::std::ops::#trait_ident<Output=#type_ident>).to_string()).unwrap();\n\n type_param.bounds.push(bound)\n\n }\n\n\n\n generics\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 63, "score": 99198.30824130648 }, { "content": "fn tuple_from_str<'a>(\n\n input_type: &Ident,\n\n trait_name: &str,\n\n fields: &[&'a Field],\n\n) -> (Tokens, &'a Type) {\n\n if fields.len() != 1 {\n\n panic_one_field(trait_name)\n\n };\n\n let field = &fields[0];\n\n let field_type = &field.ty;\n\n (quote!(#input_type(#field_type::from_str(src)?)), field_type)\n\n}\n\n\n", "file_path": "src/from_str.rs", "rank": 64, "score": 94919.88822251023 }, { "content": "pub fn number_idents(count: usize) -> Vec<Index> {\n\n (0..count).map(Index::from).collect()\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 65, "score": 92236.61105884366 }, { "content": "#[derive(FromStr)]\n\nstruct Point1D {\n\n x: i32,\n\n}\n", "file_path": "tests/from_str.rs", "rank": 66, "score": 89543.42654461505 }, { "content": 
"#[derive(FromStr)]\n\nstruct MyInt(i32);\n\n\n", "file_path": "tests/from_str.rs", "rank": 67, "score": 87406.56251375927 }, { "content": "#[derive(From, Not, Add)]\n\nenum StructEnum<T: Clone, U: Clone> {\n\n Struct { t: T },\n\n DoubleStruct { t: T, u: U },\n\n}\n", "file_path": "tests/generics.rs", "rank": 68, "score": 85578.48695537065 }, { "content": "fn enum_output_type_and_content(\n\n input: &DeriveInput,\n\n data_enum: &DataEnum,\n\n method_ident: &Ident,\n\n) -> (Tokens, Tokens) {\n\n let input_type = &input.ident;\n\n let (_, ty_generics, _) = input.generics.split_for_impl();\n\n let mut matches = vec![];\n\n let mut method_iter = iter::repeat(method_ident);\n\n // If the enum contains unit types that means it can error.\n\n let has_unit_type = data_enum.variants.iter().any(|v| v.fields == Fields::Unit);\n\n\n\n for variant in &data_enum.variants {\n\n let subtype = &variant.ident;\n\n let subtype = quote!(#input_type::#subtype);\n\n\n\n match variant.fields {\n\n Fields::Unnamed(ref fields) => {\n\n // The patern that is outputted should look like this:\n\n // (Subtype(vars)) => Ok(TypePath(exprs))\n", "file_path": "src/not_like.rs", "rank": 69, "score": 84426.46125111199 }, { "content": "fn tuple_content<'a, T: ToTokens>(\n\n input_type: &T,\n\n fields: &[&'a Field],\n\n method_ident: &Ident,\n\n) -> Tokens {\n\n let exprs = tuple_exprs(fields, method_ident);\n\n quote!(#input_type(#(#exprs),*))\n\n}\n\n\n", "file_path": "src/mul_like.rs", "rank": 70, "score": 76096.97278881392 }, { "content": "pub fn add_extra_ty_param_bound_op<'a>(generics: &'a Generics, trait_ident: &'a Ident) -> Generics {\n\n add_extra_ty_param_bound(generics, &quote!(::std::ops::#trait_ident))\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 71, "score": 75718.56304257034 }, { "content": "#[derive(Not)]\n\nenum EnumWithUnit {\n\n SmallInt(i32),\n\n Unit,\n\n}\n", "file_path": "tests/not.rs", "rank": 72, "score": 72937.86112600105 }, { "content": "#[derive(Eq, PartialEq, 
Debug)]\n\n#[derive(From)]\n\n#[derive(Add, Sub)]\n\n#[derive(Neg)]\n\nenum SimpleEnum {\n\n Int(i32),\n\n _Ints(i32, i32),\n\n LabeledInts { a: i32, b: i32 },\n\n _SomeUnit,\n\n}\n\n\n", "file_path": "tests/lib.rs", "rank": 73, "score": 71826.63104142409 }, { "content": "#[derive(Eq, PartialEq, Debug)]\n\n#[derive(From)]\n\n#[derive(Add, Sub)]\n\nenum MyIntEnum {\n\n SmallInt(i32),\n\n BigInt(i64),\n\n TwoInts(i32, i32),\n\n Point2D { x: i64, y: i64 },\n\n _UnsignedOne(u32),\n\n _UnsignedTwo(u32),\n\n _Uints1(u64, u64),\n\n _Uints2 { x: u64, y: u64 },\n\n Nothing,\n\n}\n\n\n", "file_path": "tests/lib.rs", "rank": 74, "score": 71826.54538741216 }, { "content": "#[derive(Eq, PartialEq, Debug)]\n\n#[derive(From)]\n\n#[derive(Add, Sub)]\n\nenum SimpleMyIntEnum {\n\n Int(i32),\n\n _UnsignedOne(u32),\n\n _UnsignedTwo(u32),\n\n}\n", "file_path": "tests/lib.rs", "rank": 75, "score": 70773.63040987955 }, { "content": "#[derive(Eq, PartialEq, Debug)]\n\n#[derive(From)]\n\n#[derive(Neg)]\n\nenum SimpleSignedIntEnum {\n\n Int(i32),\n\n Int2(i16),\n\n}\n\n\n", "file_path": "tests/lib.rs", "rank": 76, "score": 69774.92060854669 }, { "content": "#[derive(From, FromStr, Display, Index, Not, Add, Mul, IndexMut, AddAssign, Constructor)]\n\nstruct Struct<T: Clone> {\n\n t: T,\n\n}\n\n\n", "file_path": "tests/generics.rs", "rank": 77, "score": 69004.67720861256 }, { "content": "#[derive(Into)]\n\nstruct EmptyStruct {}\n\n\n", "file_path": "tests/into.rs", "rank": 78, "score": 67916.15826342111 }, { "content": "#[derive(From)]\n\nstruct EmptyStruct {}\n\n\n", "file_path": "tests/from.rs", "rank": 79, "score": 67916.15826342111 }, { "content": "#[derive(From)]\n\n#[derive(Into)]\n\n#[derive(Constructor)]\n\n#[derive(FromStr)]\n\n#[derive(Eq, PartialEq, Debug)]\n\n#[derive(Display)]\n\nstruct SimpleStruct {\n\n int1: u64,\n\n}\n\n\n", "file_path": "tests/lib.rs", "rank": 80, "score": 66867.85963282996 }, { "content": "#[derive(From)]\n\n#[derive(Constructor)]\n\n#[derive(Add, Sub, Mul, 
Div, Rem, BitAnd, BitOr, BitXor, Shr, Shl)]\n\n#[derive(Eq, PartialEq, Debug, Clone, Copy)]\n\n#[derive(AddAssign)]\n\nstruct NormalStruct {\n\n int1: u64,\n\n int2: u64,\n\n}\n\n\n", "file_path": "tests/lib.rs", "rank": 81, "score": 66864.61670404433 }, { "content": "#[derive(Constructor)]\n\nstruct EmptyStruct {}\n\n\n", "file_path": "tests/constructor.rs", "rank": 82, "score": 66864.30364281707 }, { "content": "#[derive(Eq, PartialEq, Debug)]\n\n#[derive(Add, Mul)]\n\nstruct DoubleUIntStruct {\n\n x: u32,\n\n y: u32,\n\n}\n\n\n", "file_path": "tests/lib.rs", "rank": 83, "score": 64923.260532754546 }, { "content": "#[derive(Not)]\n\nenum MixedInts {\n\n SmallInt(i32),\n\n BigInt(i64),\n\n TwoSmallInts(i32, i32),\n\n NamedSmallInts { x: i32, y: i32 },\n\n UnsignedOne(u32),\n\n UnsignedTwo(u32),\n\n}\n\n\n", "file_path": "tests/not.rs", "rank": 84, "score": 64697.81588609707 }, { "content": "#[derive(From)]\n\nenum MixedInts {\n\n SmallInt(i32),\n\n NamedBigInt { int: i64 },\n\n TwoSmallInts(i32, i32),\n\n NamedBigInts { x: i64, y: i64 },\n\n Unsigned(u32),\n\n NamedUnsigned { x: u32 },\n\n}\n", "file_path": "tests/from.rs", "rank": 85, "score": 64697.81588609707 }, { "content": "#[derive(Add)]\n\nenum MixedInts {\n\n SmallInt(i32),\n\n BigInt(i64),\n\n TwoSmallInts(i32, i32),\n\n NamedSmallInts { x: i32, y: i32 },\n\n UnsignedOne(u32),\n\n UnsignedTwo(u32),\n\n Unit,\n\n}\n", "file_path": "tests/add.rs", "rank": 86, "score": 63301.26832765922 }, { "content": "#[derive(Clone, Copy, TryInto)]\n\nenum MixedInts {\n\n SmallInt(i32),\n\n NamedBigInt { int: i64 },\n\n TwoSmallInts(i32, i32),\n\n NamedBigInts { x: i64, y: i64 },\n\n Unsigned(u32),\n\n NamedUnsigned { x: u32 },\n\n Unit,\n\n}\n\n\n", "file_path": "tests/try_into.rs", "rank": 87, "score": 63301.149905526574 }, { "content": "fn main() {\n\n if version_meta().unwrap().channel == Channel::Nightly {\n\n println!(\"cargo:rustc-cfg=feature=\\\"nightly\\\"\");\n\n }\n\n}\n", "file_path": "build.rs", "rank": 88, 
"score": 60655.95270955395 }, { "content": "#[derive(From, Not, Add)]\n\nenum TupleEnum<T: Clone, U: Clone> {\n\n Tuple(T),\n\n DoubleTuple(T, U),\n\n}\n\n\n", "file_path": "tests/generics.rs", "rank": 89, "score": 60240.64776002386 }, { "content": "#[derive(Eq, PartialEq, Debug)]\n\n#[derive(From, Into, Constructor)]\n\nstruct Unit;\n\n\n", "file_path": "tests/lib.rs", "rank": 90, "score": 60119.784744819204 }, { "content": "#[derive(Into)]\n\nstruct Point2D {\n\n x: i32,\n\n y: i32,\n\n}\n", "file_path": "tests/into.rs", "rank": 91, "score": 60119.63395563311 }, { "content": "#[derive(Not)]\n\nstruct Point2D {\n\n x: i32,\n\n y: i32,\n\n}\n\n\n", "file_path": "tests/not.rs", "rank": 92, "score": 60119.63395563311 }, { "content": "#[derive(From)]\n\nstruct EmptyTuple();\n\n\n", "file_path": "tests/from.rs", "rank": 93, "score": 60119.63395563311 }, { "content": "#[derive(From)]\n\nstruct EmptyUnit;\n\n\n", "file_path": "tests/from.rs", "rank": 94, "score": 60119.63395563311 }, { "content": "#[derive(From)]\n\nstruct Point2D {\n\n x: i32,\n\n y: i32,\n\n}\n\n\n", "file_path": "tests/from.rs", "rank": 95, "score": 60119.63395563311 }, { "content": "#[derive(Into)]\n\nstruct Point1D {\n\n x: i32,\n\n}\n\n\n", "file_path": "tests/into.rs", "rank": 96, "score": 60119.63395563311 }, { "content": "#[derive(From)]\n\nstruct Point1D {\n\n x: i32,\n\n}\n\n\n", "file_path": "tests/from.rs", "rank": 97, "score": 60119.63395563311 }, { "content": "#[derive(Into)]\n\nstruct EmptyUnit;\n\n\n", "file_path": "tests/into.rs", "rank": 98, "score": 60119.63395563311 }, { "content": "#[derive(Into)]\n\nstruct EmptyTuple();\n\n\n", "file_path": "tests/into.rs", "rank": 99, "score": 60119.63395563311 } ]
Rust
src/parser.rs
ikanago/qz
3e59c25af2107d72e1cbe776c15c0aadd42525fc
use crate::{ header::{HeaderName, HeaderValue}, method::Method, status::StatusCode, Uri, Version, }; use std::convert::TryFrom; use std::str; #[derive(Debug)] pub struct Parser<'a> { state: &'a [u8], } impl<'a> Parser<'a> { pub fn new(input: &'a [u8]) -> Self { Self { state: input } } pub fn consume(&mut self) -> Option<u8> { self.state.split_first().map(|(&b, tail)| { self.state = tail; b }) } pub fn read_until(&mut self, target: u8) -> Option<&[u8]> { let index = self.state.iter().position(|&b| b == target)?; let (found, tail) = self.state.split_at(index); self.state = tail; self.consume(); Some(found) } pub fn read_until_whitespace(&mut self) -> Option<&[u8]> { self.read_until(b' ') } pub fn expect(&mut self, target: u8, error: StatusCode) -> crate::Result<()> { match self.consume() { Some(b) if b == target => Ok(()), _ => Err(error), } } pub fn parse_request_line(&mut self) -> crate::Result<(Method, Uri, Version)> { let method = self.parse_method()?; let uri = self.parse_uri()?; let version = self.parse_version()?; self.expect(b'\n', StatusCode::BadRequest)?; Ok((method, uri, version)) } fn parse_method(&mut self) -> crate::Result<Method> { match self.read_until_whitespace() { Some(method) => Method::try_from(method), None => Err(StatusCode::BadRequest), } } fn parse_uri(&mut self) -> crate::Result<Uri> { let uri = self.read_until_whitespace().ok_or(StatusCode::BadRequest)?; if uri.starts_with(&[b'/']) { Ok(Uri::new(uri)) } else { Err(StatusCode::BadRequest) } } fn parse_version(&mut self) -> crate::Result<Version> { let protocol = self .read_until(b'/') .ok_or(StatusCode::HttpVersionNotSupported)?; match str::from_utf8(protocol) { Ok("HTTP") => (), _ => return Err(StatusCode::HttpVersionNotSupported), } let version = self .read_until(b'\r') .ok_or(StatusCode::HttpVersionNotSupported)?; Version::try_from(version) } pub fn parse_header(&mut self) -> crate::Result<(HeaderName, HeaderValue)> { let header_name = self .read_until(b':') 
.ok_or(StatusCode::BadRequest)? .to_vec(); let header_name = HeaderName::from(header_name); self.expect(b' ', StatusCode::BadRequest)?; let header_value = self .read_until(b'\r') .ok_or(StatusCode::BadRequest)? .to_vec(); self.expect(b'\n', StatusCode::BadRequest)?; Ok((header_name, header_value)) } pub fn parse_body(&mut self, body_len: usize) -> crate::Result<Vec<u8>> { if body_len > self.state.len() { return Err(StatusCode::LengthRequired); } Ok(self.state[..body_len].to_vec()) } } #[cfg(test)] mod tests { use super::*; #[test] fn read_char() { let mut p = Parser::new(&[42, 43]); assert_eq!(Some(b'*'), p.consume()); assert_eq!(Some(b'+'), p.consume()); assert_eq!(None, p.consume()); } #[test] fn read_until_delim() { let bytes = "GET /index.html HTTP/1.1\r\n".as_bytes(); let mut p = Parser::new(bytes); assert_eq!(Some("GET".as_bytes()), p.read_until(b' ')); assert_eq!(Some("/index.html".as_bytes()), p.read_until(b' ')); assert_eq!(Some("HTTP/1.1".as_bytes()), p.read_until(b'\r')); } #[test] fn read_until_empty() { let mut p = Parser::new(&[]); assert_eq!(None, p.read_until(b' ')); } #[test] fn parse_request_line() { let bytes = "GET /index.html HTTP/1.1\r\n".as_bytes(); let mut p = Parser::new(bytes); let (method, uri, version) = p.parse_request_line().unwrap(); assert_eq!(Method::Get, method); assert_eq!(Uri::new(b"/index.html"), uri); assert_eq!(Version::OneDotOne, version); } #[test] fn parse_method() { let bytes = "GET /index.html HTTP/1.1\r\n".as_bytes(); let mut p = Parser::new(bytes); assert_eq!(Ok(Method::Get), p.parse_method()); } #[test] fn parse_uri() { let bytes = "/index.html HTTP/1.1\r\n".as_bytes(); let mut p = Parser::new(bytes); assert_eq!(Ok(Uri::new(b"/index.html")), p.parse_uri()); } #[test] fn parse_version() { let bytes = "HTTP/1.1\r\n".as_bytes(); let mut p = Parser::new(bytes); assert_eq!(Ok(Version::OneDotOne), p.parse_version()); } #[test] fn parse_header() { let bytes = b"Accept: */*\r\n"; let mut p = Parser::new(bytes); 
assert_eq!(Ok((HeaderName::Accept, b"*/*".to_vec())), p.parse_header()); } #[test] fn parse_body() { let bytes = b"Hello, World!"; let mut p = Parser::new(bytes); assert_eq!(Ok(b"Hello, World!".to_vec()), p.parse_body(13)); } }
use crate::{ header::{HeaderName, HeaderValue}, method::Method, status::StatusCode, Uri, Version, }; use std::convert::TryFrom; use std::str; #[derive(Debug)] pub struct Parser<'a> { state: &'a [u8], } impl<'a> Parser<'a> { pub fn new(input: &'a [u8]) -> Self { Self { state: input } } pub fn consume(&mut self) -> Option<u8> { self.state.split_first().map(|(&b, tail)| { self.state = tail; b }) } pub fn read_until(&mut self, target: u8) -> Option<&[u8]> { let index = self.state.iter().position(|&b| b == target)?; let (found, tail) = self.state.split_at(index); self.state = tail; self.consume(); Some(found) } pub fn read_until_whitespace(&mut self) -> Option<&[u8]> { self.read_until(b' ') }
pub fn parse_request_line(&mut self) -> crate::Result<(Method, Uri, Version)> { let method = self.parse_method()?; let uri = self.parse_uri()?; let version = self.parse_version()?; self.expect(b'\n', StatusCode::BadRequest)?; Ok((method, uri, version)) } fn parse_method(&mut self) -> crate::Result<Method> { match self.read_until_whitespace() { Some(method) => Method::try_from(method), None => Err(StatusCode::BadRequest), } } fn parse_uri(&mut self) -> crate::Result<Uri> { let uri = self.read_until_whitespace().ok_or(StatusCode::BadRequest)?; if uri.starts_with(&[b'/']) { Ok(Uri::new(uri)) } else { Err(StatusCode::BadRequest) } } fn parse_version(&mut self) -> crate::Result<Version> { let protocol = self .read_until(b'/') .ok_or(StatusCode::HttpVersionNotSupported)?; match str::from_utf8(protocol) { Ok("HTTP") => (), _ => return Err(StatusCode::HttpVersionNotSupported), } let version = self .read_until(b'\r') .ok_or(StatusCode::HttpVersionNotSupported)?; Version::try_from(version) } pub fn parse_header(&mut self) -> crate::Result<(HeaderName, HeaderValue)> { let header_name = self .read_until(b':') .ok_or(StatusCode::BadRequest)? .to_vec(); let header_name = HeaderName::from(header_name); self.expect(b' ', StatusCode::BadRequest)?; let header_value = self .read_until(b'\r') .ok_or(StatusCode::BadRequest)? 
.to_vec(); self.expect(b'\n', StatusCode::BadRequest)?; Ok((header_name, header_value)) } pub fn parse_body(&mut self, body_len: usize) -> crate::Result<Vec<u8>> { if body_len > self.state.len() { return Err(StatusCode::LengthRequired); } Ok(self.state[..body_len].to_vec()) } } #[cfg(test)] mod tests { use super::*; #[test] fn read_char() { let mut p = Parser::new(&[42, 43]); assert_eq!(Some(b'*'), p.consume()); assert_eq!(Some(b'+'), p.consume()); assert_eq!(None, p.consume()); } #[test] fn read_until_delim() { let bytes = "GET /index.html HTTP/1.1\r\n".as_bytes(); let mut p = Parser::new(bytes); assert_eq!(Some("GET".as_bytes()), p.read_until(b' ')); assert_eq!(Some("/index.html".as_bytes()), p.read_until(b' ')); assert_eq!(Some("HTTP/1.1".as_bytes()), p.read_until(b'\r')); } #[test] fn read_until_empty() { let mut p = Parser::new(&[]); assert_eq!(None, p.read_until(b' ')); } #[test] fn parse_request_line() { let bytes = "GET /index.html HTTP/1.1\r\n".as_bytes(); let mut p = Parser::new(bytes); let (method, uri, version) = p.parse_request_line().unwrap(); assert_eq!(Method::Get, method); assert_eq!(Uri::new(b"/index.html"), uri); assert_eq!(Version::OneDotOne, version); } #[test] fn parse_method() { let bytes = "GET /index.html HTTP/1.1\r\n".as_bytes(); let mut p = Parser::new(bytes); assert_eq!(Ok(Method::Get), p.parse_method()); } #[test] fn parse_uri() { let bytes = "/index.html HTTP/1.1\r\n".as_bytes(); let mut p = Parser::new(bytes); assert_eq!(Ok(Uri::new(b"/index.html")), p.parse_uri()); } #[test] fn parse_version() { let bytes = "HTTP/1.1\r\n".as_bytes(); let mut p = Parser::new(bytes); assert_eq!(Ok(Version::OneDotOne), p.parse_version()); } #[test] fn parse_header() { let bytes = b"Accept: */*\r\n"; let mut p = Parser::new(bytes); assert_eq!(Ok((HeaderName::Accept, b"*/*".to_vec())), p.parse_header()); } #[test] fn parse_body() { let bytes = b"Hello, World!"; let mut p = Parser::new(bytes); assert_eq!(Ok(b"Hello, World!".to_vec()), p.parse_body(13)); } }
pub fn expect(&mut self, target: u8, error: StatusCode) -> crate::Result<()> { match self.consume() { Some(b) if b == target => Ok(()), _ => Err(error), } }
function_block-full_function
[ { "content": "pub fn filename_to_mime<P: AsRef<Path>>(filename: P) -> &'static [u8] {\n\n match filename.as_ref().extension().and_then(OsStr::to_str) {\n\n Some(\"txt\") => TEXT_PLAIN,\n\n Some(\"html\") => TEXT_HTML,\n\n Some(\"css\") => TEXT_CSS,\n\n Some(\"js\") => TEXT_JAVASCRIPT,\n\n Some(\"jpg\") => IMAGE_JPG,\n\n Some(\"png\") => IMAGE_PNG,\n\n Some(\"json\") => APPLICATION_JSON,\n\n _ => b\"application/octet-stream\",\n\n }\n\n}\n", "file_path": "src/mime.rs", "rank": 0, "score": 73645.38665893709 }, { "content": "fn find_file(path: &Uri, mount_dir: &Path, serve_at: &Path) -> crate::Result<PathBuf> {\n\n let path = std::str::from_utf8(&path.0).unwrap();\n\n let path = Path::new(path);\n\n let path = match path.strip_prefix(serve_at) {\n\n Ok(path) => path,\n\n Err(_) => return Err(StatusCode::NotFound),\n\n };\n\n\n\n let mut file_to_find = mount_dir.to_path_buf();\n\n for p in path {\n\n if p == OsStr::new(\".\") {\n\n continue;\n\n } else if p == OsStr::new(\"..\") {\n\n if !file_to_find.pop() {\n\n // Forbid to access file which is out of mount point to prevent directory\n\n // traversal attack.\n\n return Err(StatusCode::Forbidden);\n\n }\n\n } else {\n\n file_to_find.push(p);\n", "file_path": "src/static_files.rs", "rank": 1, "score": 65412.05108567384 }, { "content": "/// Check if the path has wild card at the end of the path.\n\nfn includes_wildcard(path: &[u8]) -> bool {\n\n path.ends_with(b\"/*\")\n\n}\n\n\n\nimpl<State> Router<State>\n\nwhere\n\n State: Clone + Send + Sync + 'static,\n\n{\n\n pub fn new() -> Self {\n\n Self {\n\n path: Vec::new(),\n\n handlers: HashMap::new(),\n\n children: Vec::new(),\n\n }\n\n }\n\n\n\n fn new_child<F: Handler<State>>(path: &[u8], method: Method, handler: F) -> Self {\n\n if includes_wildcard(path) && !path.starts_with(b\"*\") {\n\n let mut child = Self {\n\n path: path.to_vec(),\n", "file_path": "src/router.rs", "rank": 2, "score": 65309.33376217977 }, { "content": "#[async_trait]\n\npub trait Handler<State>: 
Send + Sync + 'static\n\nwhere\n\n State: Clone + Send + Sync + 'static,\n\n{\n\n async fn call(&self, request: Request, state: State) -> crate::Result<Response>;\n\n}\n\n\n\n#[async_trait]\n\nimpl<State, F, Fut> Handler<State> for F\n\nwhere\n\n State: Clone + Send + Sync + 'static,\n\n F: Send + Sync + 'static + Fn(Request, State) -> Fut,\n\n Fut: Future + Send + 'static,\n\n Fut::Output: Into<Response>,\n\n{\n\n async fn call(&self, request: Request, state: State) -> crate::Result<Response> {\n\n Ok(self(request, state).await.into())\n\n }\n\n}\n\n\n\nimpl<State> fmt::Debug for dyn Handler<State>\n\nwhere\n\n State: Clone + Send + Sync + 'static,\n\n{\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"Handler\")\n\n }\n\n}\n", "file_path": "src/handler.rs", "rank": 3, "score": 59457.96124844572 }, { "content": "#[async_trait]\n\npub trait Middleware<State>: Send + Sync + 'static\n\nwhere\n\n State: Clone + Send + Sync + 'static,\n\n{\n\n async fn call(\n\n &self,\n\n request: Request,\n\n state: State,\n\n next: MiddlewareChain<'_, State>,\n\n ) -> Response;\n\n}\n\n\n\n#[async_trait]\n\nimpl<F, Fut, State> Middleware<State> for F\n\nwhere\n\n F: Send + Sync + 'static + Fn(Request, State, MiddlewareChain<'_, State>) -> Fut,\n\n Fut: Future<Output = Response> + Send + 'static,\n\n State: Clone + Send + Sync + 'static,\n\n{\n\n async fn call(\n", "file_path": "src/middleware.rs", "rank": 4, "score": 59457.96124844572 }, { "content": "#[derive(Clone, Debug, PartialEq, Eq, Deserialize)]\n\nstruct User {\n\n username: String,\n\n password: String,\n\n}\n\n\n\nasync fn login(request: Request, _: ()) -> qz::Result<Response> {\n\n let user: User = request.body_form()?;\n\n println!(\"{:?}\", user);\n\n Ok(Redirect::see_other(\"/userpage\").into())\n\n}\n\n\n\n#[tokio::main]\n\nasync fn main() -> io::Result<()> {\n\n let server = Server::builder()\n\n .serve_file(\"/userpage\", \"./examples/assets/index.html\")?\n\n .serve_file(\"/login\", 
\"./examples/assets/form.html\")?\n\n .route(\"/login\", Method::Post, login)\n\n .build();\n\n Server::run(server, 8080).await\n\n}\n", "file_path": "examples/form.rs", "rank": 5, "score": 43381.224655746584 }, { "content": "#[derive(Clone)]\n\nstruct Counter {\n\n value: Arc<AtomicUsize>,\n\n}\n\n\n\nimpl Counter {\n\n fn new() -> Self {\n\n Self {\n\n value: Arc::new(AtomicUsize::new(0)),\n\n }\n\n }\n\n\n\n fn increment(&self) -> usize {\n\n self.value\n\n .fetch_add(1, std::sync::atomic::Ordering::Relaxed)\n\n + 1\n\n }\n\n}\n\n\n\nasync fn increment(_request: Request, state: Counter) -> String {\n\n let value = state.increment();\n", "file_path": "examples/counter.rs", "rank": 6, "score": 43381.224655746584 }, { "content": "console.log(\"Hello\");\n", "file_path": "examples/assets/index.js", "rank": 7, "score": 16539.918386046407 }, { "content": "import React from 'react';\n\nimport ReactDOM from 'react-dom';\n\nimport './index.css';\n\nimport App from './App';\n\n\n\nReactDOM.render(\n\n <React.StrictMode>\n\n <App />\n\n </React.StrictMode>,\n\n document.getElementById('root')\n\n);\n", "file_path": "examples/chat/frontend/src/index.js", "rank": 8, "score": 15415.053385681556 }, { "content": "use crate::{\n\n handler::Handler, header::HeaderName, request::Request, response::Response, status::StatusCode,\n\n Uri,\n\n};\n\nuse async_trait::async_trait;\n\n\n\n/// Handler performing redirection.\n\n#[derive(Debug)]\n\npub struct Redirect {\n\n status_code: StatusCode,\n\n uri: Uri,\n\n}\n\n\n\nimpl Redirect {\n\n /// Create a redirect to `uri`.\n\n /// This method is alias to `Redirect::found()`.\n\n pub fn new(uri: impl Into<Uri>) -> Self {\n\n Self::found(uri.into())\n\n }\n\n\n", "file_path": "src/redirect.rs", "rank": 12, "score": 16.89470016988584 }, { "content": " self\n\n }\n\n\n\n pub fn set_body(mut self, body: impl Into<Body>) -> Self {\n\n self.inner.set_body(body);\n\n self\n\n }\n\n\n\n fn parse_request_line(&mut self, bytes: &[u8]) -> 
crate::Result<()> {\n\n let mut p = Parser::new(bytes);\n\n let (method, uri, version) = p.parse_request_line()?;\n\n self.inner.method = method;\n\n self.inner.uri = uri;\n\n self.inner.version = version;\n\n Ok(())\n\n }\n\n\n\n fn parse_header(&mut self, bytes: &[u8]) -> crate::Result<()> {\n\n let mut p = Parser::new(bytes);\n\n let (name, value) = p.parse_header()?;\n", "file_path": "src/request.rs", "rank": 13, "score": 15.452078620191266 }, { "content": "use serde::de::DeserializeOwned;\n\n\n\nuse crate::{\n\n body::Body,\n\n header::{HeaderName, HeaderValue},\n\n method::Method,\n\n parser::Parser,\n\n status::StatusCode,\n\n Uri, Version,\n\n};\n\nuse std::{collections::HashMap, fmt, str};\n\n\n\n#[derive(Debug, Default, PartialEq, Eq)]\n\npub struct RequestBuilder {\n\n inner: Request,\n\n}\n\n\n\nimpl RequestBuilder {\n\n pub fn new() -> Self {\n\n Self {\n", "file_path": "src/request.rs", "rank": 14, "score": 14.756247341610312 }, { "content": "use crate::{\n\n handler::Handler, method::Method, request::Request, response::Response, status::StatusCode,\n\n};\n\nuse std::collections::HashMap;\n\n\n\n/// Associates URI with `Handler`.\n\n/// URI paths are represented as trie tree.\n\n/// This struct is a node of the tree.\n\n#[derive(Debug)]\n\npub struct Router<State>\n\nwhere\n\n State: Clone + Send + Sync + 'static,\n\n{\n\n path: Vec<u8>,\n\n handlers: HashMap<Method, Box<dyn Handler<State>>>,\n\n children: Vec<Router<State>>,\n\n}\n\n\n\n/// Check if the path has wild card at the end of the path.\n", "file_path": "src/router.rs", "rank": 15, "score": 14.025530728218564 }, { "content": " /// Create a permanent redirect to `uri`.\n\n pub fn moved_permanently(uri: impl Into<Uri>) -> Self {\n\n Self {\n\n status_code: StatusCode::MovedPermanently,\n\n uri: uri.into(),\n\n }\n\n }\n\n\n\n /// Create a redirect to `uri`.\n\n pub fn found(uri: impl Into<Uri>) -> Self {\n\n Self {\n\n status_code: StatusCode::Found,\n\n uri: uri.into(),\n\n }\n\n }\n\n\n\n 
/// Create a see other redirect to `uri`.\n\n pub fn see_other(uri: impl Into<Uri>) -> Self {\n\n Self {\n\n status_code: StatusCode::SeeOther,\n", "file_path": "src/redirect.rs", "rank": 16, "score": 14.018892947994386 }, { "content": "pub type Result<T, E = StatusCode> = std::result::Result<T, E>;\n\n\n\n#[derive(Debug, PartialEq, Eq)]\n\npub struct Uri(Vec<u8>);\n\n\n\nimpl Uri {\n\n pub fn new(path: &[u8]) -> Self {\n\n Self(path.to_vec())\n\n }\n\n}\n\n\n\nimpl From<&str> for Uri {\n\n fn from(s: &str) -> Self {\n\n Uri(s.as_bytes().to_vec())\n\n }\n\n}\n\n\n\nimpl AsRef<[u8]> for Uri {\n\n fn as_ref(&self) -> &[u8] {\n\n &self.0\n", "file_path": "src/lib.rs", "rank": 17, "score": 13.972339650402574 }, { "content": " (401, Unauthorized, \"Unauthorized\"),\n\n (403, Forbidden, \"Forbidden\"),\n\n (404, NotFound, \"Not Found\"),\n\n (405, MethodNotAllowed, \"Method Not Allowed\"),\n\n (411, LengthRequired, \"Length Required\"),\n\n (418, ImaTeapot, \"I'm a teapot\"),\n\n (500, InternalServerError, \"Internal Server Error\"),\n\n (505, HttpVersionNotSupported, \"HTTP Version not Supported\"),\n\n);\n\n\n\nimpl StatusCode {\n\n const ASCII_ZERO: u8 = 48;\n\n\n\n /// Convert status code into 3 bytes of ASCII.\n\n pub const fn as_bytes(&self) -> [u8; 3] {\n\n let code = self.code();\n\n [\n\n (code / 100) as u8 + Self::ASCII_ZERO,\n\n (code / 10 % 10) as u8 + Self::ASCII_ZERO,\n\n (code % 10) as u8 + Self::ASCII_ZERO,\n", "file_path": "src/status.rs", "rank": 18, "score": 13.7867661544313 }, { "content": "use crate::{\n\n header::HeaderName, middleware::Middleware, request::Request, response::Response,\n\n status::StatusCode, Uri,\n\n};\n\nuse async_trait::async_trait;\n\n\n\nuse super::MiddlewareChain;\n\n\n\n#[derive(Debug)]\n\npub struct BasicAuth {\n\n credential_hash: Vec<u8>,\n\n auth_root: Uri,\n\n}\n\n\n\nimpl BasicAuth {\n\n /// Create new Basic authentication middleware. 
`auth_root` is the root of subtree to protect\n\n pub fn new(username: &str, password: &str, auth_root: impl Into<Uri>) -> Self {\n\n let credential = format!(\"{}:{}\", username, password);\n\n let credential_hash = base64::encode(credential.as_bytes()).into_bytes();\n\n Self {\n", "file_path": "src/middleware/basic_auth.rs", "rank": 19, "score": 13.753613741111538 }, { "content": " self.inner\n\n }\n\n}\n\n\n\n/// Represents HTTP request. This struct is built from `RequestBuffer` and passed to `Handler`.\n\n#[derive(Debug, Default, PartialEq, Eq)]\n\npub struct Request {\n\n pub(crate) method: Method,\n\n pub(crate) uri: Uri,\n\n pub(crate) version: Version,\n\n pub(crate) headers: HashMap<HeaderName, HeaderValue>,\n\n pub(crate) body: Body,\n\n}\n\n\n\nimpl Request {\n\n pub fn builder() -> RequestBuilder {\n\n RequestBuilder::default()\n\n }\n\n\n\n pub fn method(&self) -> Method {\n", "file_path": "src/request.rs", "rank": 20, "score": 12.99835054951605 }, { "content": "use crate::{\n\n body::Body,\n\n header::{HeaderName, HeaderValue},\n\n mime,\n\n status::StatusCode,\n\n Version,\n\n};\n\nuse std::{collections::HashMap, convert::From};\n\nuse tokio::io::{self, AsyncWrite, AsyncWriteExt};\n\n\n\n/// Builder of `Response`.\n\n#[derive(Debug, Default, PartialEq, Eq)]\n\npub struct ResponseBuilder {\n\n inner: Response,\n\n}\n\n\n\nimpl ResponseBuilder {\n\n pub fn new() -> Self {\n\n Self {\n\n inner: Response::default(),\n", "file_path": "src/response.rs", "rank": 21, "score": 12.470953683424277 }, { "content": " }\n\n\n\n pub fn with<M: Middleware<State>>(mut self, middleware: M) -> Self {\n\n self.middlewares.push(Arc::new(middleware));\n\n self\n\n }\n\n\n\n pub fn route<F: Handler<State>>(mut self, path: &str, method: Method, handler: F) -> Self {\n\n self.router.add_route(path, method, handler);\n\n self\n\n }\n\n\n\n /// Serve files under the directory.\n\n /// `dir` is path to the directory and `serve_at` is a prefix of URI.\n\n /// e.g. 
`self.serve_dir(\"./static/html\", /static)` serves files under `./static/html` and\n\n /// URI for the files will be like `/static/index.html`\n\n pub fn serve_dir<P>(self, serve_at: &str, dir: P) -> Self\n\n where\n\n P: AsRef<Path>,\n\n {\n", "file_path": "src/server.rs", "rank": 22, "score": 12.411917947856429 }, { "content": " self\n\n }\n\n\n\n pub fn set_body(mut self, body: impl Into<Body>) -> Self {\n\n self.inner.set_body(body);\n\n self\n\n }\n\n\n\n pub fn build(self) -> Response {\n\n self.inner\n\n }\n\n}\n\n\n\n/// Represents HTTP response.\n\n#[derive(Debug, Default, PartialEq, Eq)]\n\npub struct Response {\n\n pub(crate) status_code: StatusCode,\n\n pub(crate) headers: HashMap<HeaderName, HeaderValue>,\n\n pub(crate) version: Version,\n\n pub(crate) body: Body,\n", "file_path": "src/response.rs", "rank": 23, "score": 12.096185950399555 }, { "content": " }\n\n }\n\n\n\n if !file_to_find.starts_with(&mount_dir) {\n\n return Err(StatusCode::NotFound);\n\n }\n\n if !file_to_find.exists() {\n\n return Err(StatusCode::NotFound);\n\n }\n\n Ok(file_to_find)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::Uri;\n\n use tokio::{\n\n fs::{self, File},\n\n io,\n\n };\n", "file_path": "src/static_files.rs", "rank": 24, "score": 11.402176734537521 }, { "content": "mod static_dir;\n\nmod static_file;\n\n\n\nuse crate::{status::StatusCode, Uri};\n\npub use static_dir::StaticDir;\n\npub use static_file::StaticFile;\n\nuse std::{\n\n ffi::OsStr,\n\n path::{Path, PathBuf},\n\n};\n\n\n", "file_path": "src/static_files.rs", "rank": 25, "score": 11.37940618616745 }, { "content": " router: Router<State>,\n\n state: State,\n\n}\n\n\n\nimpl ServerBuilder<()> {\n\n pub fn new() -> Self {\n\n Self::with_state(())\n\n }\n\n}\n\n\n\nimpl<State> ServerBuilder<State>\n\nwhere\n\n State: Clone + Send + Sync + 'static,\n\n{\n\n pub fn with_state(state: State) -> Self {\n\n Self {\n\n middlewares: Vec::new(),\n\n router: Router::new(),\n\n state,\n\n }\n", 
"file_path": "src/server.rs", "rank": 26, "score": 10.978225285090788 }, { "content": "use crate::{response::Response, status::StatusCode};\n\nuse serde::{de::DeserializeOwned, Serialize};\n\nuse std::{convert::From, fmt};\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub enum Body {\n\n None,\n\n Some(Vec<u8>),\n\n}\n\n\n\nimpl Body {\n\n pub fn len(&self) -> usize {\n\n match &self {\n\n Body::Some(bytes) => bytes.len(),\n\n Body::None => 0,\n\n }\n\n }\n\n\n\n pub fn is_empty(&self) -> bool {\n\n self.len() == 0\n", "file_path": "src/body.rs", "rank": 28, "score": 10.609733398661131 }, { "content": " Self {\n\n mount_dir: mount_dir.as_ref().to_path_buf(),\n\n serve_at: serve_at.as_ref().to_path_buf(),\n\n }\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl<State> Handler<State> for StaticDir\n\nwhere\n\n State: Clone + Send + Sync + 'static,\n\n{\n\n async fn call(&self, request: Request, _state: State) -> crate::Result<Response> {\n\n let found_file = find_file(\n\n request.uri(),\n\n self.mount_dir.as_path(),\n\n self.serve_at.as_path(),\n\n )?;\n\n\n\n let mime_type = mime::filename_to_mime(&found_file);\n\n let mut file_to_serve = File::open(found_file).await?;\n\n let mut buffer = Vec::new();\n\n file_to_serve.read_to_end(&mut buffer).await?;\n\n let mut response = Response::from(buffer);\n\n response.set_content_type(mime_type);\n\n Ok(response)\n\n }\n\n}\n", "file_path": "src/static_files/static_dir.rs", "rank": 29, "score": 10.608594244727973 }, { "content": " self.method\n\n }\n\n\n\n pub fn uri(&self) -> &Uri {\n\n &self.uri\n\n }\n\n\n\n pub fn version(&self) -> &Version {\n\n &self.version\n\n }\n\n\n\n pub fn headers(&self) -> &HashMap<HeaderName, HeaderValue> {\n\n &self.headers\n\n }\n\n\n\n pub fn get_header(&self, name: HeaderName) -> Option<&HeaderValue> {\n\n self.headers.get(&name)\n\n }\n\n\n\n pub fn set_header(&mut self, name: HeaderName, value: impl Into<HeaderValue>) {\n", "file_path": "src/request.rs", "rank": 30, "score": 10.596880997788322 
}, { "content": " }\n\n}\n\n\n\nimpl Default for Uri {\n\n fn default() -> Self {\n\n Uri(b\"/\".to_vec())\n\n }\n\n}\n\n\n\nimpl fmt::Display for Uri {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", str::from_utf8(&self.0).unwrap())\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq)]\n\npub enum Version {\n\n OneDotOne,\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 31, "score": 10.527197744518801 }, { "content": "use std::{\n\n io,\n\n path::{Path, PathBuf},\n\n};\n\n\n\nuse crate::{handler::Handler, mime, request::Request, response::Response};\n\nuse async_trait::async_trait;\n\nuse tokio::{fs::File, io::AsyncReadExt};\n\n\n\npub struct StaticFile {\n\n path: PathBuf,\n\n}\n\n\n\nimpl StaticFile {\n\n pub fn mount<P: AsRef<Path>>(path: P) -> io::Result<Self> {\n\n let path = path.as_ref().canonicalize()?;\n\n Ok(Self { path })\n\n }\n\n}\n\n\n", "file_path": "src/static_files/static_file.rs", "rank": 32, "score": 10.48585032391017 }, { "content": "}\n\n\n\nimpl Response {\n\n pub fn new(status_code: StatusCode) -> Self {\n\n Self {\n\n status_code,\n\n ..Default::default()\n\n }\n\n }\n\n\n\n pub fn builder() -> ResponseBuilder {\n\n ResponseBuilder::default()\n\n }\n\n\n\n pub fn status_code(&self) -> StatusCode {\n\n self.status_code\n\n }\n\n\n\n pub fn version(&self) -> Version {\n\n self.version\n", "file_path": "src/response.rs", "rank": 33, "score": 10.463282773114427 }, { "content": "\n\npub async fn method_not_allowed<State>(_request: Request, _state: State) -> Response\n\nwhere\n\n State: Clone + Send + Sync + 'static,\n\n{\n\n Response::new(StatusCode::MethodNotAllowed)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::{body::Body, request::Request, response::Response};\n\n use async_trait::async_trait;\n\n\n\n #[test]\n\n fn lcp() {\n\n let node_x = Router::<()> {\n\n path: b\"abcde\".to_vec(),\n\n handlers: HashMap::new(),\n\n children: Vec::new(),\n", "file_path": 
"src/router.rs", "rank": 34, "score": 10.230877135960137 }, { "content": " pub fn into_json<T: DeserializeOwned>(&self) -> crate::Result<T> {\n\n match &self {\n\n Body::Some(bytes) => {\n\n serde_json::from_slice::<T>(&bytes).or(Err(StatusCode::BadRequest))\n\n }\n\n Body::None => Err(StatusCode::InternalServerError),\n\n }\n\n }\n\n}\n\n\n\nimpl Default for Body {\n\n fn default() -> Self {\n\n Self::None\n\n }\n\n}\n\n\n\nimpl AsRef<[u8]> for Body {\n\n fn as_ref(&self) -> &[u8] {\n\n match self {\n\n Self::None => &[],\n", "file_path": "src/body.rs", "rank": 35, "score": 10.188466839238215 }, { "content": " inner: Request::default(),\n\n }\n\n }\n\n\n\n pub fn set_method(mut self, method: Method) -> Self {\n\n self.inner.method = method;\n\n self\n\n }\n\n\n\n pub fn set_uri(mut self, uri: impl Into<Uri>) -> Self {\n\n self.inner.uri = uri.into();\n\n self\n\n }\n\n\n\n pub fn get_header(&self, name: HeaderName) -> Option<&HeaderValue> {\n\n self.inner.headers.get(&name)\n\n }\n\n\n\n pub fn set_header(mut self, name: HeaderName, value: impl Into<HeaderValue>) -> Self {\n\n self.inner.headers.insert(name, value.into());\n", "file_path": "src/request.rs", "rank": 36, "score": 10.157872020722161 }, { "content": "///\n\n/// There are two parsing strategies:\n\n/// * Read whole request, then parse it\n\n/// * Parse line by line\n\n///\n\n/// Although the first option is easier, parsing request with message body is difficult\n\n/// because it is hard to know when to finish reading from socket.\n\n/// So this struct parse a request with second strategy to get body size from `Content-Length`\n\n/// header while parsing.\n\npub struct RequestBuffer {\n\n buffer: Vec<u8>,\n\n state: ParseState,\n\n builder: RequestBuilder,\n\n}\n\n\n\nimpl RequestBuffer {\n\n pub fn new() -> Self {\n\n Self {\n\n buffer: Vec::new(),\n\n state: ParseState::RequestLine,\n", "file_path": "src/request.rs", "rank": 37, "score": 10.128987710582916 }, { "content": "}\n\n\n\nimpl TryFrom<&[u8]> 
for Version {\n\n type Error = StatusCode;\n\n fn try_from(value: &[u8]) -> Result<Self> {\n\n match value {\n\n b\"1.1\" => Ok(Version::OneDotOne),\n\n _ => Err(StatusCode::HttpVersionNotSupported),\n\n }\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 39, "score": 9.811786813487027 }, { "content": "use crate::status::StatusCode;\n\nuse std::{convert::TryFrom, fmt};\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]\n\npub enum Method {\n\n Get,\n\n Post,\n\n Options,\n\n}\n\n\n\nimpl Default for Method {\n\n fn default() -> Self {\n\n Self::Get\n\n }\n\n}\n\n\n\nimpl fmt::Display for Method {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Method::Get => write!(f, \"GET\"),\n", "file_path": "src/method.rs", "rank": 41, "score": 9.682512041715782 }, { "content": "use crate::{\n\n handler::Handler,\n\n method::Method,\n\n middleware::{Middleware, MiddlewareChain},\n\n request::{ParseState, Request, RequestBuffer},\n\n response::Response,\n\n router::Router,\n\n static_files::{StaticDir, StaticFile},\n\n};\n\nuse std::{path::Path, sync::Arc};\n\nuse tokio::io::{self, AsyncReadExt, AsyncWriteExt};\n\nuse tokio::net::{TcpListener, TcpStream};\n\n\n\n/// Builder of `Server`.\n\n/// The purpose of this struct is to make `Server.router` immutable.\n\npub struct ServerBuilder<State>\n\nwhere\n\n State: Clone + Send + Sync + 'static,\n\n{\n\n middlewares: Vec<Arc<dyn Middleware<State>>>,\n", "file_path": "src/server.rs", "rank": 42, "score": 9.680463403200307 }, { "content": "use crate::model::{Post, Posts, User};\n\n\n\n#[derive(Debug)]\n\npub struct Db {\n\n users: Vec<User>,\n\n posts: Vec<Post>,\n\n}\n\n\n\nimpl Db {\n\n pub fn new() -> Self {\n\n Self {\n\n users: Vec::new(),\n\n posts: Vec::new(),\n\n }\n\n }\n\n\n\n pub fn register(&mut self, user: User) {\n\n self.users.push(user);\n\n }\n\n\n\n pub fn current_posts(&self, count: usize) -> Posts {\n\n Posts::new(self.posts.iter().take(count).cloned().collect())\n\n 
}\n\n\n\n pub fn create_post(&mut self, post: Post) {\n\n self.posts.push(post);\n\n }\n\n}\n", "file_path": "examples/chat/src/db.rs", "rank": 43, "score": 9.613423271063002 }, { "content": " builder: RequestBuilder::new(),\n\n }\n\n }\n\n\n\n pub fn complete(self) -> Request {\n\n self.builder.build()\n\n }\n\n\n\n /// Extend buffer of this struct with `data` and try to parse given request data.\n\n pub fn try_parse(&mut self, data: &[u8]) -> crate::Result<ParseState> {\n\n self.buffer.extend_from_slice(&data);\n\n let mut buf_iter = self.buffer.iter();\n\n let mut parse_start = 0;\n\n let mut parse_end = 0;\n\n // self.buffer may contain multiple lines(multiple CRLFs).\n\n loop {\n\n if let ParseState::Completed = self.state {\n\n return Ok(ParseState::Completed);\n\n }\n\n\n", "file_path": "src/request.rs", "rank": 44, "score": 9.610946233500783 }, { "content": " Some(handler) => return &**handler,\n\n None => return &method_not_allowed,\n\n }\n\n }\n\n if let (Some(c), Some(d)) = (child.path.get(0), key_remaining.iter().next()) {\n\n if c == d {\n\n return child.find(key_remaining, method);\n\n }\n\n }\n\n }\n\n &not_found\n\n }\n\n}\n\n\n\npub async fn not_found<State>(_request: Request, _state: State) -> Response\n\nwhere\n\n State: Clone + Send + Sync + 'static,\n\n{\n\n Response::new(StatusCode::NotFound)\n\n}\n", "file_path": "src/router.rs", "rank": 45, "score": 9.449515800303615 }, { "content": "const DEFAULT_ALLOW_METHODS: &[u8] = b\"POST, GET, OPTIONS\";\n\nconst DEFAULT_ALLOW_HEADERS: &[u8] = b\"*\";\n\nconst DEFAULT_MAX_AGE: &[u8] = b\"86400\";\n\n\n\nimpl Cors {\n\n pub fn new() -> Self {\n\n Self {\n\n allow_origin: Origin::Any,\n\n allow_methods: DEFAULT_ALLOW_METHODS.into(),\n\n allow_headers: DEFAULT_ALLOW_HEADERS.into(),\n\n max_age: DEFAULT_MAX_AGE.into(),\n\n }\n\n }\n\n\n\n pub fn allow_origin(mut self, origin: impl Into<Origin>) -> Self {\n\n self.allow_origin = origin.into();\n\n self\n\n }\n\n\n\n pub fn allow_methods(mut self, methods: 
impl Into<HeaderValue>) -> Self {\n", "file_path": "src/middleware/cors.rs", "rank": 46, "score": 9.433685626933205 }, { "content": "impl Version {\n\n fn as_bytes(&self) -> &'static [u8; 3] {\n\n match &self {\n\n Version::OneDotOne => b\"1.1\",\n\n }\n\n }\n\n}\n\n\n\nimpl Default for Version {\n\n fn default() -> Self {\n\n Self::OneDotOne\n\n }\n\n}\n\n\n\nimpl fmt::Display for Version {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self {\n\n Version::OneDotOne => write!(f, \"1.1\"),\n\n }\n\n }\n", "file_path": "src/lib.rs", "rank": 47, "score": 9.417009109688719 }, { "content": "pub mod method;\n\npub mod middleware;\n\npub mod mime;\n\nmod parser;\n\npub mod redirect;\n\npub mod request;\n\npub mod response;\n\nmod router;\n\npub mod server;\n\npub mod static_files;\n\npub mod status;\n\n\n\nuse crate::status::StatusCode;\n\nuse std::{\n\n convert::{From, TryFrom},\n\n fmt, str,\n\n};\n\n\n\n/// All errornous function returns this type. because all error in this crate converges HTTP error which is\n\n/// represented by status code.\n", "file_path": "src/lib.rs", "rank": 48, "score": 9.298150177484466 }, { "content": " _ => continue,\n\n }\n\n }\n\n // If there is no child in `self.children` that matches new path, just insert it.\n\n self.children\n\n .push(Router::new_child(new_path_remaining, method, handler));\n\n }\n\n }\n\n\n\n // `find` returns `Handler` even there's no method in the route.\n\n // The main purpose is CORS handling which needs to process OPTIONS method for preflight.\n\n // if `Router` does not return `Handler` in such error, dummy handler to \"/*\" for OPTIONS\n\n // method should be registered to pass the request to middlewares. This is ugly.\n\n pub fn find<'a, B: AsRef<[u8]>>(&'a self, key: B, method: Method) -> &'a dyn Handler<State> {\n\n let key = key.as_ref();\n\n if key.is_empty() {\n\n return &not_found;\n\n }\n\n if &self.path[..] > key {\n\n // e.g. 
`self.path` is \"hoge\" and `key` is \"ho\".\n", "file_path": "src/router.rs", "rank": 49, "score": 9.254640621907981 }, { "content": " uri: uri.into(),\n\n }\n\n }\n\n}\n\n\n\n#[async_trait]\n\nimpl<State> Handler<State> for Redirect\n\nwhere\n\n State: Clone + Send + Sync + 'static,\n\n{\n\n async fn call(&self, _request: Request, _state: State) -> crate::Result<Response> {\n\n let response = Response::builder()\n\n .set_status_code(self.status_code)\n\n .set_header(HeaderName::Location, self.uri.0.clone())\n\n .build();\n\n Ok(response)\n\n }\n\n}\n\n\n\nimpl From<Redirect> for Response {\n", "file_path": "src/redirect.rs", "rank": 50, "score": 9.199167771357597 }, { "content": "use std::path::{Path, PathBuf};\n\n\n\nuse crate::{\n\n handler::Handler, mime, request::Request, response::Response, static_files::find_file,\n\n};\n\nuse async_trait::async_trait;\n\nuse tokio::{fs::File, io::AsyncReadExt};\n\n\n\n#[derive(Debug)]\n\npub struct StaticDir {\n\n mount_dir: PathBuf,\n\n serve_at: PathBuf,\n\n}\n\n\n\nimpl StaticDir {\n\n pub fn mount<P1, P2>(mount_dir: P1, serve_at: P2) -> Self\n\n where\n\n P1: AsRef<Path>,\n\n P2: AsRef<Path>,\n\n {\n", "file_path": "src/static_files/static_dir.rs", "rank": 51, "score": 9.188502776581467 }, { "content": " &self,\n\n request: Request,\n\n state: State,\n\n next: MiddlewareChain<'_, State>,\n\n ) -> Response {\n\n self(request, state, next).await\n\n }\n\n}\n\n\n\n/// Sequence of Middlewares.\n\n///\n\n/// Middlewares' process is recursive:\n\n/// * Modify `Request` (basically modify headers)\n\n/// * Run next middleware\n\n/// * Modify `Response` (basically modify headers)\n\npub struct MiddlewareChain<'a, State>\n\nwhere\n\n State: Clone + Send + Sync + 'static,\n\n{\n\n pub(crate) handler: &'a dyn Handler<State>,\n", "file_path": "src/middleware.rs", "rank": 52, "score": 8.741608810568476 }, { "content": " pos\n\n }\n\n\n\n pub fn add_route<B: AsRef<[u8]>, F: Handler<State>>(\n\n &mut self,\n\n new_path: B,\n\n method: 
Method,\n\n handler: F,\n\n ) {\n\n let new_path = new_path.as_ref();\n\n // For the first time to insert node to root.\n\n if self.path.is_empty() && self.children.is_empty() {\n\n self.children\n\n .push(Router::new_child(new_path, method, handler));\n\n return;\n\n }\n\n if self.path == new_path {\n\n self.handlers.insert(method, Box::new(handler));\n\n return;\n\n }\n", "file_path": "src/router.rs", "rank": 53, "score": 8.72845619807641 }, { "content": " pub(crate) middlewares: &'a [Arc<dyn Middleware<State>>],\n\n}\n\n\n\nimpl<State> MiddlewareChain<'_, State>\n\nwhere\n\n State: Clone + Send + Sync + 'static,\n\n{\n\n pub async fn run(mut self, request: Request, state: State) -> Response {\n\n if let Some((first, remaining)) = self.middlewares.split_first() {\n\n self.middlewares = remaining;\n\n first.call(request, state, self).await\n\n } else {\n\n match self.handler.call(request, state).await {\n\n Ok(response) => response,\n\n Err(code) => code.into(),\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/middleware.rs", "rank": 54, "score": 8.63654002322341 }, { "content": "use async_trait::async_trait;\n\n\n\nuse crate::{handler::Handler, request::Request, response::Response};\n\nuse std::{future::Future, sync::Arc};\n\n\n\nmod basic_auth;\n\nmod cors;\n\n\n\npub use basic_auth::BasicAuth;\n\npub use cors::Cors;\n\n\n\n/// Middleware preprocesses request before generating response in `Handler` and postprocesses\n\n/// response.\n\n#[async_trait]\n", "file_path": "src/middleware.rs", "rank": 55, "score": 8.554769798225044 }, { "content": "use crate::{request::Request, response::Response};\n\nuse async_trait::async_trait;\n\nuse std::{fmt, future::Future};\n\n\n\n/// Abstruction over all process to create response from request.\n\n///\n\n/// `State` is something to use in handler process such as database connection, counter and so on.\n\n#[async_trait]\n", "file_path": "src/handler.rs", "rank": 56, "score": 8.48714199024494 }, { "content": "\n\nimpl 
From<Vec<u8>> for Origin {\n\n fn from(v: Vec<u8>) -> Self {\n\n if v == &[b'*'] {\n\n return Self::Any;\n\n }\n\n Self::Single(v)\n\n }\n\n}\n\n\n\nimpl From<&Vec<u8>> for Origin {\n\n fn from(v: &Vec<u8>) -> Self {\n\n Self::from(v.clone())\n\n }\n\n}\n\n\n\nimpl From<&[u8]> for Origin {\n\n fn from(v: &[u8]) -> Self {\n\n Self::from(v.to_vec())\n\n }\n", "file_path": "src/middleware/cors.rs", "rank": 57, "score": 8.415632240188362 }, { "content": " }\n\n }\n\n\n\n pub fn set_status_code(mut self, status_code: StatusCode) -> Self {\n\n self.inner.status_code = status_code;\n\n self\n\n }\n\n\n\n pub fn set_header(mut self, name: HeaderName, value: impl Into<HeaderValue>) -> Self {\n\n self.inner.set_header(name, value.into());\n\n self\n\n }\n\n\n\n pub fn set_content_length(mut self, length: usize) -> Self {\n\n self.inner.set_content_length(length);\n\n self\n\n }\n\n\n\n pub fn set_content_type(mut self, mime_type: &[u8]) -> Self {\n\n self.inner.set_content_type(mime_type);\n", "file_path": "src/response.rs", "rank": 58, "score": 8.388570250903292 }, { "content": " }\n\n}\n\n\n\nimpl<State> Server<State>\n\nwhere\n\n State: Clone + Send + Sync + 'static,\n\n{\n\n const INITIAL_BUFFER_SIZE: usize = 4096;\n\n\n\n pub fn builder_with_state(state: State) -> ServerBuilder<State> {\n\n ServerBuilder::with_state(state)\n\n }\n\n\n\n pub async fn run(server: Self, port: u16) -> io::Result<()> {\n\n let listener = TcpListener::bind(format!(\"0.0.0.0:{}\", port)).await?;\n\n println!(\"Listening on {}\", listener.local_addr()?);\n\n loop {\n\n let (mut stream, _) = match listener.accept().await {\n\n Ok(stream) => stream,\n\n Err(err) => {\n", "file_path": "src/server.rs", "rank": 59, "score": 8.375917060479189 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]\n\npub struct User {\n\n username: String,\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]\n\npub struct Post 
{\n\n username: String,\n\n text: String,\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq, Serialize)]\n\npub struct Posts {\n\n posts: Vec<Post>,\n\n}\n\n\n\nimpl Posts {\n\n pub fn new(posts: Vec<Post>) -> Self {\n\n Self { posts }\n\n }\n\n}\n", "file_path": "examples/chat/src/model.rs", "rank": 60, "score": 7.955690892898881 }, { "content": "\n\n // Creates ./static_dir_test/static/index.html for tests.\n\n // First this function uses `tempfile` crate, but temporary directory which is created with the\n\n // crate is deleted when it goes out of scope.\n\n async fn setup_dir() -> io::Result<(PathBuf, PathBuf)> {\n\n let static_dir = PathBuf::from(\"./static_dir_test/static\");\n\n fs::create_dir_all(static_dir.clone()).await?;\n\n\n\n let file_path = static_dir.join(\"index.html\");\n\n File::create(file_path).await?;\n\n Ok((static_dir, PathBuf::from(\"/static\")))\n\n }\n\n\n\n #[tokio::test]\n\n async fn find_static_file() -> io::Result<()> {\n\n let (static_dir, serve_at) = setup_dir().await?;\n\n find_file(\n\n &Uri::new(b\"/static/index.html\"),\n\n static_dir.as_path(),\n\n serve_at.as_path(),\n", "file_path": "src/static_files.rs", "rank": 61, "score": 7.866277255987303 }, { "content": " }\n\n\n\n /// Reason phrase corresponding to each status code.\n\n pub const fn reason_phrase(&self) -> &'static [u8] {\n\n match &self {\n\n $(\n\n StatusCode::$entry => $phrase.as_bytes(),\n\n )+\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\ndefine_status_codes!(\n\n (200, Ok, \"OK\"),\n\n (301, MovedPermanently, \"Moved Permanently\"),\n\n (302, Found, \"Found\"),\n\n (303, SeeOther, \"See Other\"),\n\n (400, BadRequest, \"Bad Request\"),\n", "file_path": "src/status.rs", "rank": 62, "score": 7.838110475983865 }, { "content": "use std::{convert::From, fmt};\n\n\n\npub type HeaderValue = Vec<u8>;\n\n\n\nmacro_rules! 
define_headers {\n\n ($(($name:ident, $upper_str:expr, $lower_str:expr),)+) => {\n\n #[derive(Clone, Debug, Hash, PartialEq, Eq)]\n\n pub enum HeaderName {\n\n $($name,)+\n\n Unknown,\n\n }\n\n\n\n impl AsRef<[u8]> for HeaderName {\n\n fn as_ref(&self) -> &[u8] {\n\n match self {\n\n $(HeaderName::$name => $upper_str,)+\n\n HeaderName::Unknown => b\"Unknown\",\n\n }\n\n }\n\n }\n", "file_path": "src/header.rs", "rank": 63, "score": 7.837589283780623 }, { "content": " ]\n\n }\n\n}\n\n\n\nimpl Default for StatusCode {\n\n fn default() -> Self {\n\n StatusCode::Ok\n\n }\n\n}\n\n\n\nimpl From<io::Error> for StatusCode {\n\n fn from(err: io::Error) -> Self {\n\n use io::ErrorKind::*;\n\n match err.kind() {\n\n NotFound => StatusCode::NotFound,\n\n PermissionDenied => StatusCode::Forbidden,\n\n _ => StatusCode::InternalServerError,\n\n }\n\n }\n\n}\n\n\n\nimpl From<StatusCode> for Response {\n\n fn from(code: StatusCode) -> Self {\n\n Response::builder().set_status_code(code).build()\n\n }\n\n}\n", "file_path": "src/status.rs", "rank": 64, "score": 7.824163781885563 }, { "content": "use crate::response::Response;\n\nuse std::io;\n\n\n\nmacro_rules! 
define_status_codes {\n\n ($(($num:expr, $entry:ident, $phrase:expr),)+) => {\n\n #[derive(Clone, Copy, Debug, PartialEq, Eq)]\n\n pub enum StatusCode {\n\n $(\n\n $entry,\n\n )+\n\n }\n\n\n\n impl StatusCode {\n\n /// Status code as an integer.\n\n pub const fn code(&self) -> u16 {\n\n match &self {\n\n $(\n\n StatusCode::$entry => $num,\n\n )+\n\n }\n", "file_path": "src/status.rs", "rank": 65, "score": 7.728620668331913 }, { "content": " Self::Some(bytes) => bytes,\n\n }\n\n }\n\n}\n\n\n\nimpl From<String> for Body {\n\n fn from(s: String) -> Self {\n\n Self::Some(s.into())\n\n }\n\n}\n\n\n\nimpl<'a> From<&'a str> for Body {\n\n fn from(s: &'a str) -> Self {\n\n Self::Some(s.into())\n\n }\n\n}\n\n\n\nimpl From<Vec<u8>> for Body {\n\n fn from(bytes: Vec<u8>) -> Self {\n\n Self::Some(bytes)\n", "file_path": "src/body.rs", "rank": 66, "score": 7.589273307188597 }, { "content": "\n\n #[test]\n\n fn build_request() {\n\n let data = b\"GET /~/index.html HTTP/1.1\\r\\n\\r\\n\"\n\n .chunks(9)\n\n .map(|c| c.to_vec())\n\n .collect::<Vec<_>>();\n\n let mut request_buf = RequestBuffer::new();\n\n for message in data {\n\n match request_buf.try_parse(&message) {\n\n Ok(ParseState::Completed) => break,\n\n Ok(_) => continue,\n\n Err(err) => panic!(\"{:?}\", err),\n\n }\n\n }\n\n assert_eq!(\n\n Request::builder()\n\n .set_method(Method::Get)\n\n .set_uri(Uri::new(b\"/~/index.html\"))\n\n .build(),\n", "file_path": "src/request.rs", "rank": 67, "score": 7.5216187360383335 }, { "content": "}\n\n\n\nimpl From<String> for Origin {\n\n fn from(s: String) -> Self {\n\n if s == \"*\" {\n\n return Self::Any;\n\n }\n\n Self::Single(s.into_bytes())\n\n }\n\n}\n\n\n\nimpl From<&str> for Origin {\n\n fn from(s: &str) -> Self {\n\n Self::from(s.to_string())\n\n }\n\n}\n\n\n\nimpl AsRef<[u8]> for Origin {\n\n fn as_ref(&self) -> &[u8] {\n\n match self {\n", "file_path": "src/middleware/cors.rs", "rank": 68, "score": 7.495731560090816 }, { "content": " Origin::Any => b\"*\",\n\n 
Origin::Single(v) => v,\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::server::ServerBuilder;\n\n\n\n use super::*;\n\n\n\n const ALLOW_ORIGIN: &[u8] = b\"localhost:3000\";\n\n\n\n fn server() -> ServerBuilder<()> {\n\n ServerBuilder::new().route(\"/\", Method::Get, |_, _| async { \"Hello\" })\n\n }\n\n\n\n // Simple request is a request which does not need a preflight request.\n\n #[tokio::test]\n", "file_path": "src/middleware/cors.rs", "rank": 69, "score": 7.370991240088086 }, { "content": " request: Request,\n\n state: State,\n\n next: MiddlewareChain<'_, State>,\n\n ) -> Response\n\n where\n\n State: Clone + Send + Sync + 'static,\n\n {\n\n let uri = request.uri();\n\n if self.is_protected_uri(uri) {\n\n if let Err(code) = self.check_credential(&request) {\n\n assert_eq!(StatusCode::Unauthorized, code);\n\n let mut response = Response::from(code);\n\n response.set_header(HeaderName::WwwAuthenticate, \"Basic\");\n\n return response;\n\n }\n\n }\n\n next.run(request, state).await\n\n }\n\n}\n\n\n", "file_path": "src/middleware/basic_auth.rs", "rank": 71, "score": 7.28022486338949 }, { "content": "impl fmt::Display for Request {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n writeln!(f, \"{} {} HTTP/{}\", self.method, self.uri, self.version)?;\n\n for (name, value) in self.headers.iter() {\n\n writeln!(f, \"{}: {}\", name, str::from_utf8(&value).unwrap())?;\n\n }\n\n writeln!(f, \"{}\", self.body())?;\n\n Ok(())\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq)]\n\npub enum ParseState {\n\n RequestLine,\n\n Headers,\n\n Completed,\n\n}\n\n\n\n/// Construct `Request` from chunked data, which is mainly got from TCP stream.\n\n/// Currently, tokio does not buffered IO for `TcpStream`, so I implemented this by my own.\n", "file_path": "src/request.rs", "rank": 72, "score": 7.277596382015952 }, { "content": "use crate::{\n\n header::{HeaderName, HeaderValue},\n\n method::Method,\n\n middleware::{Middleware, 
MiddlewareChain},\n\n request::Request,\n\n response::Response,\n\n status::StatusCode,\n\n};\n\nuse async_trait::async_trait;\n\nuse std::convert::From;\n\n\n\n/// Middleware to handle CORS issue.\n\n#[derive(Debug)]\n\npub struct Cors {\n\n allow_origin: Origin,\n\n allow_methods: HeaderValue,\n\n allow_headers: HeaderValue,\n\n max_age: HeaderValue,\n\n}\n\n\n", "file_path": "src/middleware/cors.rs", "rank": 74, "score": 6.9355047465170605 }, { "content": " router: Arc::new(self.router),\n\n state: self.state,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Server<State>\n\nwhere\n\n State: Clone + Send + Sync + 'static,\n\n{\n\n // Wrap with `Arc` to pass over tokio task without moving `self`.\n\n middlewares: Arc<Vec<Arc<dyn Middleware<State>>>>,\n\n router: Arc<Router<State>>,\n\n state: State,\n\n}\n\n\n\nimpl Server<()> {\n\n pub fn builder() -> ServerBuilder<()> {\n\n ServerBuilder::new()\n", "file_path": "src/server.rs", "rank": 75, "score": 6.8419410484410434 }, { "content": " .set_method(Method::Get)\n\n .set_uri(Uri::new(b\"/~/index.html\"))\n\n .set_header(HeaderName::Accept, b\"*/*\".to_vec())\n\n .set_header(HeaderName::Host, b\"localhost:8080\".to_vec())\n\n .build(),\n\n request_buf.complete()\n\n );\n\n }\n\n\n\n #[test]\n\n fn build_request_with_large_chunk() {\n\n let data = b\"GET /~/index.html HTTP/1.1\\r\\nAccept: */*\\r\\nHost: localhost:8080\\r\\nUser-Agent: curl\\r\\n\"\n\n .chunks(64)\n\n .map(|c| c.to_vec())\n\n .collect::<Vec<_>>();\n\n let mut request_buf = RequestBuffer::new();\n\n for message in data {\n\n match request_buf.try_parse(&message) {\n\n Ok(ParseState::Completed) => break,\n\n Ok(_) => continue,\n", "file_path": "src/request.rs", "rank": 76, "score": 6.742868030314847 }, { "content": " self.path = path.to_vec();\n\n let mut handlers: HashMap<Method, Box<dyn Handler<State> + 'static>> = HashMap::new();\n\n handlers.insert(method, Box::new(handler));\n\n self.children.push(Self {\n\n path: b\"*\".to_vec(),\n\n 
handlers,\n\n children: Vec::new(),\n\n });\n\n }\n\n\n\n /// Return how many common character path of `Route` nodes and an arugument have.\n\n fn longest_common_prefix(&self, other: &[u8]) -> usize {\n\n let mut pos = 0;\n\n for (char_self, char_other) in self.path.iter().zip(other.iter()) {\n\n if char_self == char_other {\n\n pos += 1;\n\n } else {\n\n break;\n\n }\n\n }\n", "file_path": "src/router.rs", "rank": 77, "score": 6.732233130345898 }, { "content": " .collect::<Vec<_>>();\n\n let mut request_buf = RequestBuffer::new();\n\n for message in data {\n\n match request_buf.try_parse(&message) {\n\n Ok(ParseState::Completed) => break,\n\n Ok(_) => continue,\n\n Err(_) => panic!(),\n\n }\n\n }\n\n assert_eq!(\n\n Request::builder()\n\n .set_method(Method::Get)\n\n .set_uri(Uri::new(b\"/~/index.html\"))\n\n .set_header(HeaderName::ContentLength, b\"13\".to_vec())\n\n .set_body(b\"Hello, World!\".to_vec())\n\n .build(),\n\n request_buf.complete()\n\n );\n\n }\n\n}\n", "file_path": "src/request.rs", "rank": 78, "score": 6.565488469740859 }, { "content": " self.headers.insert(name, value.into());\n\n }\n\n\n\n pub fn body(&self) -> &Body {\n\n &self.body\n\n }\n\n\n\n pub fn set_body(&mut self, body: impl Into<Body>) {\n\n self.body = body.into();\n\n }\n\n\n\n pub fn body_form<T: DeserializeOwned>(&self) -> crate::Result<T> {\n\n self.body.into_form()\n\n }\n\n\n\n pub fn body_json<T: DeserializeOwned>(&self) -> crate::Result<T> {\n\n self.body.into_json()\n\n }\n\n}\n\n\n", "file_path": "src/request.rs", "rank": 79, "score": 6.496117379589558 }, { "content": " let mut serve_at_wildcard = serve_at.trim_end_matches('/').to_string();\n\n serve_at_wildcard.push_str(\"/*\");\n\n self.route(\n\n &serve_at_wildcard,\n\n Method::Get,\n\n StaticDir::mount(dir, serve_at),\n\n )\n\n }\n\n\n\n pub fn serve_file<P>(self, serve_at: &str, path: P) -> io::Result<Self>\n\n where\n\n P: AsRef<Path>,\n\n {\n\n let file = StaticFile::mount(path)?;\n\n Ok(self.route(&serve_at, 
Method::Get, file))\n\n }\n\n\n\n pub fn build(self) -> Server<State> {\n\n Server {\n\n middlewares: Arc::new(self.middlewares),\n", "file_path": "src/server.rs", "rank": 80, "score": 6.247280440112793 }, { "content": " Response::builder()\n\n .set_header(HeaderName::ContentLength, s.len().to_string())\n\n .set_content_type(mime::TEXT_PLAIN)\n\n .set_body(Body::from(s))\n\n .build()\n\n }\n\n}\n\n\n\nimpl From<Vec<u8>> for Response {\n\n fn from(bytes: Vec<u8>) -> Self {\n\n Response::builder()\n\n .set_header(HeaderName::ContentLength, bytes.len().to_string())\n\n .set_body(Body::from(bytes))\n\n .build()\n\n }\n\n}\n\n\n\nimpl<'a> From<&'a [u8]> for Response {\n\n fn from(bytes: &'a [u8]) -> Self {\n\n Response::builder()\n", "file_path": "src/response.rs", "rank": 81, "score": 6.237599443871241 }, { "content": " }\n\n}\n\n\n\nimpl From<&[u8]> for Body {\n\n fn from(bytes: &[u8]) -> Self {\n\n Self::Some(bytes.into())\n\n }\n\n}\n\n\n\nimpl From<Body> for Response {\n\n fn from(body: Body) -> Self {\n\n Response::builder().set_body(body).build()\n\n }\n\n}\n\n\n\nimpl fmt::Display for Body {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match &self {\n\n Body::Some(bytes) => match std::str::from_utf8(&bytes) {\n\n Ok(s) => write!(f, \"{}\", s),\n\n Err(_) => write!(f, \"{:?}\", bytes),\n\n },\n\n Body::None => write!(f, \"\"),\n\n }\n\n }\n\n}\n", "file_path": "src/body.rs", "rank": 82, "score": 6.154197155352232 }, { "content": " return self.handle_preflight();\n\n }\n\n\n\n let mut response = next.run(request, state).await;\n\n response.set_header(\n\n HeaderName::AccessControlAllowOrigin,\n\n self.allow_origin.as_ref(),\n\n );\n\n response\n\n }\n\n}\n\n\n\n/// Represents origin. 
For simplicity, multiple origins are not supported.\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub enum Origin {\n\n /// Wildcard, means any origin.\n\n Any,\n\n /// One origin.\n\n Single(Vec<u8>),\n\n}\n", "file_path": "src/middleware/cors.rs", "rank": 83, "score": 6.139342059014234 }, { "content": " }\n\n\n\n /// Parse `Body` as `application/json` data.\n\n ///\n\n /// # Examples\n\n /// ```rust\n\n /// use qz::body::Body;\n\n /// use serde::Serialize;\n\n /// #[derive(Serialize)]\n\n /// struct User {\n\n /// username: String,\n\n /// password: String,\n\n /// }\n\n ///\n\n /// let user = User { username: \"John\".to_string(), password: \"qwerty\".to_string() };\n\n /// let body = Body::from_json(&user).unwrap();\n\n /// assert_eq!(Body::from(r#\"{\"username\":\"John\",\"password\":\"qwerty\"}\"#), body);\n\n /// ```\n\n pub fn from_json(json: &impl Serialize) -> crate::Result<Body> {\n\n let bytes = serde_json::to_vec(&json).or(Err(StatusCode::BadRequest))?;\n", "file_path": "src/body.rs", "rank": 84, "score": 6.045640764350613 }, { "content": "use qz::server::Server;\n\nuse std::io;\n\n\n\n#[tokio::main]\n\nasync fn main() -> io::Result<()> {\n\n let server = Server::builder().serve_dir(\"/\", \"./target/doc\").build();\n\n Server::run(server, 8080).await\n\n}\n", "file_path": "examples/static_dir.rs", "rank": 85, "score": 6.040950847032002 }, { "content": "use std::ffi::OsStr;\n\nuse std::path::Path;\n\n\n\nmacro_rules! 
define_mime_types {\n\n ($(($entry:ident, $lit:expr),)+) => {\n\n $(\n\n pub const $entry: &'static [u8] = $lit.as_bytes();\n\n )+\n\n };\n\n}\n\n\n\ndefine_mime_types!(\n\n (TEXT_PLAIN, \"text/plain\"),\n\n (TEXT_HTML, \"text/html\"),\n\n (TEXT_CSS, \"text/css\"),\n\n (TEXT_JAVASCRIPT, \"text/javascript\"),\n\n (IMAGE_JPG, \"image/jpg\"),\n\n (IMAGE_PNG, \"image/png\"),\n\n (APPLICATION_JSON, \"application/json\"),\n\n (APPLICATION_WWW_FORM, \"application/x-www-form-urlencoded\"),\n\n);\n\n\n", "file_path": "src/mime.rs", "rank": 86, "score": 6.031315452475216 }, { "content": " loop {\n\n match stream.read(&mut buf).await {\n\n Ok(0) => return Err(()),\n\n Ok(_) => match request_buf.try_parse(&buf) {\n\n Ok(ParseState::Completed) => break,\n\n Ok(_) => continue,\n\n Err(code) => return Ok(Response::from(code)),\n\n },\n\n Err(_) => {\n\n return Err(());\n\n }\n\n };\n\n }\n\n\n\n let request = request_buf.complete();\n\n let response = self.respond(request).await;\n\n Ok(response)\n\n }\n\n\n\n pub(crate) async fn respond(self, request: Request) -> Response {\n", "file_path": "src/server.rs", "rank": 87, "score": 5.98405495781334 }, { "content": " let Server {\n\n middlewares,\n\n router,\n\n state,\n\n } = self;\n\n\n\n println!(\"{}\", request);\n\n let handler = router.find(request.uri(), request.method());\n\n let chain = MiddlewareChain {\n\n handler,\n\n middlewares: &middlewares,\n\n };\n\n chain.run(request, state).await\n\n }\n\n}\n", "file_path": "src/server.rs", "rank": 88, "score": 5.961133306248215 }, { "content": " /// use qz::body::Body;\n\n /// use serde::Deserialize;\n\n /// #[derive(Deserialize)]\n\n /// struct User {\n\n /// username: String,\n\n /// password: String,\n\n /// }\n\n ///\n\n /// let body = Body::from(\"username=John&password=qwerty\");\n\n /// let user: User = body.into_form().unwrap();\n\n /// assert_eq!(\"John\", &user.username);\n\n /// assert_eq!(\"qwerty\", &user.password);\n\n /// ```\n\n pub fn into_form<T: 
DeserializeOwned>(&self) -> crate::Result<T> {\n\n match &self {\n\n Body::Some(bytes) => {\n\n serde_urlencoded::from_bytes::<T>(bytes).or(Err(StatusCode::BadRequest))\n\n }\n\n Body::None => Err(StatusCode::InternalServerError),\n\n }\n", "file_path": "src/body.rs", "rank": 89, "score": 5.86780432115217 }, { "content": "\n\n struct DummyB;\n\n #[async_trait]\n\n impl Middleware<()> for DummyB {\n\n async fn call(\n\n &self,\n\n mut request: Request,\n\n state: (),\n\n next: MiddlewareChain<'_, ()>,\n\n ) -> Response {\n\n request.set_header(HeaderName::Host, \"localhost\");\n\n let mut response = next.run(request, state).await;\n\n response.set_header(HeaderName::Location, \"example.com\");\n\n response\n\n }\n\n }\n\n\n\n struct Test;\n\n #[async_trait]\n\n impl Middleware<()> for Test {\n", "file_path": "src/middleware.rs", "rank": 90, "score": 5.818880034575164 }, { "content": " fn from(redirect: Redirect) -> Self {\n\n (&redirect).into()\n\n }\n\n}\n\n\n\nimpl From<&Redirect> for Response {\n\n fn from(redirect: &Redirect) -> Self {\n\n Response::builder()\n\n .set_status_code(redirect.status_code)\n\n .set_header(HeaderName::Location, redirect.uri.as_ref())\n\n .build()\n\n }\n\n}\n", "file_path": "src/redirect.rs", "rank": 91, "score": 5.782710960611883 }, { "content": "use qz::server::Server;\n\nuse std::io;\n\n\n\n#[tokio::main]\n\nasync fn main() -> io::Result<()> {\n\n let server = Server::builder()\n\n .serve_file(\"/pages\", \"./examples/assets/index.html\")?\n\n .build();\n\n Server::run(server, 8080).await\n\n}\n", "file_path": "examples/static_file.rs", "rank": 92, "score": 5.7533194509317225 }, { "content": " Method::Post => write!(f, \"POST\"),\n\n Method::Options => write!(f, \"OPTIONS\"),\n\n }\n\n }\n\n}\n\n\n\nimpl TryFrom<&[u8]> for Method {\n\n type Error = StatusCode;\n\n fn try_from(value: &[u8]) -> Result<Self, Self::Error> {\n\n match std::str::from_utf8(value) {\n\n Ok(\"GET\") => Ok(Method::Get),\n\n Ok(\"POST\") => 
Ok(Method::Post),\n\n Ok(\"OPTONS\") => Ok(Method::Options),\n\n _ => Err(StatusCode::MethodNotAllowed),\n\n }\n\n }\n\n}\n", "file_path": "src/method.rs", "rank": 93, "score": 5.7125579666781165 }, { "content": " self.allow_methods = methods.into();\n\n self\n\n }\n\n\n\n pub fn allow_headers(mut self, headers: impl Into<HeaderValue>) -> Self {\n\n self.allow_headers = headers.into();\n\n self\n\n }\n\n\n\n pub fn max_age(mut self, max_age: impl Into<HeaderValue>) -> Self {\n\n self.max_age = max_age.into();\n\n self\n\n }\n\n\n\n fn is_valid_origin(&self, origin: &HeaderValue) -> bool {\n\n match &self.allow_origin {\n\n Origin::Any => true,\n\n Origin::Single(v) => v == origin,\n\n }\n\n }\n", "file_path": "src/middleware/cors.rs", "rank": 94, "score": 5.688927180451522 }, { "content": " credential_hash,\n\n auth_root: auth_root.into(),\n\n }\n\n }\n\n\n\n fn is_protected_uri(&self, uri: &Uri) -> bool {\n\n uri.0.starts_with(&self.auth_root.0)\n\n }\n\n\n\n fn check_credential(&self, request: &Request) -> crate::Result<()> {\n\n let credential = request\n\n .get_header(HeaderName::Authorization)\n\n .ok_or(StatusCode::Unauthorized)?;\n\n // credential = [b\"Basic:\", b\"xxxxxxx\"]\n\n let mut credential = credential.split(|&c| c == b' ');\n\n credential\n\n .next()\n\n .and_then(|c| if c == b\"Basic\" { Some(()) } else { None })\n\n .ok_or(StatusCode::Unauthorized)?;\n\n credential\n", "file_path": "src/middleware/basic_auth.rs", "rank": 95, "score": 5.554761904716571 }, { "content": " Err(_) => panic!(),\n\n }\n\n }\n\n assert_eq!(\n\n Request::builder()\n\n .set_method(Method::Get)\n\n .set_uri(Uri::new(b\"/~/index.html\"))\n\n .set_header(HeaderName::Accept, b\"*/*\".to_vec())\n\n .set_header(HeaderName::Host, b\"localhost:8080\".to_vec())\n\n .set_header(HeaderName::UserAgent, b\"curl\".to_vec())\n\n .build(),\n\n request_buf.complete()\n\n );\n\n }\n\n\n\n #[test]\n\n fn build_request_with_body() {\n\n let data = b\"GET /~/index.html 
HTTP/1.1\\r\\nContent-Length: 13\\r\\n\\r\\nHello, World!\"\n\n .chunks(64)\n\n .map(|c| c.to_vec())\n", "file_path": "src/request.rs", "rank": 96, "score": 5.429342351342292 }, { "content": " fn from(res: crate::Result<Response>) -> Self {\n\n match res {\n\n Ok(res) => res,\n\n Err(code) => code.into(),\n\n }\n\n }\n\n}\n\n\n\nimpl From<String> for Response {\n\n fn from(s: String) -> Self {\n\n Response::builder()\n\n .set_header(HeaderName::ContentLength, s.len().to_string())\n\n .set_content_type(mime::TEXT_PLAIN)\n\n .set_body(Body::from(s))\n\n .build()\n\n }\n\n}\n\n\n\nimpl<'a> From<&'a str> for Response {\n\n fn from(s: &'a str) -> Self {\n", "file_path": "src/response.rs", "rank": 97, "score": 5.404992000950666 }, { "content": " self.inner.headers.insert(name, value);\n\n Ok(())\n\n }\n\n\n\n fn parse_body(&mut self, bytes: &[u8]) -> crate::Result<()> {\n\n let body_len = std::str::from_utf8(\n\n self.inner\n\n .get_header(HeaderName::ContentLength)\n\n .ok_or(StatusCode::LengthRequired)?,\n\n )\n\n .or(Err(StatusCode::LengthRequired))?\n\n .parse::<usize>()\n\n .or(Err(StatusCode::LengthRequired))?;\n\n let mut p = Parser::new(bytes);\n\n let body = p.parse_body(body_len)?;\n\n self.inner.set_body(body);\n\n Ok(())\n\n }\n\n\n\n pub fn build(self) -> Request {\n", "file_path": "src/request.rs", "rank": 98, "score": 5.330302374271005 }, { "content": "//! qz is a HTTP server library for my learn. All of the specification of HTTP/1.1 is not covered\n\n//! and not performant enough. The name of qz is comes from \"給仕\", which means waiter in Japanese.\n\n//!\n\n//! # Getting Started\n\n//!\n\n//! To build app with this library, first add following to `Cargo.toml`:\n\n//! ```toml\n\n//! qz = \"0.1\"\n\n//! tokio = { version = \"1.5\", features = [\"macros\"] }\n\n//! serde = { version = \"1.0\", features = [\"derive\"] }\n\n//! ```\n\n//!\n\n//! # Examples\n\n//!\n\n//! ```no_run\n\n//! 
use qz::{method::Method, request::Request, response::Response, server::Server};\n\n//! use std::io;\n\n//!\n\n//! async fn hello(_request: Request, _: ()) -> impl Into<Response> {\n\n//! \"hello\"\n", "file_path": "src/lib.rs", "rank": 99, "score": 5.3212992700622905 } ]
Rust
crates/codec/src/codec/limit.rs
YZITE/futures
9d4300dfaa22d0bc7cb51bf41edd305aa95b2839
#![allow(missing_docs)] use super::{Decoder, Encoder, EncoderError}; use bytes::{Buf, BytesMut}; pub trait SkipAheadHandler: Sized + std::fmt::Debug { fn continue_skipping(self, src: &[u8]) -> Result<(usize, Option<Self>), ()>; } impl SkipAheadHandler for () { fn continue_skipping(self, _: &[u8]) -> Result<(usize, Option<Self>), ()> { Ok((0, None)) } } pub trait DecoderWithSkipAhead: Decoder { type Handler: SkipAheadHandler; fn prepare_skip_ahead(&mut self, src: &mut BytesMut) -> Self::Handler; } #[derive(Debug)] pub struct Limit<C: DecoderWithSkipAhead> { inner: C, max_frame_size: usize, skip_ahead_state: Option<<C as DecoderWithSkipAhead>::Handler>, decoder_defunct: bool, } impl<C> Limit<C> where C: DecoderWithSkipAhead, { pub fn new(inner: C, max_frame_size: usize) -> Self { Self { inner, max_frame_size, skip_ahead_state: None, decoder_defunct: false, } } } #[derive(Debug, thiserror::Error)] pub enum LimitError<E: std::error::Error + 'static> { #[error("frame size limit exceeded (detected at {0} bytes)")] LimitExceeded(usize), #[error("codec couldn't recover from invalid / too big frame")] Defunct, #[error(transparent)] Inner(#[from] E), } impl<C: DecoderWithSkipAhead + EncoderError> EncoderError for Limit<C> { type Error = LimitError<<C as EncoderError>::Error>; } impl<Item, C> Encoder<Item> for Limit<C> where Item: ?Sized, C: Encoder<Item> + DecoderWithSkipAhead, { fn encode(&mut self, src: &Item, dst: &mut BytesMut) -> Result<(), Self::Error> { let mut tmp_dst = dst.split_off(dst.len()); self.inner.encode(src, &mut tmp_dst)?; if tmp_dst.len() > self.max_frame_size { return Err(LimitError::LimitExceeded(tmp_dst.len())); } dst.unsplit(tmp_dst); Ok(()) } } impl<C> Decoder for Limit<C> where C: DecoderWithSkipAhead, { type Item = <C as Decoder>::Item; type Error = LimitError<<C as Decoder>::Error>; fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> { while let Some(sas) = self.skip_ahead_state.take() { match 
sas.continue_skipping(&src) { Ok((amount, next)) => { self.skip_ahead_state = next; debug_assert!(amount <= src.len()); src.advance(amount); debug_assert!(amount != 0 || self.skip_ahead_state.is_none()); if src.len() == 0 { return Ok(None); } } Err(()) => { self.decoder_defunct = true; } } } if self.decoder_defunct { src.clear(); return Err(LimitError::Defunct); } match self.inner.decode(src) { Ok(None) if src.len() > self.max_frame_size => { self.skip_ahead_state = Some(self.inner.prepare_skip_ahead(src)); Err(LimitError::LimitExceeded(src.len())) } Ok(x) => Ok(x), Err(x) => Err(LimitError::Inner(x)), } } } /* #[cfg(test)] mod tests { use super::*; mod decode { use super::*; #[test] fn x() { } } } */
#![allow(missing_docs)] use super::{Decoder, Encoder, EncoderError}; use bytes::{Buf, BytesMut}; pub trait SkipAheadHandler: Sized + std::fmt::Debug { fn continue_skipping(self, src: &[u8]) -> Result<(usize, Option<Self>), ()>; } impl SkipAheadHandler for () { fn continue_skipping(self, _: &[u8]) -> Result<(usize, Option<Self>), ()> { Ok((0, None)) } } pub trait DecoderWithSkipAhead: Decoder { type Handler: SkipAheadHandler; fn prepare_skip_ahead(&mut self, src: &mut BytesMut) -> Self::Handler; } #[derive(Debug)] pub struct Limit<C: DecoderWithSkipAhead> { inner: C, max_frame_size: usize, skip_ahead_state: Option<<C as DecoderWithSkipAhead>::Handler>, decoder_defunct: bool, } impl<C> Limit<C> where C: DecoderWithSkipAhead, { pub fn new(inner: C, max_frame_size: usize) -> Self { Self { inner, max_frame_size, skip_ahead_state: None, decoder_defunct: false, } } } #[derive(Debug, thiserror::Error)] pub enum LimitError<E: std::error::Error + 'static> { #[error("frame size limit exceeded (detected at {0} bytes)")] LimitExceeded(usize), #[error("codec couldn't recover from invalid / too big frame")] Defunct, #[error(transparent)] Inner(#[from] E), } impl<C: DecoderWithSkipAhead + EncoderError> EncoderError for Limit<C> { type Error = LimitError<<C as EncoderError>::Error>; } impl<Item, C> Encoder<Item> for Limit<C> where Item: ?Sized, C: Encoder<Item> + DecoderWithSkipAhead, { fn encode(&mut self, src: &Item, dst: &mut BytesMut) -> Result<(), Self::Error> { let mut tmp_dst = dst.split_off(dst.len()); self.inner.encode(src, &mut tmp_dst)?; if tmp_dst.len() > self.max_frame_size { return Err(LimitError::LimitExceeded(tmp_dst.len())); } dst.unsplit(tmp_dst); Ok(()) } } impl<C> Decoder for Limit<C> where C: DecoderWithSkipAhead, { type Item = <C as Decoder>::Item; type Error = LimitError<<C as Decoder>::Error>; fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> { while let Some(sas) = self.skip_ahead_state.take() { match 
sas.continue_skipping(&src) { Ok((amount, next)) => { self.skip_ahead_state = next; debug_assert!(amount <= src.len()); src.advance(amount); debug_assert!(amount != 0 || self.skip_ahead_state.is_none()); if src.len() == 0 { return Ok(None); } } Err(()) => { self.decoder_defunct = true; } } } if self.decoder_defunct { src.clear(); return Err(LimitError::Defunct); }
} } /* #[cfg(test)] mod tests { use super::*; mod decode { use super::*; #[test] fn x() { } } } */
match self.inner.decode(src) { Ok(None) if src.len() > self.max_frame_size => { self.skip_ahead_state = Some(self.inner.prepare_skip_ahead(src)); Err(LimitError::LimitExceeded(src.len())) } Ok(x) => Ok(x), Err(x) => Err(LimitError::Inner(x)), }
if_condition
[ { "content": "/// Encoding of messages as bytes, for use with [`Framed`](crate::Framed).\n\n///\n\n/// `Item` is the type of items consumed by `encode`\n\npub trait Encoder<Item: ?Sized>: EncoderError {\n\n /// Encodes an item into the `BytesMut` provided by dst.\n\n fn encode(&mut self, item: &Item, dst: &mut BytesMut) -> Result<(), Self::Error>;\n\n}\n\n\n\nmacro_rules! impl_phantom {\n\n ($t:ident < $($param:ident),+ >) => {\n\n impl<$($param),+> $t<$($param),+> {\n\n #[allow(missing_docs)]\n\n pub const fn new() -> Self {\n\n Self(PhantomData)\n\n }\n\n }\n\n impl<$($param),+> ::std::clone::Clone for $t<$($param),+> {\n\n fn clone(&self) -> Self { Self::new() }\n\n }\n\n impl<$($param),+> ::std::fmt::Debug for $t<$($param),+> {\n\n fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {\n\n f.debug_struct(stringify!($t)).finish()\n\n }\n", "file_path": "crates/codec/src/codec/mod.rs", "rank": 0, "score": 189573.08832434652 }, { "content": "/// helper trait\n\npub trait EncoderError {\n\n /// The type of encoding errors.\n\n type Error: std::error::Error + 'static;\n\n}\n\n\n", "file_path": "crates/codec/src/codec/mod.rs", "rank": 1, "score": 151093.31774888278 }, { "content": "pub trait SinkExt<Item>: Sink<Item> {\n\n fn send_unpin(&mut self, item: Item) -> Send<'_, Self, Item>\n\n where\n\n Self: Unpin,\n\n {\n\n Send {\n\n sink: Pin::new(self),\n\n item: Some(item),\n\n }\n\n }\n\n\n\n fn send(self: Pin<&mut Self>, item: Item) -> Send<'_, Self, Item> {\n\n Send {\n\n sink: self,\n\n item: Some(item),\n\n }\n\n }\n\n\n\n fn send_all_unpin<'a, St>(&'a mut self, st: &'a mut St) -> SendAll<'a, Self, St, Item>\n\n where\n", "file_path": "crates/util/src/sink.rs", "rank": 4, "score": 124037.95421149576 }, { "content": "#[must_use = \"sinks do nothing unless polled\"]\n\npub trait Sink<Item>: FlushSink {\n\n /// Begin the process of sending a value to the sink.\n\n /// Each call to this function must be preceded by a successful call to\n\n /// 
`poll_ready` which returned `Poll::Ready(Ok(()))`.\n\n ///\n\n /// As the name suggests, this method only *begins* the process of sending\n\n /// the item. If the sink employs buffering, the item isn't fully processed\n\n /// until the buffer is fully flushed. Since sinks are designed to work with\n\n /// asynchronous I/O, the process of actually writing out the data to an\n\n /// underlying object takes place asynchronously. **You *must* use\n\n /// `poll_flush` or `poll_close` in order to guarantee completion of a\n\n /// send**.\n\n ///\n\n /// Implementations of `poll_ready` and `start_send` will usually involve\n\n /// flushing behind the scenes in order to make room for new messages.\n\n /// It is only necessary to call `poll_flush` if you need to guarantee that\n\n /// *all* of the items placed into the `Sink` have been sent.\n\n ///\n\n /// In most cases, if the sink encounters an error, the sink will\n\n /// permanently be unable to receive items.\n", "file_path": "crates/sink/src/lib.rs", "rank": 5, "score": 116595.06197895165 }, { "content": "/// Decoding of frames via buffers, for use with [`Framed`](crate::Framed).\n\npub trait Decoder {\n\n /// The type of items returned by `decode`\n\n type Item;\n\n /// The type of decoding errors.\n\n type Error: std::error::Error + 'static;\n\n\n\n /// Decode an item from the src `BytesMut` into an item\n\n fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error>;\n\n\n\n /// Called when the input stream reaches EOF, signaling a last attempt to decode\n\n ///\n\n /// # Notes\n\n ///\n\n /// The default implementation of this method invokes the `Decoder::decode` method.\n\n fn decode_eof(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {\n\n self.decode(src)\n\n }\n\n}\n\n\n", "file_path": "crates/codec/src/codec/mod.rs", "rank": 6, "score": 115418.13643525266 }, { "content": "/// invariant: `std::mem::size_of::<Self>()` has the same length as the serialization of 
`Self`\n\npub trait LengthType {\n\n /// this method should write the given `x` into the destination buffer\n\n fn encode(x: usize, dst: &mut BytesMut) -> Result<(), OverflowError>;\n\n\n\n /// this method should decode the length from the buffer\n\n /// (it shouldn't and can't discard it, tho)\n\n ///\n\n /// pre-condition: `src.len() >= std::mem::size_of::<Self>()`\n\n fn start_decode(src: &[u8]) -> u64;\n\n}\n\n\n\nmacro_rules! impl_length {\n\n ($($x:ty => $y:expr),+ $(,)?) => {\n\n $(\n\n impl LengthType for $x {\n\n fn encode(x: usize, dst: &mut BytesMut) -> Result<(), OverflowError> {\n\n let this = Self::try_from(x).map_err(|_| OverflowError)?;\n\n dst.extend_from_slice(&Self::to_be_bytes(this));\n\n Ok(())\n\n }\n", "file_path": "crates/codec/src/codec/length.rs", "rank": 7, "score": 112362.08917023578 }, { "content": "#[must_use = \"sinks do nothing unless polled\"]\n\npub trait FlushSink {\n\n /// The type of value produced by the sink when an error occurs.\n\n type Error;\n\n\n\n /// Attempts to prepare the `Sink` to receive a value.\n\n ///\n\n /// This method must be called and return `Poll::Ready(Ok(()))` prior to\n\n /// each call to `start_send`.\n\n ///\n\n /// This method returns `Poll::Ready` once the underlying sink is ready to\n\n /// receive data. 
If this method returns `Poll::Pending`, the current task\n\n /// is registered to be notified (via `cx.waker().wake_by_ref()`) when `poll_ready`\n\n /// should be called again.\n\n ///\n\n /// In most cases, if the sink encounters an error, the sink will\n\n /// permanently be unable to receive items.\n\n fn poll_ready(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>>;\n\n\n\n /// Flush any remaining output from this sink.\n\n ///\n", "file_path": "crates/sink/src/lib.rs", "rank": 8, "score": 91819.85851616166 }, { "content": "#[test]\n\nfn decodes() {\n\n let mut buf = [0u8; 32];\n\n let expected = buf.clone();\n\n let cur = Cursor::new(&mut buf[..]);\n\n let mut framed = Framed::new(cur, BytesCodec {});\n\n\n\n let read = block_on(framed.try_next()).unwrap().unwrap();\n\n assert_eq!(&read[..], &expected[..]);\n\n\n\n assert!(block_on(framed.try_next()).unwrap().is_none());\n\n}\n", "file_path": "crates/codec/tests/bytes.rs", "rank": 9, "score": 91561.73717013781 }, { "content": "#[bench]\n\nfn medium(b: &mut test::Bencher) {\n\n let data = [\n\n [\"a\"; 128].join(\"b\"),\n\n [\"b\"; 128].join(\"c\"),\n\n [\"c\"; 128].join(\"d\"),\n\n ]\n\n .join(\"\\n\");\n\n b.iter(|| {\n\n block_on(async {\n\n let read = Cursor::new(test::black_box(&data));\n\n let mut framed = Framed::new(read, Lines);\n\n\n\n framed.try_next().await.unwrap();\n\n framed.try_next().await.unwrap();\n\n framed.try_next().await.is_ok()\n\n })\n\n })\n\n}\n\n\n", "file_path": "crates/codec/benches/lines.rs", "rank": 10, "score": 66878.6691168389 }, { "content": "#[bench]\n\nfn long(b: &mut test::Bencher) {\n\n let data = [\n\n [\"a\"; 2048].join(\"b\"),\n\n [\"b\"; 2048].join(\"c\"),\n\n [\"c\"; 2048].join(\"d\"),\n\n ]\n\n .join(\"\\n\");\n\n b.iter(|| {\n\n block_on(async {\n\n let read = Cursor::new(test::black_box(&data));\n\n let mut framed = Framed::new(read, Lines);\n\n\n\n framed.try_next().await.unwrap();\n\n framed.try_next().await.unwrap();\n\n 
framed.try_next().await.is_ok()\n\n })\n\n })\n\n}\n", "file_path": "crates/codec/benches/lines.rs", "rank": 11, "score": 66878.6691168389 }, { "content": "#[bench]\n\nfn short(b: &mut test::Bencher) {\n\n let data = [\n\n [\"a\"; 16].join(\"b\"),\n\n [\"b\"; 16].join(\"c\"),\n\n [\"c\"; 16].join(\"d\"),\n\n ]\n\n .join(\"\\n\");\n\n b.iter(|| {\n\n block_on(async {\n\n let read = Cursor::new(test::black_box(&data));\n\n let mut framed = Framed::new(read, Lines);\n\n\n\n framed.try_next().await.unwrap();\n\n framed.try_next().await.unwrap();\n\n framed.try_next().await.is_ok()\n\n })\n\n })\n\n}\n\n\n", "file_path": "crates/codec/benches/lines.rs", "rank": 12, "score": 66878.6691168389 }, { "content": "struct OneByteAtATime<'a> {\n\n input: &'a [u8],\n\n}\n\nimpl AsyncRead for OneByteAtATime<'_> {\n\n fn poll_read(\n\n mut self: Pin<&mut Self>,\n\n _cx: &mut Context<'_>,\n\n buf: &mut [u8],\n\n ) -> Poll<io::Result<usize>> {\n\n if self.input.is_empty() {\n\n Poll::Ready(Ok(0))\n\n } else {\n\n buf[0] = self.input[0];\n\n self.input = &self.input[1..];\n\n Poll::Ready(Ok(1))\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/codec/tests/read.rs", "rank": 13, "score": 62556.09162561255 }, { "content": "/// A decoder that only returns `a` characters from the input.\n\nstruct AllTheAs;\n\n\n\nimpl Decoder for AllTheAs {\n\n type Item = char;\n\n type Error = io::Error;\n\n\n\n fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {\n\n while !src.is_empty() {\n\n let buf = src.split_to(1);\n\n let c = char::from(buf[0]);\n\n if c == 'a' {\n\n return Ok(Some(c));\n\n }\n\n }\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "crates/codec/tests/read.rs", "rank": 14, "score": 44221.596111349674 }, { "content": "/// framed.send_unpin(\"Hello World!\").await?;\n\n///\n\n/// while let Some(bytes) = framed.try_next().await? 
{\n\n/// dbg!(bytes);\n\n/// }\n\n/// # Ok::<_, yz_futures_codec::Error<_>>(())\n\n/// # }).unwrap();\n\n/// ```\n\n#[derive(Clone, Debug, Default, PartialEq)]\n\npub struct BytesCodec;\n\n\n\nimpl super::EncoderError for BytesCodec {\n\n type Error = Infallible;\n\n}\n\n\n\nimpl<Item> Encoder<Item> for BytesCodec\n\nwhere\n\n Item: AsRef<[u8]> + ?Sized,\n\n{\n\n fn encode(&mut self, src: &Item, dst: &mut BytesMut) -> Result<(), Self::Error> {\n", "file_path": "crates/codec/src/codec/bytes.rs", "rank": 15, "score": 43286.935963266755 }, { "content": " dst.extend_from_slice(src.as_ref());\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl Decoder for BytesCodec {\n\n type Item = Bytes;\n\n type Error = Infallible;\n\n\n\n fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {\n\n let len = src.len();\n\n Ok(if len > 0 {\n\n Some(src.split_to(len).freeze())\n\n } else {\n\n None\n\n })\n\n }\n\n}\n", "file_path": "crates/codec/src/codec/bytes.rs", "rank": 16, "score": 43283.398153401664 }, { "content": "use super::{Decoder, Encoder};\n\nuse bytes::{Bytes, BytesMut};\n\nuse std::convert::Infallible;\n\n\n\n/// A simple codec that ships bytes around\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// # futures_lite::future::block_on(async move {\n\n/// use bytes::Bytes;\n\n/// use futures_util::{stream::TryStreamExt, io::Cursor};\n\n/// use yz_futures_codec::{codec::BytesCodec, Framed};\n\n/// use yz_futures_util::sink::SinkExt;\n\n///\n\n/// let mut buf = vec![];\n\n/// // Cursor implements AsyncRead and AsyncWrite\n\n/// let cur = Cursor::new(&mut buf);\n\n/// let mut framed = Framed::new(cur, BytesCodec);\n\n///\n", "file_path": "crates/codec/src/codec/bytes.rs", "rank": 17, "score": 43267.19764908204 }, { "content": "// An AsyncWrite which is always ready and just consumes the data\n\nstruct AsyncWriteNull {\n\n // number of poll_write calls\n\n pub num_poll_write: usize,\n\n\n\n // size of the last poll_write\n\n pub last_write_size: usize,\n\n}\n\nimpl 
AsyncWrite for AsyncWriteNull {\n\n fn poll_write(\n\n mut self: Pin<&mut Self>,\n\n _cx: &mut Context<'_>,\n\n buf: &[u8],\n\n ) -> Poll<std::io::Result<usize>> {\n\n self.num_poll_write += 1;\n\n self.last_write_size = buf.len();\n\n Poll::Ready(Ok(buf.len()))\n\n }\n\n\n\n fn poll_flush(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<std::io::Result<()>> {\n\n Poll::Ready(Ok(()))\n\n }\n\n\n\n fn poll_close(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<std::io::Result<()>> {\n\n Poll::Ready(Ok(()))\n\n }\n\n}\n\n\n", "file_path": "crates/codec/tests/write.rs", "rank": 25, "score": 41207.940068526084 }, { "content": "// Sends two lines at once, then nothing else forever\n\nstruct MockBurstySender {\n\n sent: bool,\n\n}\n\nimpl AsyncRead for MockBurstySender {\n\n fn poll_read(\n\n mut self: Pin<&mut Self>,\n\n _cx: &mut Context<'_>,\n\n buf: &mut [u8],\n\n ) -> Poll<io::Result<usize>> {\n\n const MESSAGES: &'static [u8] = b\"one\\ntwo\\n\";\n\n if !self.sent && buf.len() >= MESSAGES.len() {\n\n self.sent = true;\n\n buf[0..MESSAGES.len()].clone_from_slice(MESSAGES);\n\n Poll::Ready(Ok(MESSAGES.len()))\n\n } else {\n\n Poll::Pending\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/codec/tests/read.rs", "rank": 26, "score": 41207.940068526084 }, { "content": "#[test]\n\nfn it_works() {\n\n let buf = \"Hello\\nWorld\\nError\".to_owned();\n\n let cur = Cursor::new(buf);\n\n\n\n let mut framed = Framed::new(cur, Lines {});\n\n let next = block_on(framed.try_next()).unwrap().unwrap();\n\n assert_eq!(next, \"Hello\\n\");\n\n let next = block_on(framed.try_next()).unwrap().unwrap();\n\n assert_eq!(next, \"World\\n\");\n\n\n\n assert!(block_on(framed.try_next()).is_err());\n\n}\n", "file_path": "crates/codec/tests/lines.rs", "rank": 27, "score": 40687.51924968502 }, { "content": "#[test]\n\nfn line_write() {\n\n let curs = Cursor::new(vec![0u8; 16]);\n\n let mut framer = Framed::new(curs, Lines {});\n\n block_on(framer.send_unpin(\"Hello\\n\")).unwrap();\n\n 
block_on(framer.send_unpin(\"World\\n\")).unwrap();\n\n let (curs, _) = framer.release();\n\n assert_eq!(&curs.get_ref()[0..12], b\"Hello\\nWorld\\n\");\n\n assert_eq!(curs.position(), 12);\n\n}\n\n\n", "file_path": "crates/codec/tests/write.rs", "rank": 28, "score": 39703.56026593591 }, { "content": "#[test]\n\nfn read_few_messages() {\n\n let string: &[u8] = b\"aabbbabbbabbbabb\";\n\n let input = OneByteAtATime { input: string };\n\n let mut framed = Framed::new(input, AllTheAs);\n\n for _ in 0..5 {\n\n let item = block_on(framed.next()).unwrap().unwrap();\n\n assert_eq!(item, 'a');\n\n }\n\n}\n", "file_path": "crates/codec/tests/read.rs", "rank": 29, "score": 39703.56026593591 }, { "content": "#[test]\n\nfn line_write_to_eof() {\n\n let mut buf = [0u8; 16];\n\n let curs = Cursor::new(&mut buf[..]);\n\n let mut framer = Framed::new(curs, Lines {});\n\n let _err = block_on(framer.send_unpin(\"This will fill up the buffer\\n\")).unwrap_err();\n\n let (curs, _) = framer.release();\n\n assert_eq!(curs.position(), 16);\n\n assert_eq!(&curs.get_ref()[0..16], b\"This will fill u\");\n\n}\n\n\n", "file_path": "crates/codec/tests/write.rs", "rank": 30, "score": 38793.909366136664 }, { "content": "#[test]\n\nfn line_read_multi() {\n\n let io = MockBurstySender { sent: false };\n\n let mut framed = Framed::new(io, Lines {});\n\n let one = block_on(framed.next()).unwrap().unwrap();\n\n assert_eq!(one, \"one\\n\");\n\n let two = block_on(framed.next()).unwrap().unwrap();\n\n assert_eq!(two, \"two\\n\");\n\n}\n\n\n", "file_path": "crates/codec/tests/read.rs", "rank": 31, "score": 38793.909366136664 }, { "content": "#[test]\n\nfn send_high_water_mark() {\n\n // stream will output 999 bytes, 1 at at a time, and will always be ready\n\n let mut stream = stream::iter((0..999).map(|_| b\"\\0\").map(Ok));\n\n\n\n // sink will eat whatever it receives\n\n let io = AsyncWriteNull {\n\n num_poll_write: 0,\n\n last_write_size: 0,\n\n };\n\n\n\n // expect two sends\n\n let mut framer = 
Framed::new(io, BytesCodec {});\n\n framer.w_high_water_mark = 500;\n\n block_on(framer.send_all_unpin(&mut stream)).unwrap();\n\n let (io, _) = framer.release();\n\n assert_eq!(io.num_poll_write, 2);\n\n assert_eq!(io.last_write_size, 499);\n\n}\n", "file_path": "crates/codec/tests/write.rs", "rank": 32, "score": 37950.45526700933 }, { "content": "#[test]\n\nfn same_msgs_are_received_as_were_sent() {\n\n let cur = Cursor::new(vec![0; 256]);\n\n let mut framed = Framed::new(cur, Length::<u64>::new());\n\n\n\n let send_msgs = async {\n\n framed.send_unpin(\"msg1\").await.unwrap();\n\n framed.send_unpin(\"msg2\").await.unwrap();\n\n framed.send_unpin(\"msg3\").await.unwrap();\n\n };\n\n block_on(send_msgs);\n\n\n\n let (mut cur, _) = framed.release();\n\n cur.set_position(0);\n\n let framed = Framed::new(cur, Length::<u64>::new());\n\n\n\n let recv_msgs = framed\n\n .take(3)\n\n .map(|res| res.unwrap())\n\n .map(|buf| String::from_utf8(buf.to_vec()).unwrap())\n\n .collect::<Vec<_>>();\n\n let msgs: Vec<String> = block_on(recv_msgs);\n\n\n\n assert!(msgs == vec![\"msg1\", \"msg2\", \"msg3\"]);\n\n}\n", "file_path": "crates/codec/tests/length_delimited.rs", "rank": 33, "score": 37950.45526700933 }, { "content": "use futures_lite::future::block_on;\n\nuse futures_util::{io::Cursor, stream::TryStreamExt};\n\nuse yz_futures_codec::{codec::BytesCodec, Framed};\n\n\n\n#[test]\n", "file_path": "crates/codec/tests/bytes.rs", "rank": 34, "score": 26442.47327216077 }, { "content": "\n\n /// Returns a reference to the read buffer.\n\n pub fn read_buffer(&self) -> &BytesMut {\n\n &self.r_buffer\n\n }\n\n}\n\n\n\nimpl<T: AsyncRead, U: Decoder> Stream for Framed<T, U> {\n\n type Item = Result<U::Item, Error<U::Error>>;\n\n\n\n fn poll_next(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {\n\n let mut this = self.project();\n\n let mut buf = [0u8; INITIAL_CAPACITY];\n\n let mut ended = false;\n\n\n\n loop {\n\n match this\n\n .codec\n\n .decode(&mut 
this.r_buffer)\n\n .map_err(Error::Codec)?\n", "file_path": "crates/codec/src/lib.rs", "rank": 35, "score": 18689.004781111733 }, { "content": " }\n\n}\n\n\n\nimpl<'a, Item, T, U> Sink<&'a Item> for Framed<T, U>\n\nwhere\n\n Item: ?Sized,\n\n T: AsyncWrite,\n\n U: Encoder<Item>,\n\n{\n\n fn start_send(self: Pin<&mut Self>, item: &'a Item) -> Result<(), Self::Error> {\n\n let this = self.project();\n\n this.codec.encode(item, this.w_buffer).map_err(Error::Codec)\n\n }\n\n}\n", "file_path": "crates/codec/src/lib.rs", "rank": 36, "score": 18683.43247896248 }, { "content": " {\n\n Some(item) => return Poll::Ready(Some(Ok(item))),\n\n None if ended => {\n\n return if this.r_buffer.is_empty() {\n\n Poll::Ready(None)\n\n } else {\n\n match this\n\n .codec\n\n .decode_eof(&mut this.r_buffer)\n\n .map_err(Error::Codec)?\n\n {\n\n Some(item) => Poll::Ready(Some(Ok(item))),\n\n None if this.r_buffer.is_empty() => Poll::Ready(None),\n\n None => Poll::Ready(Some(Err(io::Error::new(\n\n io::ErrorKind::UnexpectedEof,\n\n \"bytes remaining in stream\",\n\n )\n\n .into()))),\n\n }\n\n };\n", "file_path": "crates/codec/src/lib.rs", "rank": 37, "score": 18681.244738097084 }, { "content": " /// An error which originated in the codec\n\n #[error(\"codec error: {0}\")]\n\n Codec(#[source] C),\n\n\n\n /// An error which originated in the underlying I/O object\n\n #[error(\"I/O error: {0}\")]\n\n Io(#[from] std::io::Error),\n\n}\n\n\n\n/// Codecs\n\npub mod codec;\n\nuse codec::{Decoder, Encoder, EncoderError};\n\n\n\n/// A unified `Stream` and `Sink` interface to an underlying I/O object,\n\n/// using the `Encoder` and `Decoder` traits to encode and decode frames.\n\n///\n\n/// # Example\n\n/// ```\n\n/// use bytes::Bytes;\n\n/// use futures_util::{stream::TryStreamExt, io::Cursor};\n", "file_path": "crates/codec/src/lib.rs", "rank": 38, "score": 18681.2348347397 }, { "content": "impl<T, U> Framed<T, U> {\n\n /// Creates a new `Framed` transport with the given codec.\n\n /// A codec is a 
type which implements `Decoder` and `Encoder`.\n\n pub fn new(inner: T, codec: U) -> Self {\n\n Self {\n\n inner,\n\n codec,\n\n\n\n w_buffer: BytesMut::with_capacity(INITIAL_CAPACITY),\n\n\n\n // 2^17 bytes, which is slightly over 60% of the default\n\n // TCP send buffer size (SO_SNDBUF)\n\n w_high_water_mark: 131072,\n\n\n\n r_buffer: BytesMut::with_capacity(INITIAL_CAPACITY),\n\n }\n\n }\n\n\n\n /// Release the I/O and Codec\n\n pub fn release(self) -> (T, U) {\n", "file_path": "crates/codec/src/lib.rs", "rank": 39, "score": 18680.741911329267 }, { "content": " Si: ?Sized + Sink<O>,\n\n St: ?Sized + Stream<Item = Result<O, <Si as FlushSink>::Error>>,\n\n{\n\n type Output = Result<(), <Si as FlushSink>::Error>;\n\n\n\n fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n let this = Pin::into_inner(self);\n\n\n\n // If we've got an item buffered already, we need to write it to the\n\n // sink before we can do anything else\n\n if let Some(item) = this.buffered.take() {\n\n ready!(this.try_start_send(cx, item))?\n\n }\n\n\n\n while let Some(x) = &mut this.stream {\n\n match x.as_mut().poll_next(cx)? 
{\n\n Poll::Ready(Some(item)) => ready!(this.try_start_send(cx, item))?,\n\n Poll::Ready(None) => {\n\n this.stream = None;\n\n ready!(this.sink.as_mut().poll_flush(cx))?;\n", "file_path": "crates/util/src/sink.rs", "rank": 40, "score": 18680.581154130323 }, { "content": "}\n\n\n\nimpl<T, U> FlushSink for Framed<T, U>\n\nwhere\n\n T: AsyncWrite,\n\n U: EncoderError,\n\n{\n\n type Error = Error<U::Error>;\n\n\n\n fn poll_ready(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n let high_water_mark = self.w_high_water_mark - 1;\n\n self.poll_flush_until(cx, high_water_mark)\n\n .map_err(Into::into)\n\n }\n\n fn poll_flush(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n self.poll_flush_until(cx, 0).map_err(Into::into)\n\n }\n\n fn poll_close(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n ready!(self.as_mut().poll_flush(cx))?;\n\n self.project().inner.poll_close(cx).map_err(Into::into)\n", "file_path": "crates/codec/src/lib.rs", "rank": 41, "score": 18679.062571615574 }, { "content": " fn start_send(mut self: Pin<&mut Self>, item: T) -> Result<(), Self::Error> {\n\n self.push_back(item);\n\n Ok(())\n\n }\n\n }\n\n\n\n impl<S: ?Sized + FlushSink + Unpin> FlushSink for alloc::boxed::Box<S> {\n\n type Error = S::Error;\n\n\n\n fn poll_ready(\n\n mut self: Pin<&mut Self>,\n\n cx: &mut Context<'_>,\n\n ) -> Poll<Result<(), Self::Error>> {\n\n Pin::new(&mut **self).poll_ready(cx)\n\n }\n\n\n\n fn poll_flush(\n\n mut self: Pin<&mut Self>,\n\n cx: &mut Context<'_>,\n\n ) -> Poll<Result<(), Self::Error>> {\n", "file_path": "crates/sink/src/lib.rs", "rank": 42, "score": 18678.983854414873 }, { "content": " impl<T: Unpin> FlushSink for alloc::vec::Vec<T> {\n\n type Error = Never;\n\n\n\n fn poll_ready(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n Poll::Ready(Ok(()))\n\n }\n\n\n\n fn poll_flush(self: Pin<&mut Self>, _: &mut Context<'_>) -> 
Poll<Result<(), Self::Error>> {\n\n Poll::Ready(Ok(()))\n\n }\n\n\n\n fn poll_close(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n Poll::Ready(Ok(()))\n\n }\n\n }\n\n\n\n impl<T: Unpin> Sink<T> for alloc::vec::Vec<T> {\n\n fn start_send(mut self: Pin<&mut Self>, item: T) -> Result<(), Self::Error> {\n\n self.push(item);\n\n Ok(())\n", "file_path": "crates/sink/src/lib.rs", "rank": 43, "score": 18678.92948015577 }, { "content": " fn start_send(self: Pin<&mut Self>, item: Item) -> Result<(), Self::Error>;\n\n}\n\n\n\nimpl<S: ?Sized + FlushSink + Unpin> FlushSink for &mut S {\n\n type Error = S::Error;\n\n\n\n fn poll_ready(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n Pin::new(&mut **self).poll_ready(cx)\n\n }\n\n\n\n fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n Pin::new(&mut **self).poll_flush(cx)\n\n }\n\n\n\n fn poll_close(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n Pin::new(&mut **self).poll_close(cx)\n\n }\n\n}\n\n\n\nimpl<S: ?Sized + Sink<Item> + Unpin, Item> Sink<Item> for &mut S {\n", "file_path": "crates/sink/src/lib.rs", "rank": 44, "score": 18678.660706280276 }, { "content": " }\n\n _ => {\n\n let n = ready!(this.inner.as_mut().poll_read(cx, &mut buf))?;\n\n this.r_buffer.extend_from_slice(&buf[..n]);\n\n ended = n == 0;\n\n continue;\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<T: AsyncWrite, U> Framed<T, U> {\n\n fn poll_flush_until(\n\n self: Pin<&mut Self>,\n\n cx: &mut Context<'_>,\n\n limit: usize,\n\n ) -> Poll<Result<(), io::Error>> {\n\n let mut this = self.project();\n\n let orig_len = this.w_buffer.len();\n", "file_path": "crates/codec/src/lib.rs", "rank": 45, "score": 18678.06258526518 }, { "content": " fn poll_close(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n self.get_mut().as_mut().poll_close(cx)\n\n }\n\n}\n\n\n\nimpl<P, Item> 
Sink<Item> for Pin<P>\n\nwhere\n\n P: DerefMut + Unpin,\n\n P::Target: Sink<Item>,\n\n{\n\n fn start_send(self: Pin<&mut Self>, item: Item) -> Result<(), Self::Error> {\n\n self.get_mut().as_mut().start_send(item)\n\n }\n\n}\n\n\n\n#[cfg(feature = \"alloc\")]\n\nmod if_alloc {\n\n use super::*;\n\n use core::convert::Infallible as Never;\n\n\n", "file_path": "crates/sink/src/lib.rs", "rank": 46, "score": 18677.504425908348 }, { "content": "use core::future::Future;\n\nuse core::marker::Unpin;\n\nuse core::pin::Pin;\n\nuse core::task::{Context, Poll};\n\nuse futures_core::{ready, Stream};\n\n#[doc(no_inline)]\n\npub use yz_futures_sink::{FlushSink, Sink};\n\n\n\n#[derive(Debug)]\n\n#[must_use = \"futures do nothing unless you `.await` or poll them\"]\n\npub struct Send<'a, Si: ?Sized, Item> {\n\n sink: Pin<&'a mut Si>,\n\n item: Option<Item>,\n\n}\n\n\n\nimpl<Si: ?Sized, Item> Unpin for Send<'_, Si, Item> {}\n\n\n\nimpl<Si: Sink<Item> + ?Sized, Item> Future for Send<'_, Si, Item> {\n\n type Output = Result<(), Si::Error>;\n\n\n", "file_path": "crates/util/src/sink.rs", "rank": 47, "score": 18677.47419845493 }, { "content": " Pin::new(&mut **self).poll_flush(cx)\n\n }\n\n\n\n fn poll_close(\n\n mut self: Pin<&mut Self>,\n\n cx: &mut Context<'_>,\n\n ) -> Poll<Result<(), Self::Error>> {\n\n Pin::new(&mut **self).poll_close(cx)\n\n }\n\n }\n\n\n\n impl<S: ?Sized + Sink<Item> + Unpin, Item> Sink<Item> for alloc::boxed::Box<S> {\n\n fn start_send(mut self: Pin<&mut Self>, item: Item) -> Result<(), Self::Error> {\n\n Pin::new(&mut **self).start_send(item)\n\n }\n\n }\n\n}\n", "file_path": "crates/sink/src/lib.rs", "rank": 48, "score": 18677.405966135448 }, { "content": " Self: Unpin,\n\n St: Stream<Item = Result<Item, Self::Error>> + Unpin,\n\n {\n\n SendAll {\n\n sink: Pin::new(self),\n\n stream: Some(Pin::new(st)),\n\n buffered: None,\n\n }\n\n }\n\n\n\n fn send_all<'a, St>(self: Pin<&'a mut Self>, st: Pin<&'a mut St>) -> SendAll<'a, Self, St, Item>\n\n where\n\n Self: 
Unpin,\n\n St: Stream<Item = Result<Item, Self::Error>> + Unpin,\n\n {\n\n SendAll {\n\n sink: self,\n\n stream: Some(st),\n\n buffered: None,\n\n }\n\n }\n\n}\n\n\n\nimpl<Item, Si: Sink<Item>> SinkExt<Item> for Si {}\n", "file_path": "crates/util/src/sink.rs", "rank": 49, "score": 18677.38567947168 }, { "content": "//! # Ok::<_, Error<_>>(())\n\n//! # }).unwrap();\n\n//! ```\n\n\n\n#![forbid(unsafe_code)]\n\n#![deny(missing_docs)]\n\n#![warn(missing_debug_implementations, rust_2018_idioms)]\n\n#![warn(clippy::all)]\n\n\n\nuse bytes::Buf;\n\npub use bytes::{Bytes, BytesMut};\n\nuse futures_core::{ready, Stream};\n\nuse futures_io::{AsyncRead, AsyncWrite};\n\nuse std::task::{Context, Poll};\n\nuse std::{io, ops::Deref, pin::Pin};\n\nuse yz_futures_sink::{FlushSink, Sink};\n\n\n\n/// The generic error enum for this crate.\n\n#[derive(Debug, thiserror::Error)]\n\npub enum Error<C: std::error::Error + 'static> {\n", "file_path": "crates/codec/src/lib.rs", "rank": 50, "score": 18677.125899296094 }, { "content": " St: ?Sized + Stream<Item = Result<O, <Si as FlushSink>::Error>>,\n\n{\n\n fn try_start_send(\n\n &mut self,\n\n cx: &mut Context<'_>,\n\n item: O,\n\n ) -> Poll<Result<(), <Si as FlushSink>::Error>> {\n\n debug_assert!(self.buffered.is_none());\n\n match self.sink.as_mut().poll_ready(cx)? 
{\n\n Poll::Ready(()) => Poll::Ready(self.sink.as_mut().start_send(item)),\n\n Poll::Pending => {\n\n self.buffered = Some(item);\n\n Poll::Pending\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl<Si, St, O> Future for SendAll<'_, Si, St, O>\n\nwhere\n", "file_path": "crates/util/src/sink.rs", "rank": 51, "score": 18676.76813792183 }, { "content": " }\n\n }\n\n\n\n impl<T: Unpin> FlushSink for alloc::collections::VecDeque<T> {\n\n type Error = Never;\n\n\n\n fn poll_ready(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n Poll::Ready(Ok(()))\n\n }\n\n\n\n fn poll_flush(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n Poll::Ready(Ok(()))\n\n }\n\n\n\n fn poll_close(self: Pin<&mut Self>, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n Poll::Ready(Ok(()))\n\n }\n\n }\n\n\n\n impl<T: Unpin> Sink<T> for alloc::collections::VecDeque<T> {\n", "file_path": "crates/sink/src/lib.rs", "rank": 52, "score": 18676.482584513553 }, { "content": "pub struct Framed<T, U> {\n\n #[pin]\n\n inner: T,\n\n\n\n /// the codec used to encode and decode frames\n\n pub codec: U,\n\n\n\n // write\n\n w_buffer: BytesMut,\n\n /// The high-water mark for writes, in bytes\n\n ///\n\n /// The send *high-water mark* prevents the `Sink` part\n\n /// from accepting additional messages to send when its\n\n /// buffer exceeds this length, in bytes. Attempts to enqueue\n\n /// additional messages will be deferred until progress is\n\n /// made on the underlying `AsyncWrite`. This applies\n\n /// back-pressure on fast senders and prevents unbounded\n\n /// buffer growth.\n\n ///\n\n /// The default high-water mark is 2^17 bytes. Applications\n", "file_path": "crates/codec/src/lib.rs", "rank": 53, "score": 18676.270906657843 }, { "content": " /// which desire low latency may wish to reduce this value.\n\n /// There is little point to increasing this value beyond\n\n /// your socket's `SO_SNDBUF` size. 
On linux, this defaults\n\n /// to 212992 bytes but is user-adjustable.\n\n pub w_high_water_mark: usize,\n\n\n\n // read\n\n r_buffer: BytesMut,\n\n}\n\n\n\nimpl<T, U> Deref for Framed<T, U> {\n\n type Target = T;\n\n\n\n fn deref(&self) -> &T {\n\n &self.inner\n\n }\n\n}\n\n\n\nconst INITIAL_CAPACITY: usize = 8 * 1024;\n\n\n", "file_path": "crates/codec/src/lib.rs", "rank": 54, "score": 18676.09433778293 }, { "content": " fn start_send(mut self: Pin<&mut Self>, item: Item) -> Result<(), Self::Error> {\n\n Pin::new(&mut **self).start_send(item)\n\n }\n\n}\n\n\n\nimpl<P> FlushSink for Pin<P>\n\nwhere\n\n P: DerefMut + Unpin,\n\n P::Target: FlushSink,\n\n{\n\n type Error = <P::Target as FlushSink>::Error;\n\n\n\n fn poll_ready(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n self.get_mut().as_mut().poll_ready(cx)\n\n }\n\n\n\n fn poll_flush(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {\n\n self.get_mut().as_mut().poll_flush(cx)\n\n }\n\n\n", "file_path": "crates/sink/src/lib.rs", "rank": 55, "score": 18675.691938827906 }, { "content": "pub struct SendAll<'a, Si, St, O>\n\nwhere\n\n Si: ?Sized + FlushSink,\n\n St: ?Sized + Stream<Item = Result<O, <Si as FlushSink>::Error>>,\n\n{\n\n sink: Pin<&'a mut Si>,\n\n stream: Option<Pin<&'a mut St>>,\n\n buffered: Option<O>,\n\n}\n\n\n\nimpl<Si, St, O> Unpin for SendAll<'_, Si, St, O>\n\nwhere\n\n Si: ?Sized + FlushSink,\n\n St: ?Sized + Stream<Item = Result<O, <Si as FlushSink>::Error>>,\n\n{\n\n}\n\n\n\nimpl<Si, St, O> SendAll<'_, Si, St, O>\n\nwhere\n\n Si: ?Sized + Sink<O>,\n", "file_path": "crates/util/src/sink.rs", "rank": 56, "score": 18675.404251942236 }, { "content": "\n\n while this.w_buffer.len() > limit {\n\n let num_write = ready!(this.inner.as_mut().poll_write(cx, &this.w_buffer))?;\n\n\n\n if num_write == 0 {\n\n return Poll::Ready(Err(io::Error::new(\n\n io::ErrorKind::UnexpectedEof,\n\n \"FramedWrite: end of input\",\n\n )));\n\n }\n\n\n\n 
this.w_buffer.advance(num_write);\n\n }\n\n\n\n if orig_len != this.w_buffer.len() {\n\n this.inner.poll_flush(cx)\n\n } else {\n\n Poll::Ready(Ok(()))\n\n }\n\n }\n", "file_path": "crates/codec/src/lib.rs", "rank": 57, "score": 18674.451396868793 }, { "content": " /// Returns `Poll::Ready(Ok(()))` when no buffered items remain. If this\n\n /// value is returned then it is guaranteed that all previous values sent\n\n /// via `start_send` have been flushed.\n\n ///\n\n /// Returns `Poll::Pending` if there is more work left to do, in which\n\n /// case the current task is scheduled (via `cx.waker().wake_by_ref()`) to wake up when\n\n /// `poll_flush` should be called again.\n\n ///\n\n /// In most cases, if the sink encounters an error, the sink will\n\n /// permanently be unable to receive items.\n\n fn poll_flush(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>>;\n\n\n\n /// Flush any remaining output and close this sink, if necessary.\n\n ///\n\n /// Returns `Poll::Ready(Ok(()))` when no buffered items remain and the sink\n\n /// has been successfully closed.\n\n ///\n\n /// Returns `Poll::Pending` if there is more work left to do, in which\n\n /// case the current task is scheduled (via `cx.waker().wake_by_ref()`) to wake up when\n\n /// `poll_close` should be called again.\n", "file_path": "crates/sink/src/lib.rs", "rank": 58, "score": 18673.809350657786 }, { "content": "/// use yz_futures_codec::{codec::BytesCodec, Framed, Error};\n\n/// use yz_futures_util::sink::SinkExt;\n\n///\n\n/// # futures_lite::future::block_on(async move {\n\n/// let cur = Cursor::new(vec![0u8; 12]);\n\n/// let mut framed = Framed::new(cur, BytesCodec {});\n\n///\n\n/// // Send bytes to `buf` through the `BytesCodec`\n\n/// framed.send_unpin(\"Hello world!\").await?;\n\n///\n\n/// // Release the I/O and codec\n\n/// let (cur, _) = framed.release();\n\n/// assert_eq!(cur.get_ref(), b\"Hello world!\");\n\n/// # Ok::<_, Error<_>>(())\n\n/// # }).unwrap();\n\n/// 
```\n\n// NOTE(zserik): yes, I tried pin-project-lite,\n\n// but it doesn't support structs with field docs.\n\n#[pin_project::pin_project]\n\n#[derive(Debug)]\n", "file_path": "crates/codec/src/lib.rs", "rank": 59, "score": 18673.66640467655 }, { "content": " fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {\n\n let this = Pin::into_inner(self);\n\n if let Some(item) = this.item.take() {\n\n match this.sink.as_mut().poll_ready(cx)? {\n\n Poll::Ready(()) => this.sink.as_mut().start_send(item)?,\n\n Poll::Pending => {\n\n this.item = Some(item);\n\n return Poll::Pending;\n\n }\n\n }\n\n }\n\n\n\n // we're done sending the item, but want to block on flushing the\n\n // sink\n\n this.sink.as_mut().poll_flush(cx)\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\n#[must_use = \"futures do nothing unless you `.await` or poll them\"]\n", "file_path": "crates/util/src/sink.rs", "rank": 60, "score": 18673.563394430574 }, { "content": "// Copyright (c) 2020 Erik Zscheile\n\n// Copyright (c) 2019 Matt Hunzinger\n\n\n\n//! Utilities for encoding and decoding frames using `async/await`.\n\n//!\n\n//! Contains adapters to go from streams of bytes, [`AsyncRead`](futures_io::AsyncRead)\n\n//! and [`AsyncWrite`](futures_io::AsyncWrite), to framed streams implementing [`Sink`](yz_futures_sink::Sink) and [`Stream`](futures_core::Stream).\n\n//! Framed streams are also known as `transports`.\n\n//!\n\n//! ```\n\n//! # futures_lite::future::block_on(async move {\n\n//! use futures_util::{TryStreamExt, io::Cursor};\n\n//! use yz_futures_codec::{codec::Lines, Framed, Error};\n\n//!\n\n//! let io = Cursor::new(Vec::new());\n\n//! let mut framed = Framed::new(io, Lines);\n\n//!\n\n//! while let Some(line) = framed.try_next().await? {\n\n//! dbg!(line);\n\n//! 
}\n", "file_path": "crates/codec/src/lib.rs", "rank": 61, "score": 18673.037579530777 }, { "content": " (self.inner, self.codec)\n\n }\n\n\n\n /// Consumes the `Framed`, returning its underlying I/O stream.\n\n ///\n\n /// Note that care should be taken to not tamper with the underlying stream\n\n /// of data coming in as it may corrupt the stream of frames otherwise\n\n /// being worked with.\n\n pub fn into_inner(self) -> T {\n\n self.release().0\n\n }\n\n\n\n /// Returns a mutable reference to the underlying I/O stream.\n\n ///\n\n /// Note that care should be taken to not tamper with the underlying stream\n\n /// of data coming in as it may corrupt the stream of frames otherwise\n\n /// being worked with.\n\n pub fn inner_mut(&mut self) -> &mut T {\n\n &mut self.inner\n\n }\n", "file_path": "crates/codec/src/lib.rs", "rank": 62, "score": 18672.96302596398 }, { "content": "//! Asynchronous sinks\n\n//!\n\n//! This crate contains the `Sink` trait which allows values to be sent\n\n//! asynchronously.\n\n\n\n#![no_std]\n\n#![forbid(unsafe_code)]\n\n#![warn(\n\n missing_docs,\n\n missing_debug_implementations,\n\n rust_2018_idioms,\n\n unreachable_pub,\n\n single_use_lifetimes\n\n)]\n\n#![warn(clippy::all)]\n\n#![doc(test(attr(deny(warnings), allow(dead_code, unused_assignments, unused_variables))))]\n\n\n\n#[cfg(feature = \"alloc\")]\n\nextern crate alloc;\n\n\n\nuse core::ops::DerefMut;\n\nuse core::pin::Pin;\n\nuse core::task::{Context, Poll};\n\n\n\n/// This is a helper trait which contains all methods which don't depend on the\n\n/// item type.\n\n#[must_use = \"sinks do nothing unless polled\"]\n", "file_path": "crates/sink/src/lib.rs", "rank": 63, "score": 18669.48797206497 }, { "content": " ///\n\n /// If this function encounters an error, the sink should be considered to\n\n /// have failed permanently, and no more `Sink` methods should be called.\n\n fn poll_close(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>>;\n\n}\n\n\n\n/// 
A `Sink` is a value into which other values can be sent, asynchronously.\n\n///\n\n/// Basic examples of sinks include the sending side of:\n\n///\n\n/// - Channels\n\n/// - Sockets\n\n/// - Pipes\n\n///\n\n/// In addition to such \"primitive\" sinks, it's typical to layer additional\n\n/// functionality, such as buffering, on top of an existing sink.\n\n///\n\n/// Sending to a sink is \"asynchronous\" in the sense that the value may not be\n\n/// sent in its entirety immediately. Instead, values are sent in a two-phase\n\n/// way: first by initiating a send, and then by polling for completion. This\n", "file_path": "crates/sink/src/lib.rs", "rank": 64, "score": 18667.666168077445 }, { "content": "#![no_std]\n\n#![forbid(unsafe_code)]\n\n\n\n#[doc(no_inline)]\n\npub use futures_core::ready;\n\n\n\npub mod stream {\n\n #[doc(no_inline)]\n\n pub use futures_core::Stream;\n\n}\n\n\n\npub mod sink;\n", "file_path": "crates/util/src/lib.rs", "rank": 65, "score": 18666.7193267163 }, { "content": " return Poll::Ready(Ok(()));\n\n }\n\n Poll::Pending => {\n\n ready!(this.sink.as_mut().poll_flush(cx))?;\n\n return Poll::Pending;\n\n }\n\n }\n\n }\n\n Poll::Ready(Ok(()))\n\n }\n\n}\n\n\n", "file_path": "crates/util/src/sink.rs", "rank": 66, "score": 18665.596714676645 }, { "content": "/// two-phase setup is analogous to buffered writing in synchronous code, where\n\n/// writes often succeed immediately, but internally are buffered and are\n\n/// *actually* written only upon flushing.\n\n///\n\n/// In addition, the `Sink` may be *full*, in which case it is not even possible\n\n/// to start the sending process.\n\n///\n\n/// As with `Future` and `Stream`, the `Sink` trait is built from a few core\n\n/// required methods, and a host of default methods for working in a\n\n/// higher-level way. 
The `Sink::send_all` combinator is of particular\n\n/// importance: you can use it to send an entire stream to a sink, which is\n\n/// the simplest way to ultimately consume a stream.\n", "file_path": "crates/sink/src/lib.rs", "rank": 67, "score": 18663.42145983632 }, { "content": " L: LengthType,\n\n{\n\n fn encode(&mut self, src: &Item, dst: &mut BytesMut) -> Result<(), Self::Error> {\n\n let src = src.as_ref();\n\n dst.reserve(Self::HEADER_LEN + src.len());\n\n L::encode(src.len(), dst)?;\n\n dst.extend_from_slice(src);\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl<L: LengthType> Decoder for Length<L> {\n\n type Item = Bytes;\n\n type Error = OverflowError;\n\n\n\n fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {\n\n Ok(if src.len() < std::mem::size_of::<L>() {\n\n None\n\n } else {\n\n let len = usize::try_from(L::start_decode(&src)).map_err(|_| OverflowError)?;\n", "file_path": "crates/codec/src/codec/length.rs", "rank": 68, "score": 17935.79202168373 }, { "content": "/// Ok(())\n\n/// }\n\n/// }\n\n///\n\n/// impl Decoder for MyStringCodec {\n\n/// type Item = String;\n\n/// type Error = MyError;\n\n///\n\n/// fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {\n\n/// match self.0.decode(src)? 
{\n\n/// Some(bytes) => {\n\n/// match String::from_utf8(bytes.to_vec()) {\n\n/// Ok(string) => Ok(Some(string)),\n\n/// Err(e) => Err(MyError::StringDecode(e))\n\n/// }\n\n/// },\n\n/// None => Ok(None),\n\n/// }\n\n/// }\n\n/// }\n", "file_path": "crates/codec/src/codec/length.rs", "rank": 69, "score": 17932.431463168436 }, { "content": "#[derive(Clone, Debug, Default, PartialEq)]\n\npub struct Lines;\n\n\n\nimpl super::EncoderError for Lines {\n\n type Error = Infallible;\n\n}\n\n\n\nimpl<Item> Encoder<Item> for Lines\n\nwhere\n\n Item: AsRef<str> + ?Sized,\n\n{\n\n fn encode(&mut self, item: &Item, dst: &mut BytesMut) -> Result<(), Self::Error> {\n\n let item = item.as_ref();\n\n dst.reserve(item.len());\n\n dst.put(item.as_bytes());\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl Decoder for Lines {\n", "file_path": "crates/codec/src/codec/lines.rs", "rank": 70, "score": 17928.094582143258 }, { "content": "\n\n fn start_decode(src: &[u8]) -> u64 {\n\n let mut len_bytes = [0u8; $y];\n\n len_bytes.copy_from_slice(&src[..$y]);\n\n Self::from_be_bytes(len_bytes).into()\n\n }\n\n }\n\n )+\n\n }\n\n}\n\n\n\nimpl_length!(u8 => 1, u16 => 2, u32 => 4, u64 => 8);\n\n\n\nimpl<L: LengthType> super::EncoderError for Length<L> {\n\n type Error = OverflowError;\n\n}\n\n\n\nimpl<Item, L> Encoder<Item> for Length<L>\n\nwhere\n\n Item: AsRef<[u8]> + ?Sized,\n", "file_path": "crates/codec/src/codec/length.rs", "rank": 71, "score": 17926.61385401991 }, { "content": "/// #[error(\"item length overflow\")]\n\n/// Overflow,\n\n///\n\n/// #[error(\"string decoding failed\")]\n\n/// StringDecode(#[from] std::string::FromUtf8Error),\n\n/// }\n\n///\n\n/// impl From<OverflowError> for MyError {\n\n/// fn from(_: OverflowError) -> MyError {\n\n/// MyError::Overflow\n\n/// }\n\n/// }\n\n///\n\n/// impl EncoderError for MyStringCodec {\n\n/// type Error = MyError;\n\n/// }\n\n///\n\n/// impl Encoder<str> for MyStringCodec {\n\n/// fn encode(&mut self, src: &str, dst: &mut BytesMut) -> Result<(), 
Self::Error> {\n\n/// self.0.encode(src.as_bytes(), dst)?;\n", "file_path": "crates/codec/src/codec/length.rs", "rank": 72, "score": 17924.735480787655 }, { "content": " type Item = String;\n\n type Error = std::string::FromUtf8Error;\n\n\n\n fn decode(&mut self, src: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {\n\n match memchr(b'\\n', src) {\n\n Some(pos) => {\n\n let buf = src.split_to(pos + 1);\n\n String::from_utf8(buf.to_vec()).map(Some)\n\n }\n\n _ => Ok(None),\n\n }\n\n }\n\n}\n", "file_path": "crates/codec/src/codec/lines.rs", "rank": 73, "score": 17924.464102183807 }, { "content": " type Error = Error;\n\n\n\n fn decode(&mut self, buf: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {\n\n // Build streaming JSON iterator over data\n\n let de = serde_json::Deserializer::from_slice(&buf);\n\n let mut iter = de.into_iter::<Dec>();\n\n\n\n // Attempt to fetch an item and generate response\n\n let res = match iter.next() {\n\n Some(Ok(v)) => Ok(Some(v)),\n\n Some(Err(ref e)) if e.is_eof() => Ok(None),\n\n Some(Err(e)) => Err(e),\n\n None => Ok(None),\n\n };\n\n\n\n // Update offset from iterator\n\n let offset = iter.byte_offset();\n\n\n\n // Advance buffer\n\n buf.advance(offset);\n", "file_path": "crates/codec/src/codec/json.rs", "rank": 74, "score": 17924.378993720416 }, { "content": " if usize::try_from(self.remaining).map(|rem| src.len() >= rem) == Ok(true) {\n\n (self.remaining.try_into().unwrap(), None)\n\n } else\n\n /* src.len() < self.remaining */\n\n {\n\n self.remaining -= u64::try_from(src.len()).unwrap();\n\n (src.len(), Some(self))\n\n },\n\n )\n\n }\n\n}\n\n\n\nimpl<L: LengthType> super::DecoderWithSkipAhead for Length<L> {\n\n type Handler = LenSkipAhead;\n\n\n\n fn prepare_skip_ahead(&mut self, src: &mut BytesMut) -> Self::Handler {\n\n assert!(src.len() > std::mem::size_of::<L>());\n\n\n\n let len = L::start_decode(&src);\n\n\n", "file_path": "crates/codec/src/codec/length.rs", "rank": 75, "score": 17923.392172805256 
}, { "content": "\n\n fn decode(&mut self, buf: &mut BytesMut) -> Result<Option<Self::Item>, Self::Error> {\n\n // Build deserializer\n\n let mut de = serde_cbor::Deserializer::from_slice(&buf);\n\n\n\n // Attempt deserialization\n\n let res: Result<Dec, _> = serde::de::Deserialize::deserialize(&mut de);\n\n\n\n // If we ran out before parsing, return none and try again later\n\n let res = match res {\n\n Ok(v) => Ok(Some(v)),\n\n Err(e) if e.is_eof() => Ok(None),\n\n Err(e) => Err(e),\n\n };\n\n\n\n // Update offset from iterator\n\n let offset = de.byte_offset();\n\n\n\n // Advance buffer\n\n buf.advance(offset);\n", "file_path": "crates/codec/src/codec/cbor.rs", "rank": 76, "score": 17921.737464049093 }, { "content": " if src.len() - Self::HEADER_LEN >= len {\n\n // Skip the length header we already read.\n\n src.advance(Self::HEADER_LEN);\n\n Some(src.split_to(len).freeze())\n\n } else {\n\n None\n\n }\n\n })\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct LenSkipAhead {\n\n remaining: u64,\n\n}\n\n\n\nimpl super::SkipAheadHandler for LenSkipAhead {\n\n fn continue_skipping(mut self, src: &[u8]) -> Result<(usize, Option<Self>), ()> {\n\n use std::convert::TryInto;\n\n Ok(\n", "file_path": "crates/codec/src/codec/length.rs", "rank": 77, "score": 17921.215116776482 }, { "content": " // skip the length header we already read.\n\n src.advance(Self::HEADER_LEN);\n\n\n\n LenSkipAhead { remaining: len }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n mod decode {\n\n use super::*;\n\n\n\n #[test]\n\n fn it_returns_bytes_withouth_length_header() {\n\n use bytes::BufMut;\n\n let mut codec = Length::<u64>::new();\n\n\n\n let mut src = BytesMut::with_capacity(5);\n\n src.put(&[0, 0, 0, 0, 0, 0, 0, 3u8, 1, 2, 3, 4][..]);\n\n let item = codec.decode(&mut src).unwrap();\n\n\n\n assert!(item == Some(Bytes::from(&[1u8, 2, 3][..])));\n\n }\n\n }\n\n}\n", "file_path": "crates/codec/src/codec/length.rs", "rank": 78, "score": 17919.702835067103 }, { 
"content": " }\n\n impl<$($param),+> ::std::default::Default for $t<$($param),+> {\n\n fn default() -> Self { Self::new() }\n\n }\n\n impl<$($param),+> ::std::cmp::PartialEq for $t<$($param),+> {\n\n fn eq(&self, _other: &Self) -> bool { true }\n\n }\n\n }\n\n}\n\n\n\nmod bytes;\n\npub use self::bytes::BytesCodec;\n\n\n\nmod length;\n\npub use self::length::{Length, OverflowError};\n\n\n\nmod lines;\n\npub use self::lines::Lines;\n\n\n\nmod limit;\n", "file_path": "crates/codec/src/codec/mod.rs", "rank": 79, "score": 17919.625709439882 }, { "content": "\n\n res\n\n }\n\n}\n\n\n\nimpl<Enc, Dec> super::EncoderError for Json<Enc, Dec>\n\nwhere\n\n Enc: Serialize + 'static,\n\n{\n\n type Error = Error;\n\n}\n\n\n\n/// Encoder impl encodes object streams to bytes\n\nimpl<Enc, Dec> Encoder<Enc> for Json<Enc, Dec>\n\nwhere\n\n Enc: Serialize + 'static,\n\n{\n\n fn encode(&mut self, data: &Enc, buf: &mut BytesMut) -> Result<(), Self::Error> {\n\n // Encode json\n\n let j = serde_json::to_string(data)?;\n", "file_path": "crates/codec/src/codec/json.rs", "rank": 80, "score": 17918.17987491834 }, { "content": "\n\n res\n\n }\n\n}\n\n\n\nimpl<Enc, Dec> super::EncoderError for Cbor<Enc, Dec>\n\nwhere\n\n Enc: Serialize + 'static,\n\n{\n\n type Error = serde_cbor::Error;\n\n}\n\n\n\n/// Encoder impl encodes object streams to bytes\n\nimpl<Enc, Dec> Encoder<Enc> for Cbor<Enc, Dec>\n\nwhere\n\n Enc: Serialize + 'static,\n\n{\n\n fn encode(&mut self, data: &Enc, buf: &mut BytesMut) -> Result<(), Self::Error> {\n\n // Encode cbor\n\n let j = serde_cbor::to_vec(data)?;\n", "file_path": "crates/codec/src/codec/cbor.rs", "rank": 81, "score": 17917.94194987186 }, { "content": "/// # let stream = Cursor::new(&mut buf);\n\n/// // let stream = ...\n\n/// let codec = Cbor::<Something, Something>::new();\n\n/// let mut framed = Framed::new(stream, codec);\n\n///\n\n/// while let Some(s) = framed.try_next().await.unwrap() {\n\n/// println!(\"{:?}\", s.data);\n\n/// }\n\n/// });\n\n/// 
```\n\npub struct Cbor<Enc, Dec>(PhantomData<(Enc, Dec)>);\n\nimpl_phantom!(Cbor<Enc, Dec>);\n\n\n\n/// Decoder impl parses cbor objects from bytes\n\nimpl<Enc, Dec> Decoder for Cbor<Enc, Dec>\n\nwhere\n\n for<'de> Dec: Deserialize<'de> + 'static,\n\n{\n\n type Item = Dec;\n\n type Error = serde_cbor::Error;\n", "file_path": "crates/codec/src/codec/cbor.rs", "rank": 82, "score": 17917.755386127545 }, { "content": "use super::{Decoder, Encoder};\n\nuse bytes::{BufMut, BytesMut};\n\nuse memchr::memchr;\n\nuse std::convert::Infallible;\n\n\n\n/// A simple `Codec` implementation that splits up data into lines.\n\n///\n\n/// ```rust\n\n/// # futures_lite::future::block_on(async move {\n\n/// use futures_util::stream::TryStreamExt; // for lines.try_next()\n\n/// use yz_futures_codec::{Framed, codec::Lines, Error};\n\n///\n\n/// let input = \"hello\\nworld\\nthis\\nis\\ndog\\n\".as_bytes();\n\n/// let mut lines = Framed::new(input, Lines);\n\n/// while let Some(line) = lines.try_next().await? {\n\n/// println!(\"{}\", line);\n\n/// }\n\n/// # Ok::<_, Error<_>>(())\n\n/// # }).unwrap();\n\n/// ```\n", "file_path": "crates/codec/src/codec/lines.rs", "rank": 83, "score": 17916.627119380435 }, { "content": "use super::{Decoder, Encoder};\n\nuse bytes::{Buf, Bytes, BytesMut};\n\nuse std::convert::TryFrom;\n\nuse std::marker::PhantomData;\n\n\n\n/// A simple `Codec` implementation sending your data by prefixing it by its length.\n\n///\n\n/// # Example\n\n///\n\n/// This codec will most likely be used wrapped in another codec like so.\n\n///\n\n/// ```\n\n/// use yz_futures_codec::codec::{Decoder, Encoder, EncoderError, Length, OverflowError};\n\n/// use bytes::{Bytes, BytesMut};\n\n/// use std::io::{Error, ErrorKind};\n\n///\n\n/// pub struct MyStringCodec(Length::<u64>);\n\n///\n\n/// #[derive(Debug, thiserror::Error)]\n\n/// pub enum MyError {\n", "file_path": "crates/codec/src/codec/length.rs", "rank": 84, "score": 17916.269026087146 }, { "content": "/// # let mut buf = 
vec![];\n\n/// # let stream = Cursor::new(&mut buf);\n\n/// // let stream = ...\n\n/// let codec = Json::<Something, Something>::new();\n\n/// let mut framed = Framed::new(stream, codec);\n\n///\n\n/// while let Some(s) = framed.try_next().await.unwrap() {\n\n/// println!(\"{:?}\", s.data);\n\n/// }\n\n/// });\n\n/// ```\n\npub struct Json<Enc, Dec>(PhantomData<(Enc, Dec)>);\n\nimpl_phantom!(Json<Enc, Dec>);\n\n\n\n/// Decoder impl parses json objects from bytes\n\nimpl<Enc, Dec> Decoder for Json<Enc, Dec>\n\nwhere\n\n for<'de> Dec: Deserialize<'de> + 'static,\n\n{\n\n type Item = Dec;\n", "file_path": "crates/codec/src/codec/json.rs", "rank": 85, "score": 17916.08201163847 }, { "content": "\n\n // Write to buffer\n\n buf.reserve(j.len());\n\n buf.put_slice(&j.as_bytes());\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use bytes::BytesMut;\n\n use serde::{Deserialize, Serialize};\n\n\n\n use super::Json;\n\n use crate::{Decoder, Encoder};\n\n\n\n #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]\n\n struct TestStruct {\n\n pub name: String,\n", "file_path": "crates/codec/src/codec/json.rs", "rank": 86, "score": 17915.120565447105 }, { "content": "\n\n // Write to buffer\n\n buf.reserve(j.len());\n\n buf.put_slice(&j);\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use bytes::BytesMut;\n\n use serde::{Deserialize, Serialize};\n\n\n\n use super::Cbor;\n\n use crate::{Decoder, Encoder};\n\n\n\n #[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]\n\n struct TestStruct {\n\n pub name: String,\n", "file_path": "crates/codec/src/codec/cbor.rs", "rank": 87, "score": 17914.90365357604 }, { "content": "use super::{Decoder, Encoder, EncoderError};\n\nuse bytes::{Buf, BufMut, BytesMut};\n\nuse serde::{Deserialize, Serialize};\n\nuse std::marker::PhantomData;\n\n\n\n/// A codec for JSON encoding and decoding using serde_cbor\n\n/// Enc is the type to encode, Dec is the type to decode\n\n/// ```\n\n/// # use 
futures_util::{stream::TryStreamExt, io::Cursor};\n\n/// use serde::{Serialize, Deserialize};\n\n/// use yz_futures_codec::{codec::Cbor, Framed};\n\n/// use yz_futures_util::sink::SinkExt;\n\n///\n\n/// #[derive(Serialize, Deserialize)]\n\n/// struct Something {\n\n/// pub data: u16,\n\n/// }\n\n///\n\n/// futures_lite::future::block_on(async move {\n\n/// # let mut buf = vec![];\n", "file_path": "crates/codec/src/codec/cbor.rs", "rank": 88, "score": 17914.754595145558 }, { "content": "use super::{Decoder, Encoder};\n\nuse bytes::{Buf, BufMut, BytesMut};\n\nuse serde::{Deserialize, Serialize};\n\nuse serde_json::Error;\n\nuse std::marker::PhantomData;\n\n\n\n/// A codec for JSON encoding and decoding using serde_json\n\n/// Enc is the type to encode, Dec is the type to decode\n\n/// ```\n\n/// # use futures_util::{stream::TryStreamExt, io::Cursor};\n\n/// use serde::{Serialize, Deserialize};\n\n/// use yz_futures_codec::{codec::Json, Framed};\n\n/// use yz_futures_util::sink::SinkExt;\n\n///\n\n/// #[derive(Serialize, Deserialize)]\n\n/// struct Something {\n\n/// pub data: u16,\n\n/// }\n\n///\n\n/// futures_lite::future::block_on(async move {\n", "file_path": "crates/codec/src/codec/json.rs", "rank": 89, "score": 17914.246462961033 }, { "content": "/// ```\n\npub struct Length<L>(PhantomData<L>);\n\nimpl_phantom!(Length<L>);\n\n\n\n/// the error returned if [`Length`] fails\n\n#[derive(Debug, thiserror::Error)]\n\n#[error(\"length overflow\")]\n\npub struct OverflowError;\n\n\n\nimpl<L> Length<L> {\n\n const HEADER_LEN: usize = std::mem::size_of::<L>();\n\n}\n\n\n\n/// invariant: `std::mem::size_of::<Self>()` has the same length as the serialization of `Self`\n", "file_path": "crates/codec/src/codec/length.rs", "rank": 90, "score": 17913.436633955 }, { "content": " pub data: u16,\n\n }\n\n\n\n #[test]\n\n fn json_codec_encode_decode() {\n\n let mut codec = Json::<TestStruct, TestStruct>::new();\n\n let mut buff = BytesMut::new();\n\n\n\n let item1 = TestStruct 
{\n\n name: \"Test name\".to_owned(),\n\n data: 16,\n\n };\n\n codec.encode(item1.clone(), &mut buff).unwrap();\n\n\n\n let item2 = codec.decode(&mut buff).unwrap().unwrap();\n\n assert_eq!(item1, item2);\n\n\n\n assert_eq!(codec.decode(&mut buff).unwrap(), None);\n\n\n\n assert_eq!(buff.len(), 0);\n", "file_path": "crates/codec/src/codec/json.rs", "rank": 91, "score": 17913.430050730534 }, { "content": " pub data: u16,\n\n }\n\n\n\n #[test]\n\n fn cbor_codec_encode_decode() {\n\n let mut codec = Cbor::<TestStruct, TestStruct>::new();\n\n let mut buff = BytesMut::new();\n\n\n\n let item1 = TestStruct {\n\n name: \"Test name\".to_owned(),\n\n data: 16,\n\n };\n\n codec.encode(item1.clone(), &mut buff).unwrap();\n\n\n\n let item2 = codec.decode(&mut buff).unwrap().unwrap();\n\n assert_eq!(item1, item2);\n\n\n\n assert_eq!(codec.decode(&mut buff).unwrap(), None);\n\n\n\n assert_eq!(buff.len(), 0);\n", "file_path": "crates/codec/src/codec/cbor.rs", "rank": 92, "score": 17913.430050730534 }, { "content": "pub use self::limit::{DecoderWithSkipAhead, Limit, LimitError, SkipAheadHandler};\n\n\n\n#[cfg(feature = \"json\")]\n\nmod json;\n\n#[cfg(feature = \"json\")]\n\npub use self::json::Json;\n\n\n\n#[cfg(feature = \"cbor\")]\n\nmod cbor;\n\n#[cfg(feature = \"cbor\")]\n\npub use self::cbor::Cbor;\n", "file_path": "crates/codec/src/codec/mod.rs", "rank": 93, "score": 17913.30733591772 }, { "content": "use ::bytes::BytesMut;\n\n\n\n/// Decoding of frames via buffers, for use with [`Framed`](crate::Framed).\n", "file_path": "crates/codec/src/codec/mod.rs", "rank": 94, "score": 17912.782331510825 }, { "content": " }\n\n\n\n #[test]\n\n fn json_codec_partial_decode() {\n\n let mut codec = Json::<TestStruct, TestStruct>::new();\n\n let mut buff = BytesMut::new();\n\n\n\n let item1 = TestStruct {\n\n name: \"Test name\".to_owned(),\n\n data: 34,\n\n };\n\n codec.encode(item1.clone(), &mut buff).unwrap();\n\n\n\n let mut start = buff.clone().split_to(4);\n\n 
assert_eq!(codec.decode(&mut start).unwrap(), None);\n\n\n\n codec.decode(&mut buff).unwrap().unwrap();\n\n\n\n assert_eq!(buff.len(), 0);\n\n }\n\n}\n", "file_path": "crates/codec/src/codec/json.rs", "rank": 95, "score": 17911.430948217247 }, { "content": " }\n\n\n\n #[test]\n\n fn cbor_codec_partial_decode() {\n\n let mut codec = Cbor::<TestStruct, TestStruct>::new();\n\n let mut buff = BytesMut::new();\n\n\n\n let item1 = TestStruct {\n\n name: \"Test name\".to_owned(),\n\n data: 34,\n\n };\n\n codec.encode(item1.clone(), &mut buff).unwrap();\n\n\n\n let mut start = buff.clone().split_to(4);\n\n assert_eq!(codec.decode(&mut start).unwrap(), None);\n\n\n\n codec.decode(&mut buff).unwrap().unwrap();\n\n\n\n assert_eq!(buff.len(), 0);\n\n }\n\n}\n", "file_path": "crates/codec/src/codec/cbor.rs", "rank": 96, "score": 17911.430948217247 }, { "content": "# yz-futures-codec\n\n\n\nUtilities for encoding and decoding frames using `async/await`.\n\n\n\nContains adapters to go from streams of bytes, `AsyncRead` and `AsyncWrite`,\n\nto framed streams implementing `Sink` and `Stream`. 
Framed streams are also known as transports.\n\n\n\n[![Latest Version](https://img.shields.io/crates/v/yz-futures-codec.svg)](https://crates.io/crates/yz-futures-codec)\n\n[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/yz-futures-codec)\n\n\n\n## License\n\n\n\nLicensed under either of\n\n\n\n * Apache License, Version 2.0\n\n ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)\n\n * MIT license\n\n ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)\n\n\n\nat your option.\n\n\n\n### License of tests\n\n\n\nThe tests are licensed only under Apache License, Version 2.0,\n\nin order to be compatible with [`futures-micro`](https://github.com/irrustible/futures-micro/issues/5).\n\n\n\n## Contribution\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally submitted\n\nfor inclusion in the work by you, as defined in the Apache-2.0 license, shall be\n\ndual licensed as above, without any additional terms or conditions.\n\n\n\n## Example\n\n```rust\n\nuse yz_futures_codec::{LinesCodec, Framed};\n\n\n\nasync fn main() {\n\n // let stream = ...\n\n let mut framed = Framed::new(stream, LinesCodec {});\n\n\n\n while let Some(line) = framed.try_next().await.unwrap() {\n\n println!(\"{:?}\", line);\n\n }\n\n}\n\n```\n", "file_path": "crates/codec/README.md", "rank": 97, "score": 10.877081637613733 }, { "content": "use futures_lite::future::block_on;\n\nuse futures_util::{io::AsyncRead, stream::StreamExt};\n\nuse std::io;\n\nuse std::pin::Pin;\n\nuse std::task::{Context, Poll};\n\nuse yz_futures_codec::{codec::Decoder, codec::Lines, BytesMut, Framed};\n\n\n\n// Sends two lines at once, then nothing else forever\n", "file_path": "crates/codec/tests/read.rs", "rank": 98, "score": 10.385556976379688 }, { "content": "# yz-futures-util\n\n\n\nUtilities for working with `yz-futures-sink` using `async/await`.\n\n\n\n[![Latest 
Version](https://img.shields.io/crates/v/yz-futures-codec.svg)](https://crates.io/crates/yz-futures-util)\n\n[![Rust Documentation](https://img.shields.io/badge/api-rustdoc-blue.svg)](https://docs.rs/yz-futures-util)\n\n\n\n## License\n\n\n\n[Licensed](LICENSE) under Apache License, Version 2.0 (https://www.apache.org/licenses/LICENSE-2.0).\n\n\n\n## Contribution\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally submitted\n\nfor inclusion in the work by you, as defined in the Apache-2.0 license, shall be\n\nlicensed as above, without any additional terms or conditions.\n\n\n\n## Example\n\n```rust\n\nuse yz_futures_codec::{LinesCodec, Framed};\n\n\n\nasync fn main() {\n\n // let stream = ...\n\n let mut framed = Framed::new(stream, LinesCodec {});\n\n\n\n while let Some(line) = framed.try_next().await.unwrap() {\n\n println!(\"{:?}\", line);\n\n }\n\n}\n\n```\n", "file_path": "crates/util/README.md", "rank": 99, "score": 9.840095361437523 } ]
Rust
src/diagnostics/archivist/tests/v2/src/logs/budget.rs
wwjiang007/fuchsia-1
0db66b52b5bcd3e27c8b8c2163925309e8522f94
use crate::{constants::*, test_topology, utils}; use anyhow::Error; use archivist_lib::configs::parse_config; use component_events::{events::*, matcher::ExitStatusMatcher}; use diagnostics_data::{Data, LogError, Logs, Severity}; use diagnostics_hierarchy::trie::TrieIterableNode; use diagnostics_message::message::{fx_log_packet_t, METADATA_SIZE}; use diagnostics_reader::{ArchiveReader, Inspect, SubscriptionResultsStream}; use fidl_fuchsia_archivist_tests::{ SocketPuppetControllerRequest, SocketPuppetControllerRequestStream, SocketPuppetProxy, }; use fidl_fuchsia_component as fcomponent; use fidl_fuchsia_diagnostics::ArchiveAccessorMarker; use fidl_fuchsia_io::DirectoryMarker; use fidl_fuchsia_sys2::{ChildRef, EventSourceMarker, RealmMarker}; use fuchsia_async::{Task, Timer}; use fuchsia_component::{client, server::ServiceFs}; use fuchsia_component_test::{builder::*, mock, RealmInstance}; use fuchsia_zircon as zx; use futures::{ channel::mpsc::{self, Receiver}, StreamExt, }; use rand::{prelude::SliceRandom, rngs::StdRng, SeedableRng}; use std::{collections::BTreeMap, ops::Deref, time::Duration}; use tracing::{debug, info, trace}; const TEST_PACKET_LEN: usize = 49; const MAX_PUPPETS: usize = 5; #[fuchsia_async::run_singlethreaded(test)] async fn test_budget() { fuchsia_syslog::init().unwrap(); fuchsia_syslog::set_severity(fuchsia_syslog::levels::DEBUG); info!("testing that the archivist's log buffers correctly enforce their budget"); info!("creating nested environment for collecting diagnostics"); let mut env = PuppetEnv::create(MAX_PUPPETS).await; info!("check that archivist log state is clean"); env.assert_archivist_state_matches_expected().await; for i in 0..MAX_PUPPETS { env.launch_puppet(i).await; } env.validate().await; } struct PuppetEnv { max_puppets: usize, instance: RealmInstance, controllers: Receiver<SocketPuppetControllerRequestStream>, messages_allowed_in_cache: usize, messages_sent: Vec<MessageReceipt>, launched_monikers: Vec<String>, running_puppets: 
Vec<Puppet>, inspect_reader: ArchiveReader, log_reader: ArchiveReader, log_subscription: SubscriptionResultsStream<Logs>, rng: StdRng, _log_errors: Task<()>, } impl PuppetEnv { async fn create(max_puppets: usize) -> Self { let (sender, controllers) = mpsc::channel(1); let mut builder = test_topology::create(test_topology::Options { archivist_url: ARCHIVIST_WITH_SMALL_CACHES, }) .await .expect("create base topology"); builder .add_component( "mocks-server", ComponentSource::Mock(mock::Mock::new(move |mock_handles: mock::MockHandles| { Box::pin(run_mocks(mock_handles, sender.clone())) })), ) .await .unwrap(); for i in 0..max_puppets { let name = format!("test/puppet-{}", i); builder .add_component(name.clone(), ComponentSource::url(SOCKET_PUPPET_COMPONENT_URL)) .await .unwrap() .add_route(CapabilityRoute { capability: Capability::protocol( "fuchsia.archivist.tests.SocketPuppetController", ), source: RouteEndpoint::component("mocks-server"), targets: vec![RouteEndpoint::component(name.clone())], }) .unwrap() .add_route(CapabilityRoute { capability: Capability::protocol("fuchsia.logger.LogSink"), source: RouteEndpoint::component("test/archivist"), targets: vec![RouteEndpoint::component(name)], }) .unwrap(); } info!("starting our instance"); let mut realm = builder.build(); test_topology::expose_test_realm_protocol(&mut realm).await; let instance = realm.create().await.expect("create instance"); let config = parse_config("/pkg/data/config/small-caches-config.json").unwrap(); let messages_allowed_in_cache = config.logs.max_cached_original_bytes / TEST_PACKET_LEN; let archive = || instance.root.connect_to_protocol_at_exposed_dir::<ArchiveAccessorMarker>().unwrap(); let mut inspect_reader = ArchiveReader::new(); inspect_reader .with_archive(archive()) .with_minimum_schema_count(1) .add_selector("archivist:root/logs_buffer") .add_selector("archivist:root/sources"); let mut log_reader = ArchiveReader::new(); log_reader .with_archive(archive()) .with_minimum_schema_count(0) 
.retry_if_empty(false); let (log_subscription, mut errors) = log_reader.snapshot_then_subscribe::<Logs>().unwrap().split_streams(); let _log_errors = Task::spawn(async move { if let Some(error) = errors.next().await { panic!("{:#?}", error); } }); Self { max_puppets, controllers, instance, messages_allowed_in_cache, messages_sent: vec![], launched_monikers: vec![], running_puppets: vec![], inspect_reader, log_reader, log_subscription, rng: StdRng::seed_from_u64(0xA455), _log_errors, } } async fn launch_puppet(&mut self, id: usize) { assert!(id < self.max_puppets); let mut child_ref = ChildRef { name: format!("puppet-{}", id), collection: None }; let (exposed_dir, server_end) = fidl::endpoints::create_proxy::<DirectoryMarker>().unwrap(); let realm = self.instance.root.connect_to_protocol_at_exposed_dir::<RealmMarker>().unwrap(); realm.open_exposed_dir(&mut child_ref, server_end).await.unwrap().unwrap(); let _ = client::connect_to_protocol_at_dir_root::<fcomponent::BinderMarker>(&exposed_dir) .unwrap(); debug!("waiting for controller request"); let mut controller = self.controllers.next().await.unwrap(); debug!("waiting for ControlPuppet call"); let proxy = match controller.next().await { Some(Ok(SocketPuppetControllerRequest::ControlPuppet { to_control, control_handle, })) => { control_handle.shutdown(); to_control.into_proxy().unwrap() } _ => panic!("did not expect that"), }; let moniker = format!( "fuchsia_component_test_collection:{}/test/puppet-{}", self.instance.root.child_name(), id ); let puppet = Puppet { id, moniker: moniker.clone(), proxy }; info!("having the puppet connect to LogSink"); puppet.connect_to_log_sink().await.unwrap(); info!("observe the puppet appears in archivist's inspect output"); self.launched_monikers.push(moniker); self.running_puppets.push(puppet); while self.current_expected_sources() != self.current_observed_sources().await { Timer::new(Duration::from_millis(100)).await; } } fn current_expected_sources(&self) -> BTreeMap<String, 
Count> { let mut expected_sources = BTreeMap::new(); for source in &self.launched_monikers { expected_sources.insert(source.clone(), Count { total: 0, dropped: 0 }); } for (prior_messages, receipt) in self.messages_sent.iter().rev().enumerate() { let mut puppet_count = expected_sources.get_mut(&receipt.moniker).unwrap(); puppet_count.total += 1; if prior_messages >= self.messages_allowed_in_cache { puppet_count.dropped += 1; } } expected_sources .into_iter() .filter(|(moniker, count)| { let has_messages = count.total > 0 && count.total != count.dropped; let is_running = self.running_puppets.iter().find(|puppet| moniker == &puppet.moniker).is_some(); is_running || has_messages }) .collect() } async fn current_observed_sources(&self) -> BTreeMap<String, Count> { let results = self.inspect_reader.snapshot::<Inspect>().await.unwrap().into_iter().next().unwrap(); let root = results.payload.as_ref().unwrap(); let mut counts = BTreeMap::new(); let sources = root.get_child("sources").unwrap(); for (moniker, source) in sources.get_children() { if let Some(logs) = source.get_child("logs") { let total = logs.get_child("total").unwrap(); let total_number = *total.get_property("number").unwrap().uint().unwrap() as usize; let total_bytes = *total.get_property("bytes").unwrap().uint().unwrap() as usize; assert_eq!(total_bytes, total_number * TEST_PACKET_LEN); let dropped = logs.get_child("dropped").unwrap(); let dropped_number = *dropped.get_property("number").unwrap().uint().unwrap() as usize; let dropped_bytes = *dropped.get_property("bytes").unwrap().uint().unwrap() as usize; assert_eq!(dropped_bytes, dropped_number * TEST_PACKET_LEN); counts.insert( moniker.clone(), Count { total: total_number, dropped: dropped_number }, ); } } counts } async fn assert_archivist_state_matches_expected(&self) { let expected_sources = self.current_expected_sources(); let observed_sources = self.current_observed_sources().await; assert_eq!(observed_sources, expected_sources); let expected_drops 
= || expected_sources.iter().filter(|(_, c)| c.dropped > 0); let mut expected_logs = self .messages_sent .iter() .rev() .take(self.messages_allowed_in_cache) .rev(); trace!("reading log snapshot"); let observed_logs = self.log_reader.snapshot::<Logs>().await.unwrap().into_iter(); let mut dropped_message_warnings = BTreeMap::new(); for observed in observed_logs { if observed.metadata.errors.is_some() { dropped_message_warnings.insert(observed.moniker.clone(), observed); } else { let expected = expected_logs.next().unwrap(); assert_eq!(expected, &observed); } } for (moniker, Count { dropped, .. }) in expected_drops() { let dropped_logs_warning = dropped_message_warnings.remove(moniker).unwrap(); assert_eq!( dropped_logs_warning.metadata.errors, Some(vec![LogError::DroppedLogs { count: *dropped as u64 }]) ); assert_eq!(dropped_logs_warning.metadata.severity, Severity::Warn); } assert!(dropped_message_warnings.is_empty(), "must have encountered all expected warnings"); } async fn validate(mut self) { let overall_messages_to_log = self.messages_allowed_in_cache * 15; let iteration_for_killing_a_puppet = self.messages_allowed_in_cache; let event_source = EventSource::from_proxy(client::connect_to_protocol::<EventSourceMarker>().unwrap()); let mut event_stream = event_source .subscribe(vec![EventSubscription::new(vec![Stopped::NAME], EventMode::Async)]) .await .unwrap(); info!("having the puppets log packets until overflow"); for i in 0..overall_messages_to_log { trace!(i, "loop ticked"); if i == iteration_for_killing_a_puppet { let to_stop = self.running_puppets.pop().unwrap(); let receipt = to_stop.emit_packet().await; self.check_receipt(receipt).await; let id = to_stop.id; drop(to_stop); utils::wait_for_component_stopped_event( &self.instance.root.child_name(), &format!("puppet-{}", id), ExitStatusMatcher::Clean, &mut event_stream, ) .await; } let puppet = self.running_puppets.choose(&mut self.rng).unwrap(); let receipt = puppet.emit_packet().await; 
self.check_receipt(receipt).await; } assert_eq!( self.current_expected_sources().len(), self.running_puppets.len(), "must have stopped a component and rolled out all of its logs" ); info!("test complete!"); } async fn check_receipt(&mut self, receipt: MessageReceipt) { let next_message = self.log_subscription.next().await.unwrap(); assert_eq!(receipt, next_message); self.messages_sent.push(receipt); self.assert_archivist_state_matches_expected().await; } } struct Puppet { proxy: SocketPuppetProxy, moniker: String, id: usize, } impl std::fmt::Debug for Puppet { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("Puppet").field("moniker", &self.moniker).finish() } } impl Puppet { async fn emit_packet(&self) -> MessageReceipt { let timestamp = zx::Time::get_monotonic().into_nanos(); let mut packet: fx_log_packet_t = Default::default(); packet.metadata.severity = fuchsia_syslog::levels::INFO; packet.metadata.time = timestamp; packet.fill_data(1..(TEST_PACKET_LEN - METADATA_SIZE), b'A' as _); self.proxy.emit_packet(packet.as_bytes()).await.unwrap(); MessageReceipt { timestamp, moniker: self.moniker.clone() } } } impl Deref for Puppet { type Target = SocketPuppetProxy; fn deref(&self) -> &Self::Target { &self.proxy } } async fn run_mocks( mock_handles: mock::MockHandles, mut sender: mpsc::Sender<SocketPuppetControllerRequestStream>, ) -> Result<(), Error> { let mut fs = ServiceFs::new(); fs.dir("svc").add_fidl_service(move |stream: SocketPuppetControllerRequestStream| { sender.start_send(stream).unwrap(); }); fs.serve_connection(mock_handles.outgoing_dir.into_channel())?; fs.collect::<()>().await; Ok(()) } #[derive(Clone, Copy, Debug, PartialEq)] struct Count { total: usize, dropped: usize, } #[derive(Clone, Debug, PartialEq)] struct MessageReceipt { moniker: String, timestamp: i64, } impl PartialEq<Data<Logs>> for MessageReceipt { fn eq(&self, other: &Data<Logs>) -> bool { other.moniker.starts_with(&self.moniker) && 
*other.metadata.timestamp as i64 == self.timestamp } }
use crate::{constants::*, test_topology, utils}; use anyhow::Error; use archivist_lib::configs::parse_config; use component_events::{events::*, matcher::ExitStatusMatcher}; use diagnostics_data::{Data, LogError, Logs, Severity}; use diagnostics_hierarchy::trie::TrieIterableNode; use diagnostics_message::message::{fx_log_packet_t, METADATA_SIZE}; use diagnostics_reader::{ArchiveReader, Inspect, SubscriptionResultsStream}; use fidl_fuchsia_archivist_tests::{ SocketPuppetControllerRequest, SocketPuppetControllerRequestStream, SocketPuppetProxy, }; use fidl_fuchsia_component as fcomponent; use fidl_fuchsia_diagnostics::ArchiveAccessorMarker; use fidl_fuchsia_io::DirectoryMarker; use fidl_fuchsia_sys2::{ChildRef, EventSourceMarker, RealmMarker}; use fuchsia_async::{Task, Timer}; use fuchsia_component::{client, server::ServiceFs}; use fuchsia_component_test::{builder::*, mock, RealmInstance}; use fuchsia_zircon as zx; use futures::{ channel::mpsc::{self, Receiver}, StreamExt, }; use rand::{prelude::SliceRandom, rngs::StdRng, SeedableRng}; use std::{collections::BTreeMap, ops::Deref, time::Duration}; use tracing::{debug, info, trace}; const TEST_PACKET_LEN: usize = 49; const MAX_PUPPETS: usize = 5; #[fuchsia_async::run_singlethreaded(test)]
struct PuppetEnv { max_puppets: usize, instance: RealmInstance, controllers: Receiver<SocketPuppetControllerRequestStream>, messages_allowed_in_cache: usize, messages_sent: Vec<MessageReceipt>, launched_monikers: Vec<String>, running_puppets: Vec<Puppet>, inspect_reader: ArchiveReader, log_reader: ArchiveReader, log_subscription: SubscriptionResultsStream<Logs>, rng: StdRng, _log_errors: Task<()>, } impl PuppetEnv { async fn create(max_puppets: usize) -> Self { let (sender, controllers) = mpsc::channel(1); let mut builder = test_topology::create(test_topology::Options { archivist_url: ARCHIVIST_WITH_SMALL_CACHES, }) .await .expect("create base topology"); builder .add_component( "mocks-server", ComponentSource::Mock(mock::Mock::new(move |mock_handles: mock::MockHandles| { Box::pin(run_mocks(mock_handles, sender.clone())) })), ) .await .unwrap(); for i in 0..max_puppets { let name = format!("test/puppet-{}", i); builder .add_component(name.clone(), ComponentSource::url(SOCKET_PUPPET_COMPONENT_URL)) .await .unwrap() .add_route(CapabilityRoute { capability: Capability::protocol( "fuchsia.archivist.tests.SocketPuppetController", ), source: RouteEndpoint::component("mocks-server"), targets: vec![RouteEndpoint::component(name.clone())], }) .unwrap() .add_route(CapabilityRoute { capability: Capability::protocol("fuchsia.logger.LogSink"), source: RouteEndpoint::component("test/archivist"), targets: vec![RouteEndpoint::component(name)], }) .unwrap(); } info!("starting our instance"); let mut realm = builder.build(); test_topology::expose_test_realm_protocol(&mut realm).await; let instance = realm.create().await.expect("create instance"); let config = parse_config("/pkg/data/config/small-caches-config.json").unwrap(); let messages_allowed_in_cache = config.logs.max_cached_original_bytes / TEST_PACKET_LEN; let archive = || instance.root.connect_to_protocol_at_exposed_dir::<ArchiveAccessorMarker>().unwrap(); let mut inspect_reader = ArchiveReader::new(); inspect_reader 
.with_archive(archive()) .with_minimum_schema_count(1) .add_selector("archivist:root/logs_buffer") .add_selector("archivist:root/sources"); let mut log_reader = ArchiveReader::new(); log_reader .with_archive(archive()) .with_minimum_schema_count(0) .retry_if_empty(false); let (log_subscription, mut errors) = log_reader.snapshot_then_subscribe::<Logs>().unwrap().split_streams(); let _log_errors = Task::spawn(async move { if let Some(error) = errors.next().await { panic!("{:#?}", error); } }); Self { max_puppets, controllers, instance, messages_allowed_in_cache, messages_sent: vec![], launched_monikers: vec![], running_puppets: vec![], inspect_reader, log_reader, log_subscription, rng: StdRng::seed_from_u64(0xA455), _log_errors, } } async fn launch_puppet(&mut self, id: usize) { assert!(id < self.max_puppets); let mut child_ref = ChildRef { name: format!("puppet-{}", id), collection: None }; let (exposed_dir, server_end) = fidl::endpoints::create_proxy::<DirectoryMarker>().unwrap(); let realm = self.instance.root.connect_to_protocol_at_exposed_dir::<RealmMarker>().unwrap(); realm.open_exposed_dir(&mut child_ref, server_end).await.unwrap().unwrap(); let _ = client::connect_to_protocol_at_dir_root::<fcomponent::BinderMarker>(&exposed_dir) .unwrap(); debug!("waiting for controller request"); let mut controller = self.controllers.next().await.unwrap(); debug!("waiting for ControlPuppet call"); let proxy = match controller.next().await { Some(Ok(SocketPuppetControllerRequest::ControlPuppet { to_control, control_handle, })) => { control_handle.shutdown(); to_control.into_proxy().unwrap() } _ => panic!("did not expect that"), }; let moniker = format!( "fuchsia_component_test_collection:{}/test/puppet-{}", self.instance.root.child_name(), id ); let puppet = Puppet { id, moniker: moniker.clone(), proxy }; info!("having the puppet connect to LogSink"); puppet.connect_to_log_sink().await.unwrap(); info!("observe the puppet appears in archivist's inspect output"); 
self.launched_monikers.push(moniker); self.running_puppets.push(puppet); while self.current_expected_sources() != self.current_observed_sources().await { Timer::new(Duration::from_millis(100)).await; } } fn current_expected_sources(&self) -> BTreeMap<String, Count> { let mut expected_sources = BTreeMap::new(); for source in &self.launched_monikers { expected_sources.insert(source.clone(), Count { total: 0, dropped: 0 }); } for (prior_messages, receipt) in self.messages_sent.iter().rev().enumerate() { let mut puppet_count = expected_sources.get_mut(&receipt.moniker).unwrap(); puppet_count.total += 1; if prior_messages >= self.messages_allowed_in_cache { puppet_count.dropped += 1; } } expected_sources .into_iter() .filter(|(moniker, count)| { let has_messages = count.total > 0 && count.total != count.dropped; let is_running = self.running_puppets.iter().find(|puppet| moniker == &puppet.moniker).is_some(); is_running || has_messages }) .collect() } async fn current_observed_sources(&self) -> BTreeMap<String, Count> { let results = self.inspect_reader.snapshot::<Inspect>().await.unwrap().into_iter().next().unwrap(); let root = results.payload.as_ref().unwrap(); let mut counts = BTreeMap::new(); let sources = root.get_child("sources").unwrap(); for (moniker, source) in sources.get_children() { if let Some(logs) = source.get_child("logs") { let total = logs.get_child("total").unwrap(); let total_number = *total.get_property("number").unwrap().uint().unwrap() as usize; let total_bytes = *total.get_property("bytes").unwrap().uint().unwrap() as usize; assert_eq!(total_bytes, total_number * TEST_PACKET_LEN); let dropped = logs.get_child("dropped").unwrap(); let dropped_number = *dropped.get_property("number").unwrap().uint().unwrap() as usize; let dropped_bytes = *dropped.get_property("bytes").unwrap().uint().unwrap() as usize; assert_eq!(dropped_bytes, dropped_number * TEST_PACKET_LEN); counts.insert( moniker.clone(), Count { total: total_number, dropped: dropped_number }, 
); } } counts } async fn assert_archivist_state_matches_expected(&self) { let expected_sources = self.current_expected_sources(); let observed_sources = self.current_observed_sources().await; assert_eq!(observed_sources, expected_sources); let expected_drops = || expected_sources.iter().filter(|(_, c)| c.dropped > 0); let mut expected_logs = self .messages_sent .iter() .rev() .take(self.messages_allowed_in_cache) .rev(); trace!("reading log snapshot"); let observed_logs = self.log_reader.snapshot::<Logs>().await.unwrap().into_iter(); let mut dropped_message_warnings = BTreeMap::new(); for observed in observed_logs { if observed.metadata.errors.is_some() { dropped_message_warnings.insert(observed.moniker.clone(), observed); } else { let expected = expected_logs.next().unwrap(); assert_eq!(expected, &observed); } } for (moniker, Count { dropped, .. }) in expected_drops() { let dropped_logs_warning = dropped_message_warnings.remove(moniker).unwrap(); assert_eq!( dropped_logs_warning.metadata.errors, Some(vec![LogError::DroppedLogs { count: *dropped as u64 }]) ); assert_eq!(dropped_logs_warning.metadata.severity, Severity::Warn); } assert!(dropped_message_warnings.is_empty(), "must have encountered all expected warnings"); } async fn validate(mut self) { let overall_messages_to_log = self.messages_allowed_in_cache * 15; let iteration_for_killing_a_puppet = self.messages_allowed_in_cache; let event_source = EventSource::from_proxy(client::connect_to_protocol::<EventSourceMarker>().unwrap()); let mut event_stream = event_source .subscribe(vec![EventSubscription::new(vec![Stopped::NAME], EventMode::Async)]) .await .unwrap(); info!("having the puppets log packets until overflow"); for i in 0..overall_messages_to_log { trace!(i, "loop ticked"); if i == iteration_for_killing_a_puppet { let to_stop = self.running_puppets.pop().unwrap(); let receipt = to_stop.emit_packet().await; self.check_receipt(receipt).await; let id = to_stop.id; drop(to_stop); 
utils::wait_for_component_stopped_event( &self.instance.root.child_name(), &format!("puppet-{}", id), ExitStatusMatcher::Clean, &mut event_stream, ) .await; } let puppet = self.running_puppets.choose(&mut self.rng).unwrap(); let receipt = puppet.emit_packet().await; self.check_receipt(receipt).await; } assert_eq!( self.current_expected_sources().len(), self.running_puppets.len(), "must have stopped a component and rolled out all of its logs" ); info!("test complete!"); } async fn check_receipt(&mut self, receipt: MessageReceipt) { let next_message = self.log_subscription.next().await.unwrap(); assert_eq!(receipt, next_message); self.messages_sent.push(receipt); self.assert_archivist_state_matches_expected().await; } } struct Puppet { proxy: SocketPuppetProxy, moniker: String, id: usize, } impl std::fmt::Debug for Puppet { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("Puppet").field("moniker", &self.moniker).finish() } } impl Puppet { async fn emit_packet(&self) -> MessageReceipt { let timestamp = zx::Time::get_monotonic().into_nanos(); let mut packet: fx_log_packet_t = Default::default(); packet.metadata.severity = fuchsia_syslog::levels::INFO; packet.metadata.time = timestamp; packet.fill_data(1..(TEST_PACKET_LEN - METADATA_SIZE), b'A' as _); self.proxy.emit_packet(packet.as_bytes()).await.unwrap(); MessageReceipt { timestamp, moniker: self.moniker.clone() } } } impl Deref for Puppet { type Target = SocketPuppetProxy; fn deref(&self) -> &Self::Target { &self.proxy } } async fn run_mocks( mock_handles: mock::MockHandles, mut sender: mpsc::Sender<SocketPuppetControllerRequestStream>, ) -> Result<(), Error> { let mut fs = ServiceFs::new(); fs.dir("svc").add_fidl_service(move |stream: SocketPuppetControllerRequestStream| { sender.start_send(stream).unwrap(); }); fs.serve_connection(mock_handles.outgoing_dir.into_channel())?; fs.collect::<()>().await; Ok(()) } #[derive(Clone, Copy, Debug, PartialEq)] struct Count { total: usize, 
dropped: usize, } #[derive(Clone, Debug, PartialEq)] struct MessageReceipt { moniker: String, timestamp: i64, } impl PartialEq<Data<Logs>> for MessageReceipt { fn eq(&self, other: &Data<Logs>) -> bool { other.moniker.starts_with(&self.moniker) && *other.metadata.timestamp as i64 == self.timestamp } }
async fn test_budget() { fuchsia_syslog::init().unwrap(); fuchsia_syslog::set_severity(fuchsia_syslog::levels::DEBUG); info!("testing that the archivist's log buffers correctly enforce their budget"); info!("creating nested environment for collecting diagnostics"); let mut env = PuppetEnv::create(MAX_PUPPETS).await; info!("check that archivist log state is clean"); env.assert_archivist_state_matches_expected().await; for i in 0..MAX_PUPPETS { env.launch_puppet(i).await; } env.validate().await; }
function_block-full_function
[]
Rust
cortex-m-rtfm/macros/src/codegen/util.rs
ButtNaked/m4mon8
017dc3e1caed191804e795c0709580f1a58ad73e
use core::sync::atomic::{AtomicUsize, Ordering}; use proc_macro2::{Span, TokenStream as TokenStream2}; use quote::quote; use rtfm_syntax::{ast::App, Context, Core}; use syn::{Attribute, Ident, LitInt, PatType}; use crate::check::Extra; pub fn capacity_literal(capacity: u8) -> LitInt { LitInt::new(&capacity.to_string(), Span::call_site()) } pub fn capacity_typenum(capacity: u8, round_up_to_power_of_two: bool) -> TokenStream2 { let capacity = if round_up_to_power_of_two { capacity.checked_next_power_of_two().expect("UNREACHABLE") } else { capacity }; let ident = Ident::new(&format!("U{}", capacity), Span::call_site()); quote!(rtfm::export::consts::#ident) } pub fn cfg_core(core: Core, cores: u8) -> Option<TokenStream2> { if cores == 1 { None } else if cfg!(feature = "heterogeneous") { let core = core.to_string(); Some(quote!(#[cfg(core = #core)])) } else { None } } pub fn fq_ident(task: &Ident, sender: Core) -> Ident { Ident::new( &format!("{}_S{}_FQ", task.to_string(), sender), Span::call_site(), ) } pub fn impl_mutex( extra: &Extra, cfgs: &[Attribute], cfg_core: Option<&TokenStream2>, resources_prefix: bool, name: &Ident, ty: TokenStream2, ceiling: u8, ptr: TokenStream2, ) -> TokenStream2 { let (path, priority) = if resources_prefix { (quote!(resources::#name), quote!(self.priority())) } else { (quote!(#name), quote!(self.priority)) }; let device = extra.device; quote!( #(#cfgs)* #cfg_core impl<'a> rtfm::Mutex for #path<'a> { type T = #ty; #[inline(always)] fn lock<R>(&mut self, f: impl FnOnce(&mut #ty) -> R) -> R { const CEILING: u8 = #ceiling; unsafe { rtfm::export::lock( #ptr, #priority, CEILING, #device::NVIC_PRIO_BITS, f, ) } } } ) } pub fn init_barrier(initializer: Core) -> Ident { Ident::new(&format!("IB{}", initializer), Span::call_site()) } pub fn inputs_ident(task: &Ident, sender: Core) -> Ident { Ident::new(&format!("{}_S{}_INPUTS", task, sender), Span::call_site()) } pub fn instants_ident(task: &Ident, sender: Core) -> Ident { 
Ident::new(&format!("{}_S{}_INSTANTS", task, sender), Span::call_site()) } pub fn interrupt_ident(core: Core, cores: u8) -> Ident { let span = Span::call_site(); if cores == 1 { Ident::new("Interrupt", span) } else { Ident::new(&format!("Interrupt_{}", core), span) } } pub fn is_exception(name: &Ident) -> bool { let s = name.to_string(); match &*s { "MemoryManagement" | "BusFault" | "UsageFault" | "SecureFault" | "SVCall" | "DebugMonitor" | "PendSV" | "SysTick" => true, _ => false, } } pub fn late_resources_ident(init: &Ident) -> Ident { Ident::new( &format!("{}LateResources", init.to_string()), Span::call_site(), ) } fn link_section_index() -> usize { static INDEX: AtomicUsize = AtomicUsize::new(0); INDEX.fetch_add(1, Ordering::Relaxed) } pub fn link_section(section: &str, core: Core) -> Option<TokenStream2> { if cfg!(feature = "homogeneous") { let section = format!(".{}_{}.rtfm{}", section, core, link_section_index()); Some(quote!(#[link_section = #section])) } else { None } } pub fn link_section_uninit(core: Option<Core>) -> Option<TokenStream2> { let section = if let Some(core) = core { let index = link_section_index(); if cfg!(feature = "homogeneous") { format!(".uninit_{}.rtfm{}", core, index) } else { format!(".uninit.rtfm{}", index) } } else { if cfg!(feature = "heterogeneous") { return None; } format!(".uninit.rtfm{}", link_section_index()) }; Some(quote!(#[link_section = #section])) } pub fn locals_ident(ctxt: Context, app: &App) -> Ident { let mut s = match ctxt { Context::Init(core) => app.inits[&core].name.to_string(), Context::Idle(core) => app.idles[&core].name.to_string(), Context::HardwareTask(ident) | Context::SoftwareTask(ident) => ident.to_string(), }; s.push_str("Locals"); Ident::new(&s, Span::call_site()) } pub fn rendezvous_ident(core: Core) -> Ident { Ident::new(&format!("RV{}", core), Span::call_site()) } pub fn regroup_inputs( inputs: &[PatType], ) -> ( Vec<TokenStream2>, TokenStream2, Vec<TokenStream2>, TokenStream2, ) { if inputs.len() 
== 1 { let ty = &inputs[0].ty; ( vec![quote!(_0: #ty)], quote!(_0), vec![quote!(_0)], quote!(#ty), ) } else { let mut args = vec![]; let mut pats = vec![]; let mut tys = vec![]; for (i, input) in inputs.iter().enumerate() { let i = Ident::new(&format!("_{}", i), Span::call_site()); let ty = &input.ty; args.push(quote!(#i: #ty)); pats.push(quote!(#i)); tys.push(quote!(#ty)); } let tupled = { let pats = pats.clone(); quote!((#(#pats,)*)) }; let ty = quote!((#(#tys,)*)); (args, tupled, pats, ty) } } pub fn resources_ident(ctxt: Context, app: &App) -> Ident { let mut s = match ctxt { Context::Init(core) => app.inits[&core].name.to_string(), Context::Idle(core) => app.idles[&core].name.to_string(), Context::HardwareTask(ident) | Context::SoftwareTask(ident) => ident.to_string(), }; s.push_str("Resources"); Ident::new(&s, Span::call_site()) } pub fn rq_ident(receiver: Core, priority: u8, sender: Core) -> Ident { Ident::new( &format!("R{}_P{}_S{}_RQ", receiver, priority, sender), Span::call_site(), ) } pub fn schedule_ident(name: &Ident, sender: Core) -> Ident { Ident::new( &format!("schedule_{}_S{}", name.to_string(), sender), Span::call_site(), ) } pub fn schedule_t_ident(core: Core) -> Ident { Ident::new(&format!("T{}", core), Span::call_site()) } pub fn spawn_barrier(receiver: Core) -> Ident { Ident::new(&format!("SB{}", receiver), Span::call_site()) } pub fn spawn_ident(name: &Ident, sender: Core) -> Ident { Ident::new( &format!("spawn_{}_S{}", name.to_string(), sender), Span::call_site(), ) } pub fn spawn_t_ident(receiver: Core, priority: u8, sender: Core) -> Ident { Ident::new( &format!("R{}_P{}_S{}_T", receiver, priority, sender), Span::call_site(), ) } pub fn suffixed(name: &str, core: u8) -> Ident { let span = Span::call_site(); if cfg!(feature = "homogeneous") { Ident::new(&format!("{}_{}", name, core), span) } else { Ident::new(name, span) } } pub fn tq_ident(core: Core) -> Ident { Ident::new(&format!("TQ{}", core), Span::call_site()) }
use core::sync::atomic::{AtomicUsize, Ordering}; use proc_macro2::{Span, TokenStream as TokenStream2}; use quote::quote; use rtfm_syntax::{ast::App, Context, Core}; use syn::{Attribute, Ident, LitInt, PatType}; use crate::check::Extra; pub fn capacity_literal(capacity: u8) -> LitInt { LitInt::new(&capacity.to_string(), Span::call_site()) } pub fn capacity_typenum(capacity: u8, round_up_to_power_of_two: bool) -> TokenStream2 { let capacity = if round_up_to_power_of_two { capacity.checked_next_power_of_two().expect("UNREACHABLE") } else { capacity }; let ident = Ident::new(&format!("U{}", capacity), Span::call_site()); quote!(rtfm::export::consts::#ident) } pub fn cfg_core(core: Core, cores: u8) -> Option<TokenStream2> { if cores == 1 { None } else if cfg!(feature = "heterogeneous") { let core = core.to_string(); Some(quote!(#[cfg(core = #core)])) } else { None } } pub fn fq_ident(task: &Ident, sender: Core) -> Ident { Ident::new( &format!("{}_S{}_FQ", task.to_string(), sender), Span::call_site(), ) } pub fn impl_mutex( extra: &Extra, cfgs: &[Attribute], cfg_core: Option<&TokenStream2>, resources_prefix: bool, name: &Ident, ty: TokenStream2, ceiling: u8, ptr: TokenStream2, ) -> TokenStream2 { let (path, priority) = if resources_prefix { (quote!(resources::#name), quote!(self.priority())) } else { (quote!(#name), quote!(self.priority)) }; let device = extra.device; quote!( #(#cfgs)* #cfg_core impl<'a> rtfm::Mutex for #path<'a> { type T = #ty; #[inline(always)] fn lock<R>(&mut self, f: impl FnOnce(&mut #ty) -> R) -> R { const CEILING: u8 = #ceiling; unsafe { rtfm::export::lock( #ptr, #priority, CEILING, #device::NVIC_PRIO_BITS, f, ) } } } ) } pub fn init_barrier(initializer: Core) -> Ident { Ident::new(&format!("IB{}", initializer), Span::call_site()) } pub fn inputs_ident(task: &Ident, sender: Core) -> Ident { Ident::new(&format!("{}_S{}_INPUTS", task, sender), Span::call_site()) } pub fn instants_ident(task: &Ident, sender: Core) -> Ident { 
Ident::new(&format!("{}_S{}_INSTANTS", task, sender), Span::call_site()) } pub fn interrupt_ident(core: Core, cores: u8) -> Ident { let span = Span::call_site(); if cores == 1 { Ident::new("Interrupt", span) } else { Ident::new(&format!("Interrupt_{}", core), span) } } pub fn is_exception(name: &Ident) -> bool { let s = name.to_string(); match &*s { "MemoryManagement" | "BusFault" | "UsageFault" | "SecureFault" | "SVCall" | "DebugMonitor" | "PendSV" | "SysTick" => true, _ => false, } } pub fn late_resources_ident(init: &Ident) -> Ident { Ident::new( &format!("{}LateResources", init.to_string()), Span::call_site(), ) } fn link_section_index() -> usize { static INDEX: AtomicUsize = AtomicUsize::new(0); INDEX.fetch_add(1, Ordering::Relaxed) }
pub fn link_section_uninit(core: Option<Core>) -> Option<TokenStream2> { let section = if let Some(core) = core { let index = link_section_index(); if cfg!(feature = "homogeneous") { format!(".uninit_{}.rtfm{}", core, index) } else { format!(".uninit.rtfm{}", index) } } else { if cfg!(feature = "heterogeneous") { return None; } format!(".uninit.rtfm{}", link_section_index()) }; Some(quote!(#[link_section = #section])) } pub fn locals_ident(ctxt: Context, app: &App) -> Ident { let mut s = match ctxt { Context::Init(core) => app.inits[&core].name.to_string(), Context::Idle(core) => app.idles[&core].name.to_string(), Context::HardwareTask(ident) | Context::SoftwareTask(ident) => ident.to_string(), }; s.push_str("Locals"); Ident::new(&s, Span::call_site()) } pub fn rendezvous_ident(core: Core) -> Ident { Ident::new(&format!("RV{}", core), Span::call_site()) } pub fn regroup_inputs( inputs: &[PatType], ) -> ( Vec<TokenStream2>, TokenStream2, Vec<TokenStream2>, TokenStream2, ) { if inputs.len() == 1 { let ty = &inputs[0].ty; ( vec![quote!(_0: #ty)], quote!(_0), vec![quote!(_0)], quote!(#ty), ) } else { let mut args = vec![]; let mut pats = vec![]; let mut tys = vec![]; for (i, input) in inputs.iter().enumerate() { let i = Ident::new(&format!("_{}", i), Span::call_site()); let ty = &input.ty; args.push(quote!(#i: #ty)); pats.push(quote!(#i)); tys.push(quote!(#ty)); } let tupled = { let pats = pats.clone(); quote!((#(#pats,)*)) }; let ty = quote!((#(#tys,)*)); (args, tupled, pats, ty) } } pub fn resources_ident(ctxt: Context, app: &App) -> Ident { let mut s = match ctxt { Context::Init(core) => app.inits[&core].name.to_string(), Context::Idle(core) => app.idles[&core].name.to_string(), Context::HardwareTask(ident) | Context::SoftwareTask(ident) => ident.to_string(), }; s.push_str("Resources"); Ident::new(&s, Span::call_site()) } pub fn rq_ident(receiver: Core, priority: u8, sender: Core) -> Ident { Ident::new( &format!("R{}_P{}_S{}_RQ", receiver, priority, sender), 
Span::call_site(), ) } pub fn schedule_ident(name: &Ident, sender: Core) -> Ident { Ident::new( &format!("schedule_{}_S{}", name.to_string(), sender), Span::call_site(), ) } pub fn schedule_t_ident(core: Core) -> Ident { Ident::new(&format!("T{}", core), Span::call_site()) } pub fn spawn_barrier(receiver: Core) -> Ident { Ident::new(&format!("SB{}", receiver), Span::call_site()) } pub fn spawn_ident(name: &Ident, sender: Core) -> Ident { Ident::new( &format!("spawn_{}_S{}", name.to_string(), sender), Span::call_site(), ) } pub fn spawn_t_ident(receiver: Core, priority: u8, sender: Core) -> Ident { Ident::new( &format!("R{}_P{}_S{}_T", receiver, priority, sender), Span::call_site(), ) } pub fn suffixed(name: &str, core: u8) -> Ident { let span = Span::call_site(); if cfg!(feature = "homogeneous") { Ident::new(&format!("{}_{}", name, core), span) } else { Ident::new(name, span) } } pub fn tq_ident(core: Core) -> Ident { Ident::new(&format!("TQ{}", core), Span::call_site()) }
pub fn link_section(section: &str, core: Core) -> Option<TokenStream2> { if cfg!(feature = "homogeneous") { let section = format!(".{}_{}.rtfm{}", section, core, link_section_index()); Some(quote!(#[link_section = #section])) } else { None } }
function_block-full_function
[ { "content": "pub fn xpend(_core: u8, _interrupt: impl Nr) {}\n\n\n\n/// Fake monotonic timer\n\npub struct MT;\n\n\n\nimpl Monotonic for MT {\n\n type Instant = Instant;\n\n\n\n fn ratio() -> Fraction {\n\n Fraction {\n\n numerator: 1,\n\n denominator: 1,\n\n }\n\n }\n\n\n\n unsafe fn reset() {\n\n (0xE0001004 as *mut u32).write_volatile(0)\n\n }\n\n\n\n fn now() -> Instant {\n", "file_path": "cortex-m-rtfm/heterogeneous/src/lib.rs", "rank": 12, "score": 301144.09079921606 }, { "content": "/// Generates compile-time assertions that check that types implement the `Send` / `Sync` traits\n\npub fn codegen(core: u8, analysis: &Analysis, extra: &Extra) -> Vec<TokenStream2> {\n\n let mut stmts = vec![];\n\n\n\n // we don't generate *all* assertions on all cores because the user could conditionally import a\n\n // type only on some core (e.g. `#[cfg(core = \"0\")] use some::Type;`)\n\n\n\n if let Some(types) = analysis.send_types.get(&core) {\n\n for ty in types {\n\n stmts.push(quote!(rtfm::export::assert_send::<#ty>();));\n\n }\n\n }\n\n\n\n if let Some(types) = analysis.sync_types.get(&core) {\n\n for ty in types {\n\n stmts.push(quote!(rtfm::export::assert_sync::<#ty>();));\n\n }\n\n }\n\n\n\n // if the `schedule` API is used in more than one core then we need to check that the\n\n // `monotonic` timer can be used in multi-core context\n\n if analysis.timer_queues.len() > 1 && analysis.timer_queues.contains_key(&core) {\n\n let monotonic = extra.monotonic();\n\n stmts.push(quote!(rtfm::export::assert_multicore::<#monotonic>();));\n\n }\n\n\n\n stmts\n\n}\n", "file_path": "cortex-m-rtfm/macros/src/codegen/assertions.rs", "rank": 13, "score": 299021.10166743345 }, { "content": "#[cfg(not(armv7m))]\n\n#[inline(always)]\n\npub fn run<F>(_priority: u8, f: F)\n\nwhere\n\n F: FnOnce(),\n\n{\n\n f();\n\n}\n\n\n\npub struct Barrier {\n\n inner: AtomicBool,\n\n}\n\n\n\nimpl Barrier {\n\n pub const fn new() -> Self {\n\n Barrier {\n\n inner: AtomicBool::new(false),\n\n }\n\n 
}\n\n\n\n pub fn release(&self) {\n\n self.inner.store(true, Ordering::Release)\n", "file_path": "cortex-m-rtfm/src/export.rs", "rank": 14, "score": 297779.793762301 }, { "content": "#[cfg(armv7m)]\n\n#[inline(always)]\n\npub fn run<F>(priority: u8, f: F)\n\nwhere\n\n F: FnOnce(),\n\n{\n\n if priority == 1 {\n\n // if the priority of this interrupt is `1` then BASEPRI can only be `0`\n\n f();\n\n unsafe { basepri::write(0) }\n\n } else {\n\n let initial = basepri::read();\n\n f();\n\n unsafe { basepri::write(initial) }\n\n }\n\n}\n\n\n", "file_path": "cortex-m-rtfm/src/export.rs", "rank": 15, "score": 297779.793762301 }, { "content": "pub fn codegen(ctxt: Context, resources_tick: bool, app: &App, extra: &Extra) -> TokenStream2 {\n\n let mut items = vec![];\n\n let mut fields = vec![];\n\n let mut values = vec![];\n\n\n\n let name = ctxt.ident(app);\n\n\n\n let core = ctxt.core(app);\n\n let mut needs_instant = false;\n\n let mut lt = None;\n\n match ctxt {\n\n Context::Init(core) => {\n\n if app.uses_schedule(core) {\n\n let m = extra.monotonic();\n\n\n\n fields.push(quote!(\n\n /// System start time = `Instant(0 /* cycles */)`\n\n pub start: <#m as rtfm::Monotonic>::Instant\n\n ));\n\n\n", "file_path": "cortex-m-rtfm/macros/src/codegen/module.rs", "rank": 16, "score": 288633.6461693382 }, { "content": "pub fn codegen(scheduler: Context, name: &Ident, app: &App) -> TokenStream2 {\n\n let sender = scheduler.core(app);\n\n let schedulee = &app.software_tasks[name];\n\n let receiver = schedulee.args.core;\n\n\n\n let fq = util::fq_ident(name, sender);\n\n let tq = util::tq_ident(sender);\n\n let (dequeue, enqueue) = if scheduler.is_init() {\n\n (quote!(#fq.dequeue()), quote!(#tq.enqueue_unchecked(nr);))\n\n } else {\n\n (\n\n quote!((#fq { priority }).lock(|fq| fq.split().1.dequeue())),\n\n quote!((#tq { priority }).lock(|tq| tq.enqueue_unchecked(nr));),\n\n )\n\n };\n\n\n\n let write_instant = if app.uses_schedule(receiver) {\n\n let instants = 
util::instants_ident(name, sender);\n\n\n\n Some(quote!(\n", "file_path": "cortex-m-rtfm/macros/src/codegen/schedule_body.rs", "rank": 17, "score": 283785.9738851656 }, { "content": "pub fn xpend(_core: u8, _interrupt: impl Nr) {}\n\n\n\n/// Fake monotonic timer\n\npub struct MT;\n\n\n\nimpl Monotonic for MT {\n\n type Instant = Instant;\n\n\n\n fn ratio() -> Fraction {\n\n Fraction {\n\n numerator: 1,\n\n denominator: 1,\n\n }\n\n }\n\n\n\n unsafe fn reset() {\n\n (0xE0001004 as *mut u32).write_volatile(0)\n\n }\n\n\n\n fn now() -> Instant {\n", "file_path": "cortex-m-rtfm/homogeneous/src/lib.rs", "rank": 22, "score": 264751.29981089185 }, { "content": "#[no_mangle]\n\npub fn cb_read_test_test5() -> Result<&'static [u8], Error> {\n\n Ok(b\"or you wouldn't have come here.\\\"\")\n\n}", "file_path": "src/cmd.rs", "rank": 25, "score": 235044.76760031685 }, { "content": "#[no_mangle]\n\npub fn cb_read_test_test3() -> Result<&'static [u8], Error> {\n\n Ok(b\"\\\"How do you know I'm mad?\\\" said Alice.\")\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 26, "score": 235044.76760031685 }, { "content": "#[no_mangle]\n\npub fn cb_read_test_test1() -> Result<&'static [u8], Error> {\n\n Ok(b\"\\\"Oh, you can't help that,\\\" said the Cat:\")\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 27, "score": 235044.76760031685 }, { "content": "#[no_mangle]\n\npub fn cb_read_test_test2() -> Result<&'static [u8], Error> {\n\n Ok(b\"\\\"we're all mad here. I'm mad. 
You're mad.\\\"\")\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 28, "score": 235044.76760031685 }, { "content": "#[no_mangle]\n\npub fn cb_read_test_echo() -> Result<&'static [u8], Error> {\n\n unsafe { Ok(&ECHO_BUF[..ECHO_BUF_SZ]) }\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 29, "score": 235044.76760031685 }, { "content": "#[no_mangle]\n\npub fn cb_read_test_test4() -> Result<&'static [u8], Error> {\n\n Ok(b\"\\\"You must be,\\\" said the Cat,\")\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 30, "score": 235044.76760031685 }, { "content": "#[no_mangle]\n\npub fn cb_write_test_test4(_v : &'static [u8]) -> Result<(), Error> {\n\n Err(Error::NonWriteable)\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 33, "score": 230818.3393027104 }, { "content": "#[no_mangle]\n\npub fn cb_write_test_test3(_v : &'static [u8]) -> Result<(), Error> {\n\n Err(Error::NonWriteable)\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 34, "score": 230818.3393027104 }, { "content": "#[no_mangle]\n\npub fn cb_write_test_echo(v : &'static [u8]) -> Result<(), Error> {\n\n unsafe {\n\n ECHO_BUF_SZ = v.len();\n\n (&mut ECHO_BUF[..ECHO_BUF_SZ]).copy_from_slice(v);\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 35, "score": 230818.3393027104 }, { "content": "#[no_mangle]\n\npub fn cb_write_test_test5(_v : &'static [u8]) -> Result<(), Error> {\n\n Err(Error::NonWriteable)\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 36, "score": 230818.3393027104 }, { "content": "#[no_mangle]\n\npub fn cb_write_test_test2(_v : &'static [u8]) -> Result<(), Error> {\n\n Err(Error::NonWriteable)\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 37, "score": 230818.3393027104 }, { "content": "#[no_mangle]\n\npub fn cb_write_test_test1(_v : &'static [u8]) -> Result<(), Error> {\n\n Err(Error::NonWriteable)\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 38, "score": 230818.3393027104 }, { "content": "/// Generates all `${ctxt}::Schedule` methods\n\npub fn codegen(app: &App, extra: &Extra) -> 
Vec<TokenStream2> {\n\n let mut items = vec![];\n\n\n\n let mut seen = BTreeMap::<u8, HashSet<_>>::new();\n\n for (scheduler, schedulees) in app.schedule_callers() {\n\n let m = extra.monotonic();\n\n let instant = quote!(<#m as rtfm::Monotonic>::Instant);\n\n\n\n let sender = scheduler.core(app);\n\n let cfg_sender = util::cfg_core(sender, app.args.cores);\n\n let seen = seen.entry(sender).or_default();\n\n let mut methods = vec![];\n\n\n\n for name in schedulees {\n\n let schedulee = &app.software_tasks[name];\n\n let cfgs = &schedulee.cfgs;\n\n let (args, _, untupled, ty) = util::regroup_inputs(&schedulee.inputs);\n\n let args = &args;\n\n\n\n if scheduler.is_init() {\n", "file_path": "cortex-m-rtfm/macros/src/codegen/schedule.rs", "rank": 40, "score": 202247.17480092327 }, { "content": "// TODO document the syntax here or in `rtfm-syntax`\n\npub fn app(app: &App, analysis: &Analysis, extra: &Extra) -> TokenStream2 {\n\n let mut const_app = vec![];\n\n let mut mains = vec![];\n\n let mut root = vec![];\n\n let mut user = vec![];\n\n\n\n // generate a `main` function for each core\n\n for core in 0..app.args.cores {\n\n let assertion_stmts = assertions::codegen(core, analysis, extra);\n\n\n\n let (const_app_pre_init, pre_init_stmts) = pre_init::codegen(core, &app, analysis, extra);\n\n\n\n let (const_app_init, root_init, user_init, call_init) =\n\n init::codegen(core, app, analysis, extra);\n\n\n\n let (const_app_post_init, post_init_stmts) = post_init::codegen(core, analysis, extra);\n\n\n\n let (const_app_idle, root_idle, user_idle, call_idle) =\n\n idle::codegen(core, app, analysis, extra);\n\n\n", "file_path": "cortex-m-rtfm/macros/src/codegen.rs", "rank": 42, "score": 198734.8909389064 }, { "content": "pub fn from_u8_slice(slice: &[u8]) -> &[u16] {\n\n use core::mem::{size_of, transmute};\n\n use core::slice::from_raw_parts;\n\n\n\n let ptr: *const u16 = unsafe { transmute(slice.as_ptr()) };\n\n let len: usize = slice.len() / (size_of::<u16>() / 
size_of::<u8>());\n\n\n\n unsafe { from_raw_parts(ptr, len) }\n\n}\n", "file_path": "src/splash.rs", "rank": 45, "score": 194815.74561909906 }, { "content": "#[inline]\n\npub fn logical2hw(logical: u8, nvic_prio_bits: u8) -> u8 {\n\n ((1 << nvic_prio_bits) - logical) << (8 - nvic_prio_bits)\n\n}\n", "file_path": "cortex-m-rtfm/src/export.rs", "rank": 46, "score": 194737.13821635128 }, { "content": "/// Generates task dispatchers\n\npub fn codegen(app: &App, analysis: &Analysis, extra: &Extra) -> Vec<TokenStream2> {\n\n let mut items = vec![];\n\n\n\n for (&receiver, dispatchers) in &analysis.channels {\n\n let interrupts = &analysis.interrupts[&receiver];\n\n\n\n for (&level, channels) in dispatchers {\n\n let mut stmts = vec![];\n\n\n\n for (&sender, channel) in channels {\n\n let cfg_sender = util::cfg_core(sender, app.args.cores);\n\n\n\n let variants = channel\n\n .tasks\n\n .iter()\n\n .map(|name| {\n\n let cfgs = &app.software_tasks[name].cfgs;\n\n\n\n quote!(\n\n #(#cfgs)*\n", "file_path": "cortex-m-rtfm/macros/src/codegen/dispatchers.rs", "rank": 47, "score": 191548.89550726226 }, { "content": "/// Generates all `${ctxt}::Spawn` methods\n\npub fn codegen(app: &App, analysis: &Analysis, extra: &Extra) -> Vec<TokenStream2> {\n\n let mut items = vec![];\n\n\n\n let mut seen = BTreeMap::<u8, HashSet<_>>::new();\n\n for (spawner, spawnees) in app.spawn_callers() {\n\n let sender = spawner.core(app);\n\n let cfg_sender = util::cfg_core(sender, app.args.cores);\n\n let seen = seen.entry(sender).or_default();\n\n let mut methods = vec![];\n\n\n\n for name in spawnees {\n\n let spawnee = &app.software_tasks[name];\n\n let receiver = spawnee.args.core;\n\n let cfgs = &spawnee.cfgs;\n\n let (args, _, untupled, ty) = util::regroup_inputs(&spawnee.inputs);\n\n let args = &args;\n\n\n\n if spawner.is_init() {\n\n // `init` uses a special spawn implementation; it doesn't use the `spawn_${name}`\n\n // functions which are shared by other contexts\n", "file_path": 
"cortex-m-rtfm/macros/src/codegen/spawn.rs", "rank": 48, "score": 191543.97436541703 }, { "content": "/// Generates timer queues and timer queue handlers\n\npub fn codegen(app: &App, analysis: &Analysis, extra: &Extra) -> Vec<TokenStream2> {\n\n let mut items = vec![];\n\n\n\n for (&sender, timer_queue) in &analysis.timer_queues {\n\n let cfg_sender = util::cfg_core(sender, app.args.cores);\n\n let t = util::schedule_t_ident(sender);\n\n\n\n // Enumeration of `schedule`-able tasks\n\n {\n\n let variants = timer_queue\n\n .tasks\n\n .iter()\n\n .map(|name| {\n\n let cfgs = &app.software_tasks[name].cfgs;\n\n\n\n quote!(\n\n #(#cfgs)*\n\n #name\n\n )\n\n })\n", "file_path": "cortex-m-rtfm/macros/src/codegen/timer_queue.rs", "rank": 49, "score": 189674.60609681124 }, { "content": "#[no_mangle]\n\npub fn cb_read_hard_red() -> Result<u8, Error> {\n\n Ok(unsafe { RED_CUR })\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 50, "score": 178636.04147980554 }, { "content": "#[no_mangle]\n\npub fn cb_read_hard_inf() -> Result<u8, Error> {\n\n Ok(unsafe { INF_CUR })\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 51, "score": 178636.04147980554 }, { "content": "#[no_mangle]\n\npub fn cb_write_hard_inf(v : u8) -> Result<(), Error> {\n\n unsafe { INF_CUR = v; }\n\n let tmp : u16 = unsafe { ((RED_CUR as u16) << 8) | INF_CUR as u16 };\n\n PldAfeCmd_low::get().write(tmp);\n\n PldAfeCmd_high::get().write(0x2202);\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 52, "score": 175301.0367456311 }, { "content": "#[no_mangle]\n\npub fn cb_write_hard_red(v : u8) -> Result<(), Error> {\n\n unsafe { RED_CUR = v; }\n\n let tmp : u16 = unsafe { ((RED_CUR as u16) << 8) | INF_CUR as u16 };\n\n PldAfeCmd_low::get().write(tmp);\n\n PldAfeCmd_high::get().write(0x2202);\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 53, "score": 175301.0367456311 }, { "content": "#[no_mangle]\n\npub fn cb_read_build_host() -> Result<&'static str, Error> {\n\n Ok(built_info::HOST)\n\n}\n\n\n", 
"file_path": "src/cmd.rs", "rank": 54, "score": 172786.32507515678 }, { "content": "#[no_mangle]\n\npub fn cb_read_build_profile() -> Result<&'static str, Error> {\n\n Ok(built_info::PROFILE)\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 55, "score": 172786.32507515678 }, { "content": "#[no_mangle]\n\npub fn cb_read_build_compiler() -> Result<&'static str, Error> {\n\n Ok(built_info::RUSTC_VERSION)\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 56, "score": 172786.32507515678 }, { "content": "#[no_mangle]\n\npub fn cb_read_build_time() -> Result<&'static str, Error> {\n\n Ok(built_info::BUILT_TIME_UTC)\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 57, "score": 172786.32507515678 }, { "content": "#[no_mangle]\n\npub fn cb_read_info_version() -> Result<&'static str, Error> {\n\n Ok(built_info::PKG_VERSION)\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 58, "score": 172786.32507515678 }, { "content": "#[no_mangle]\n\npub fn cb_read_build_authors() -> Result<&'static str, Error> {\n\n Ok(built_info::PKG_AUTHORS)\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 59, "score": 172786.32507515678 }, { "content": "#[no_mangle]\n\npub fn cb_read_build_target() -> Result<&'static str, Error> {\n\n Ok(built_info::TARGET)\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 60, "score": 172786.32507515678 }, { "content": "#[no_mangle]\n\npub fn cb_read_build_git() -> Result<&'static str, Error> {\n\n Ok(built_info::GIT_VERSION.unwrap_or(\"None\"))\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 61, "score": 172786.32507515678 }, { "content": "#[no_mangle]\n\npub fn cb_read_build_version() -> Result<&'static str, Error> {\n\n Ok(built_info::PKG_VERSION)\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 62, "score": 172786.32507515678 }, { "content": "#[no_mangle]\n\npub fn cb_write_build_target(_v : &'static str) -> Result<(), Error> {\n\n Err(Error::NonWriteable)\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 63, "score": 169769.31700294523 }, { "content": "#[no_mangle]\n\npub fn 
cb_write_build_version(_v : &'static str) -> Result<(), Error> {\n\n Err(Error::NonWriteable)\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 64, "score": 169769.31700294523 }, { "content": "#[no_mangle]\n\npub fn cb_write_build_profile(_v : &'static str) -> Result<(), Error> {\n\n Err(Error::NonWriteable)\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 65, "score": 169769.31700294523 }, { "content": "#[no_mangle]\n\npub fn cb_write_build_git(_v : &'static str) -> Result<(), Error> {\n\n Err(Error::NonWriteable)\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 66, "score": 169769.31700294523 }, { "content": "#[no_mangle]\n\npub fn cb_write_build_authors(_v : &'static str) -> Result<(), Error> {\n\n Err(Error::NonWriteable)\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 67, "score": 169769.31700294523 }, { "content": "#[no_mangle]\n\npub fn cb_write_info_version(_v : &'static str) -> Result<(), Error> {\n\n Err(Error::NonWriteable)\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 68, "score": 169769.31700294523 }, { "content": "#[no_mangle]\n\npub fn cb_write_build_compiler(_v : &'static str) -> Result<(), Error> {\n\n Err(Error::NonWriteable)\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 69, "score": 169769.31700294523 }, { "content": "#[no_mangle]\n\npub fn cb_write_build_time(_v : &'static str) -> Result<(), Error> {\n\n Err(Error::NonWriteable)\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 70, "score": 169769.31700294523 }, { "content": "#[no_mangle]\n\npub fn cb_write_build_host(_v : &'static str) -> Result<(), Error> {\n\n Err(Error::NonWriteable)\n\n}\n\n\n", "file_path": "src/cmd.rs", "rank": 71, "score": 169769.31700294523 }, { "content": "/// Generate support code for hardware tasks (`#[exception]`s and `#[interrupt]`s)\n\npub fn codegen(\n\n app: &App,\n\n analysis: &Analysis,\n\n extra: &Extra,\n\n) -> (\n\n // const_app_hardware_tasks -- interrupt handlers and `${task}Resources` constructors\n\n Vec<TokenStream2>,\n\n // root_hardware_tasks -- items 
that must be placed in the root of the crate:\n\n // - `${task}Locals` structs\n\n // - `${task}Resources` structs\n\n // - `${task}` modules\n\n Vec<TokenStream2>,\n\n // user_hardware_tasks -- the `#[task]` functions written by the user\n\n Vec<TokenStream2>,\n\n) {\n\n let mut const_app = vec![];\n\n let mut root = vec![];\n\n let mut user_tasks = vec![];\n\n\n\n for (name, task) in &app.hardware_tasks {\n", "file_path": "cortex-m-rtfm/macros/src/codegen/hardware_tasks.rs", "rank": 73, "score": 162355.7591600921 }, { "content": "pub fn codegen(\n\n app: &App,\n\n analysis: &Analysis,\n\n extra: &Extra,\n\n) -> (\n\n // const_app_software_tasks -- free queues, buffers and `${task}Resources` constructors\n\n Vec<TokenStream2>,\n\n // root_software_tasks -- items that must be placed in the root of the crate:\n\n // - `${task}Locals` structs\n\n // - `${task}Resources` structs\n\n // - `${task}` modules\n\n Vec<TokenStream2>,\n\n // user_software_tasks -- the `#[task]` functions written by the user\n\n Vec<TokenStream2>,\n\n) {\n\n let mut const_app = vec![];\n\n let mut root = vec![];\n\n let mut user_tasks = vec![];\n\n\n\n for (name, task) in &app.software_tasks {\n", "file_path": "cortex-m-rtfm/macros/src/codegen/software_tasks.rs", "rank": 74, "score": 162351.09825203006 }, { "content": "#[allow(dead_code)]\n\nfn bar_trampoline(_: bar::Context) {}\n", "file_path": "cortex-m-rtfm/examples/t-binds.rs", "rank": 75, "score": 154917.72269016213 }, { "content": "#[allow(dead_code)]\n\nfn foo_trampoline(_: foo::Context) {}\n\n\n", "file_path": "cortex-m-rtfm/examples/t-binds.rs", "rank": 76, "score": 154917.72269016213 }, { "content": "pub fn app<'a>(app: &'a App, analysis: &Analysis) -> parse::Result<Extra<'a>> {\n\n if cfg!(feature = \"homogeneous\") {\n\n // this RTFM mode uses the same namespace for all cores so we need to check that the\n\n // identifiers used for each core `#[init]` and `#[idle]` functions don't collide\n\n let mut seen = HashSet::new();\n\n\n\n 
for name in app\n\n .inits\n\n .values()\n\n .map(|init| &init.name)\n\n .chain(app.idles.values().map(|idle| &idle.name))\n\n {\n\n if seen.contains(name) {\n\n return Err(parse::Error::new(\n\n name.span(),\n\n \"this identifier is already being used by another core\",\n\n ));\n\n } else {\n\n seen.insert(name);\n\n }\n", "file_path": "cortex-m-rtfm/macros/src/check.rs", "rank": 77, "score": 151703.93528729037 }, { "content": "fn byte_to_bcd2(byte: u8) -> (u8, u8){\n\n let mut bcd_high: u8 = 0;\n\n let mut value = byte;\n\n \n\n while value >= 10 {\n\n bcd_high += 1;\n\n value -= 10;\n\n }\n\n\n\n (bcd_high, ((bcd_high << 4) | value) as u8)\n\n}\n\n\n", "file_path": "stm32l4xx-hal-0.5.0/src/rtc.rs", "rank": 78, "score": 148004.98784062482 }, { "content": "fn bcd2_to_byte(bcd: (u8, u8)) -> u8 {\n\n let value = bcd.1 | bcd.0 << 4;\n\n \n\n let tmp = ((value & 0xF0) >> 0x4) * 10;\n\n \n\n (tmp + (value & 0x0F))\n\n}", "file_path": "stm32l4xx-hal-0.5.0/src/rtc.rs", "rank": 79, "score": 148004.98784062482 }, { "content": "#[proc_macro_attribute]\n\npub fn app(args: TokenStream, input: TokenStream) -> TokenStream {\n\n let mut settings = Settings::default();\n\n settings.optimize_priorities = true;\n\n settings.parse_binds = true;\n\n settings.parse_cores = cfg!(feature = \"heterogeneous\") || cfg!(feature = \"homogeneous\");\n\n settings.parse_extern_interrupt = true;\n\n settings.parse_schedule = true;\n\n\n\n let (app, analysis) = match rtfm_syntax::parse(args, input, settings) {\n\n Err(e) => return e.to_compile_error().into(),\n\n Ok(x) => x,\n\n };\n\n\n\n let extra = match check::app(&app, &analysis) {\n\n Err(e) => return e.to_compile_error().into(),\n\n Ok(x) => x,\n\n };\n\n\n\n let analysis = analyze::app(analysis, &app);\n\n\n\n let ts = codegen::app(&app, &analysis, &extra);\n\n\n\n // Try to write the expanded code to disk\n\n if Path::new(\"target\").exists() {\n\n fs::write(\"target/rtfm-expansion.rs\", ts.to_string()).ok();\n\n }\n\n\n\n 
ts.into()\n\n}\n", "file_path": "cortex-m-rtfm/macros/src/lib.rs", "rank": 80, "score": 140712.27552711195 }, { "content": "#[allow(dead_code)]\n\npub fn schedule() {\n\n rtt_print!(\"Gen size {}\", core::mem::size_of_val(unsafe { &MY_GEN }));\n\n\n\n //let b : SmallBox<dyn Generator<Yield=u32, Return = u32>, S4>= SmallBox::new(\n\n // || {\n\n // yield 0;\n\n // yield 1;\n\n // yield 2;\n\n // return 3;\n\n // }\n\n //);\n\n\n\n match Pin::new(unsafe { &mut MY_GEN }).resume() {\n\n GeneratorState::Yielded(num) => {\n\n rtt_print!(\"Step : {}\", num);\n\n }\n\n GeneratorState::Complete(_) => {\n\n rtt_print!(\"Finish step!\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/sche.rs", "rank": 81, "score": 133941.64396593565 }, { "content": "#[rustfmt::skip]\n\npub fn port_init() {\n\n use stm32f1xx_hal::pac::{\n\n RCC,\n\n GPIOA,\n\n GPIOB,\n\n GPIOC,\n\n GPIOD,\n\n GPIOE,\n\n GPIOF,\n\n GPIOG\n\n };\n\n\n\n let rcc = unsafe { &*RCC::ptr() };\n\n rcc.apb2enr.modify(|_, w| {\n\n w.iopaen().enabled()\n\n .iopben().enabled()\n\n .iopcen().enabled()\n\n .iopden().enabled()\n\n .iopeen().enabled()\n\n .iopfen().enabled()\n", "file_path": "src/port.rs", "rank": 82, "score": 131624.86398515798 }, { "content": "pub fn pld_init() {\n\n // PllRdy\n\n // Проверка готовности PLL\n\n {\n\n let r = PldId::get();\n\n for _ in 0..100 {\n\n let res = r.read();\n\n rtt_print!(\"PldIDReg: {:X}\", res);\n\n busy_wait_cycles!(72000 * 10);\n\n if res & 0x8000 == 0 {\n\n break;\n\n }\n\n }\n\n rtt_print!(\"PllRdy test OK\");\n\n }\n\n\n\n // IdTest\n\n // Проверка кода идентификаци\n\n {\n\n let r = PldCfg::get();\n", "file_path": "src/pld.rs", "rank": 83, "score": 131624.86398515798 }, { "content": "pub fn usb_interrupt() {\n\n unsafe {\n\n USB_IRQ_CNT += 1;\n\n }\n\n let usb_dev = unsafe { USB_DEVICE.as_mut().unwrap() };\n\n let monitor = unsafe { USB_MONITOR.as_mut().unwrap() };\n\n\n\n if !usb_dev.poll(&mut [monitor]) {\n\n return;\n\n }\n\n\n\n //let mut buf = [0u8; 0x40];\n\n\n\n //match 
monitor.read(&mut buf) {\n\n // Ok(count) if count > 0 => {\n\n // let p = unsafe { &mut CMD_PROC };\n\n // let count = match p.process_try_answer(&mut buf, count) {\n\n // Ok(c) => c,\n\n // Err(_) => panic!(),\n\n // };\n", "file_path": "src/usb.rs", "rank": 84, "score": 131624.86398515798 }, { "content": "#[rustfmt::skip]\n\npub fn fsmc_init() {\n\n use stm32f1xx_hal::pac::{\n\n RCC,\n\n FSMC,\n\n GPIOB,\n\n GPIOD,\n\n GPIOE,\n\n };\n\n \n\n let rcc = unsafe { &*RCC::ptr() };\n\n\n\n rcc.ahbenr.modify(|_, w| w.fsmcen().enabled());\n\n let fsmc = unsafe { &*FSMC::ptr() };\n\n fsmc.bcr1.modify(|_, w| {\n\n w.mbken().enabled()\n\n .muxen().enabled()\n\n .mtyp().flash()\n\n .mwid().bits16()\n\n .wren().enabled()\n\n .extmod().disabled()\n", "file_path": "src/port.rs", "rank": 85, "score": 131624.86398515798 }, { "content": "/// Включить ШИММ (0x01 - разрешить работу выхода клапана),\n\n/// 0x02 - разрешить работу компрессора, \n\n/// 0x03 - разрешить работу обоих выходов, \n\n/// 0х00 - запретить работу обоих выходов \n\npub fn pld_enable_pwm() {\n\n // Разрешить работу клапана и компрессора\n\n PldPWMsel::get().write(0x0003);\n\n}\n\n\n", "file_path": "src/pld.rs", "rank": 86, "score": 129428.35133582709 }, { "content": "pub fn pld_emergency_block_unlock() {\n\n // Если был аварийный сброс давления, нужно для снятия флага прочитать этот регистр\n\n let _ = PldId::get().read();\n\n}", "file_path": "src/pld.rs", "rank": 87, "score": 127342.9780689525 }, { "content": "/// Generates `static [mut]` variables and resource proxies\n\npub fn codegen(\n\n app: &App,\n\n analysis: &Analysis,\n\n extra: &Extra,\n\n) -> (\n\n // const_app -- the `static [mut]` variables behind the proxies\n\n Vec<TokenStream2>,\n\n // mod_resources -- the `resources` module\n\n TokenStream2,\n\n) {\n\n let mut const_app = vec![];\n\n let mut mod_resources = vec![];\n\n\n\n for (name, res, expr, loc) in app.resources(analysis) {\n\n let cfgs = &res.cfgs;\n\n let ty = &res.ty;\n\n\n\n {\n\n let 
(loc_attr, section) = match loc {\n\n Location::Owned {\n", "file_path": "cortex-m-rtfm/macros/src/codegen/resources.rs", "rank": 88, "score": 125365.30272969356 }, { "content": "/// Generates support code for `#[idle]` functions\n\npub fn codegen(\n\n core: u8,\n\n app: &App,\n\n analysis: &Analysis,\n\n extra: &Extra,\n\n) -> (\n\n // const_app_idle -- the `${idle}Resources` constructor\n\n Option<TokenStream2>,\n\n // root_idle -- items that must be placed in the root of the crate:\n\n // - the `${idle}Locals` struct\n\n // - the `${idle}Resources` struct\n\n // - the `${idle}` module, which contains types like `${idle}::Context`\n\n Vec<TokenStream2>,\n\n // user_idle\n\n Option<TokenStream2>,\n\n // call_idle\n\n TokenStream2,\n\n) {\n\n if let Some(idle) = app.idles.get(&core) {\n\n let mut needs_lt = false;\n", "file_path": "cortex-m-rtfm/macros/src/codegen/idle.rs", "rank": 89, "score": 125360.51715696714 }, { "content": "/// Generates support code for `#[init]` functions\n\npub fn codegen(\n\n core: u8,\n\n app: &App,\n\n analysis: &Analysis,\n\n extra: &Extra,\n\n) -> (\n\n // const_app_idle -- the `${init}Resources` constructor\n\n Option<TokenStream2>,\n\n // root_init -- items that must be placed in the root of the crate:\n\n // - the `${init}Locals` struct\n\n // - the `${init}Resources` struct\n\n // - the `${init}LateResources` struct\n\n // - the `${init}` module, which contains types like `${init}::Context`\n\n Vec<TokenStream2>,\n\n // user_init -- the `#[init]` function written by the user\n\n Option<TokenStream2>,\n\n // call_init -- the call to the user `#[init]` if there's one\n\n Option<TokenStream2>,\n\n) {\n\n if let Some(init) = app.inits.get(&core) {\n", "file_path": "cortex-m-rtfm/macros/src/codegen/init.rs", "rank": 90, "score": 125360.51715696714 }, { "content": "pub fn codegen(\n\n ctxt: Context,\n\n locals: &Map<Local>,\n\n core: Core,\n\n app: &App,\n\n) -> (\n\n // locals\n\n TokenStream2,\n\n // pat\n\n TokenStream2,\n\n) {\n\n 
assert!(!locals.is_empty());\n\n\n\n let runs_once = ctxt.runs_once();\n\n let ident = util::locals_ident(ctxt, app);\n\n\n\n let mut lt = None;\n\n let mut fields = vec![];\n\n let mut items = vec![];\n\n let mut names = vec![];\n", "file_path": "cortex-m-rtfm/macros/src/codegen/locals.rs", "rank": 91, "score": 125360.51715696714 }, { "content": "/// Generates code that runs after `#[init]` returns\n\npub fn codegen(\n\n core: u8,\n\n analysis: &Analysis,\n\n extra: &Extra,\n\n) -> (Vec<TokenStream2>, Vec<TokenStream2>) {\n\n let mut const_app = vec![];\n\n let mut stmts = vec![];\n\n\n\n // initialize late resources\n\n if let Some(late_resources) = analysis.late_resources.get(&core) {\n\n for name in late_resources {\n\n // if it's live\n\n if analysis.locations.get(name).is_some() {\n\n stmts.push(quote!(#name.as_mut_ptr().write(late.#name);));\n\n }\n\n }\n\n }\n\n\n\n if analysis.timer_queues.is_empty() {\n\n // cross-initialization barriers -- notify *other* cores that their resources have been\n", "file_path": "cortex-m-rtfm/macros/src/codegen/post_init.rs", "rank": 93, "score": 123473.53401999589 }, { "content": "pub fn codegen(\n\n spawner: Context,\n\n name: &Ident,\n\n app: &App,\n\n analysis: &Analysis,\n\n extra: &Extra,\n\n) -> TokenStream2 {\n\n let sender = spawner.core(app);\n\n let spawnee = &app.software_tasks[name];\n\n let priority = spawnee.args.priority;\n\n let receiver = spawnee.args.core;\n\n\n\n let write_instant = if app.uses_schedule(receiver) {\n\n let instants = util::instants_ident(name, sender);\n\n\n\n Some(quote!(\n\n #instants.get_unchecked_mut(usize::from(index)).as_mut_ptr().write(instant);\n\n ))\n\n } else {\n\n None\n", "file_path": "cortex-m-rtfm/macros/src/codegen/spawn_body.rs", "rank": 94, "score": 123473.53401999589 }, { "content": "/// Generates code that runs before `#[init]`\n\npub fn codegen(\n\n core: u8,\n\n app: &App,\n\n analysis: &Analysis,\n\n extra: &Extra,\n\n) -> (\n\n // `const_app_pre_init` -- `static` 
variables for barriers\n\n Vec<TokenStream2>,\n\n // `pre_init_stmts`\n\n Vec<TokenStream2>,\n\n) {\n\n let mut const_app = vec![];\n\n let mut stmts = vec![];\n\n\n\n // disable interrupts -- `init` must run with interrupts disabled\n\n stmts.push(quote!(rtfm::export::interrupt::disable();));\n\n\n\n // populate this core `FreeQueue`s\n\n for (name, senders) in &analysis.free_queues {\n\n let task = &app.software_tasks[name];\n", "file_path": "cortex-m-rtfm/macros/src/codegen/pre_init.rs", "rank": 95, "score": 123473.53401999589 }, { "content": "pub fn codegen(\n\n ctxt: Context,\n\n priority: u8,\n\n needs_lt: &mut bool,\n\n app: &App,\n\n analysis: &Analysis,\n\n) -> (TokenStream2, TokenStream2) {\n\n let mut lt = None;\n\n\n\n let resources = match ctxt {\n\n Context::Init(core) => &app.inits[&core].args.resources,\n\n Context::Idle(core) => &app.idles[&core].args.resources,\n\n Context::HardwareTask(name) => &app.hardware_tasks[name].args.resources,\n\n Context::SoftwareTask(name) => &app.software_tasks[name].args.resources,\n\n };\n\n\n\n let mut fields = vec![];\n\n let mut values = vec![];\n\n let mut has_cfgs = false;\n\n\n", "file_path": "cortex-m-rtfm/macros/src/codegen/resources_struct.rs", "rank": 96, "score": 123473.53401999589 }, { "content": "#[inline(always)]\n\npub fn assert_sync<T>()\n\nwhere\n\n T: Sync,\n\n{\n\n}\n\n\n", "file_path": "cortex-m-rtfm/src/export.rs", "rank": 97, "score": 122559.12692855814 }, { "content": "#[inline(always)]\n\npub fn assert_send<T>()\n\nwhere\n\n T: Send,\n\n{\n\n}\n\n\n", "file_path": "cortex-m-rtfm/src/export.rs", "rank": 98, "score": 122559.12692855814 }, { "content": "#[inline(always)]\n\npub fn assert_multicore<T>()\n\nwhere\n\n T: super::MultiCore,\n\n{\n\n}\n\n\n\n#[cfg(armv7m)]\n\n#[inline(always)]\n\npub unsafe fn lock<T, R>(\n\n ptr: *mut T,\n\n priority: &Priority,\n\n ceiling: u8,\n\n nvic_prio_bits: u8,\n\n f: impl FnOnce(&mut T) -> R,\n\n) -> R {\n\n let current = priority.get();\n\n\n\n if current < 
ceiling {\n\n if ceiling == (1 << nvic_prio_bits) {\n\n priority.set(u8::max_value());\n", "file_path": "cortex-m-rtfm/src/export.rs", "rank": 99, "score": 122559.12692855814 } ]
Rust
src/cache/cache.rs
ELCHILEN0/memcached
f0291bdddd050b2cb46014e1364a435f48ed7c50
use cache::key::Key; use cache::value::Value; use cache::data_entry::DataEntry; use cache::storage_structure::CacheStorageStructure; use cache::replacement_policy::CacheReplacementPolicy; use cache::error::CacheError; pub struct CacheMetrics { pub evictions: u64, pub hit_count_get: u64, pub hit_count_set: u64, pub hit_count_delete: u64, pub miss_count_get: u64, pub miss_count_set: u64, pub miss_count_delete: u64, } pub struct Cache<T, R> { pub capacity: usize, pub item_lifetime: u64, pub max_key_len: usize, pub max_val_len: usize, pub storage_structure: T, pub replacement_policy: R, pub metrics: CacheMetrics, } impl CacheMetrics { pub fn new() -> CacheMetrics { CacheMetrics { evictions: 0, hit_count_get: 0, hit_count_set: 0, hit_count_delete: 0, miss_count_get: 0, miss_count_set: 0, miss_count_delete: 0, } } } impl <T: CacheStorageStructure, R: CacheReplacementPolicy> Cache<T, R> { pub fn new(capacity: usize, storage_structure: T, replacement_policy: R) -> Cache<T, R> { Cache { capacity: capacity, item_lifetime: 60 * 1000, max_key_len: 256, max_val_len: 512, storage_structure: storage_structure, replacement_policy: replacement_policy, metrics: CacheMetrics::new() } } pub fn get(&mut self, key: Key) -> Option<DataEntry> { match self.storage_structure.get(key) { Some((index, entry)) => { self.replacement_policy.update(index); self.metrics.hit_count_get += 1; Some(entry) }, None => { self.metrics.miss_count_get += 1; None } } } pub fn set(&mut self, key: Key, value: Value) -> Result<(), CacheError> { let entry = DataEntry::new(key.clone(), value); let current_elem_size = match self.storage_structure.get(key) { Some((_, curr_entry)) => curr_entry.len(), None => 0, }; if current_elem_size == 0 { self.metrics.miss_count_set += 1; } else { self.metrics.hit_count_set += 1; } loop { if self.storage_structure.size() + entry.len() - current_elem_size <= self.capacity { break; } try!(self.evict_next()); self.metrics.evictions += 1; } let (index, _) = 
self.storage_structure.set(entry); self.replacement_policy.update(index); Ok(()) } pub fn remove(&mut self, key: Key) { match self.storage_structure.remove(key) { Some((index, _)) => { self.replacement_policy.remove(index); self.metrics.hit_count_delete += 1; }, None => { self.metrics.miss_count_delete += 1; }, }; } pub fn contains(&mut self, key: Key) -> bool { self.storage_structure.contains(key) } fn evict_next(&mut self) -> Result<(), CacheError> { match self.replacement_policy.evict_next() { Ok(evict_index) => { match self.storage_structure.remove_index(evict_index) { Some((_, _)) => Ok(()), None => Err(CacheError::EvictionFailure) } }, Err(err) => Err(err) } } }
use cache::key::Key; use cache::value::Value; use cache::data_entry::DataEntry; use cache::storage_structure::CacheStorageStructure; use cache::replacement_policy::CacheReplacementPolicy; use cache::error::CacheError; pub struct CacheMetrics { pub evictions: u64, pub hit_count_get: u64, pub hit_count_set: u64, pub hit_count_delete: u64, pub miss_count_get: u64, pub miss_count_set: u64, pub miss_count_delete: u64, } pub struct Cache<T, R> { pub capacity: usize, pub item_lifetime: u64, pub max_key_len: usize, pub max_val_len: usize, pub storage_structure: T, pub replacement_policy: R, pub metrics: CacheMetrics, } impl CacheMetrics { pub fn new() -> CacheMetrics { CacheMetrics { evictions: 0, hit_count_get: 0, hit_count_set: 0, hit_count_delete: 0, miss_count_get: 0, miss_count_set: 0, miss_count_delete: 0, } } } impl <T: CacheStorageStructure, R: CacheReplacementPolicy> Cache<T, R> { pub fn new(capacity: usize, storage_structure: T, replacement_policy: R) -> Cache<T, R> { Cache { capacity: capacity, item_lifetime: 60 * 1000, max_key_len: 256, max_val_len: 512, storage_structure: storage_structure, replacement_policy: replacement_policy, metrics: CacheMetrics::new() } }
pub fn set(&mut self, key: Key, value: Value) -> Result<(), CacheError> { let entry = DataEntry::new(key.clone(), value); let current_elem_size = match self.storage_structure.get(key) { Some((_, curr_entry)) => curr_entry.len(), None => 0, }; if current_elem_size == 0 { self.metrics.miss_count_set += 1; } else { self.metrics.hit_count_set += 1; } loop { if self.storage_structure.size() + entry.len() - current_elem_size <= self.capacity { break; } try!(self.evict_next()); self.metrics.evictions += 1; } let (index, _) = self.storage_structure.set(entry); self.replacement_policy.update(index); Ok(()) } pub fn remove(&mut self, key: Key) { match self.storage_structure.remove(key) { Some((index, _)) => { self.replacement_policy.remove(index); self.metrics.hit_count_delete += 1; }, None => { self.metrics.miss_count_delete += 1; }, }; } pub fn contains(&mut self, key: Key) -> bool { self.storage_structure.contains(key) } fn evict_next(&mut self) -> Result<(), CacheError> { match self.replacement_policy.evict_next() { Ok(evict_index) => { match self.storage_structure.remove_index(evict_index) { Some((_, _)) => Ok(()), None => Err(CacheError::EvictionFailure) } }, Err(err) => Err(err) } } }
pub fn get(&mut self, key: Key) -> Option<DataEntry> { match self.storage_structure.get(key) { Some((index, entry)) => { self.replacement_policy.update(index); self.metrics.hit_count_get += 1; Some(entry) }, None => { self.metrics.miss_count_get += 1; None } } }
function_block-full_function
[ { "content": "// TODO: This will eventually be removed once a client is implemented, for now this exists for the purposes of telnet\n\npub fn parse_command<T: CacheStorageStructure, R: CacheReplacementPolicy>(command: &str, cache: &mut Cache<T, R>) -> Option<MemPacket> {\n\n let mut iter = command.split_whitespace();\n\n\n\n let mut extra_bytes: Vec<u8> = Vec::new();\n\n let mut key_bytes: Vec<u8> = Vec::new();\n\n let mut value_bytes: Vec<u8> = Vec::new();\n\n\n\n let code: u8;\n\n // TODO: Add param length validation? probably not if we implement a client\n\n match iter.next() {\n\n Some(cmd) => {\n\n match cmd.to_uppercase().borrow() {\n\n \"GET\" => {\n\n code = 0x00;\n\n key_bytes = Vec::from(iter.next().unwrap().as_bytes());\n\n },\n\n \"SET\" => {\n\n code = 0x01;\n\n key_bytes = Vec::from(iter.next().unwrap().as_bytes());\n\n value_bytes = Vec::from(iter.next().unwrap().as_bytes()); \n", "file_path": "src/command.rs", "rank": 0, "score": 79729.8374158661 }, { "content": "pub fn set_command<T: CacheStorageStructure, R: CacheReplacementPolicy>(request: MemPacket, cache: &mut Cache<T, R>) -> Option<MemPacket> {\n\n println!(\"set_command\");\n\n\n\n let mut response = MemPacket::new(false);\n\n response.header.with_opcode(request.header.opcode);\n\n\n\n // TODO: Required\n\n // if !request.has_extras() || !request.has_key() {\n\n // response.header.with_status(0x0004);\n\n // return response;\n\n // }\n\n \n\n set(request, cache, &mut response);\n\n Some(response)\n\n}\n\n\n", "file_path": "src/commands/set.rs", "rank": 1, "score": 77438.88369521996 }, { "content": "pub fn add_command<T: CacheStorageStructure, R: CacheReplacementPolicy>(request: MemPacket, cache: &mut Cache<T, R>) -> Option<MemPacket> {\n\n println!(\"add_command\");\n\n\n\n let mut response = MemPacket::new(false);\n\n response.header.with_opcode(request.header.opcode);\n\n\n\n // TODO: Required\n\n // if !request.has_extras() || !request.has_key() {\n\n // 
response.header.with_status(0x0004);\n\n // return response;\n\n // }\n\n\n\n if cache.contains(Key::new(request.key.clone())) {\n\n response.header.with_status(0x0005);\n\n return Some(response);\n\n }\n\n\n\n set(request, cache, &mut response);\n\n Some(response)\n\n}\n\n\n", "file_path": "src/commands/set.rs", "rank": 2, "score": 77438.88369521995 }, { "content": "pub fn delete_command<T: CacheStorageStructure, R: CacheReplacementPolicy>(request: MemPacket, cache: &mut Cache<T, R>) -> Option<MemPacket> {\n\n println!(\"delete_command\");\n\n\n\n let mut response = MemPacket::new(false);\n\n response.header.with_opcode(request.header.opcode);\n\n\n\n if request.has_extras() || request.has_value() {\n\n response.header.with_status(0x0004);\n\n return Some(response);\n\n }\n\n \n\n cache.remove(Key::new(request.key));\n\n response.header.with_status(0x0000); \n\n Some(response)\n\n}", "file_path": "src/commands/delete.rs", "rank": 3, "score": 77438.88369521996 }, { "content": "pub fn get_command<T: CacheStorageStructure, R: CacheReplacementPolicy>(request: MemPacket, cache: &mut Cache<T, R>) -> Option<MemPacket> {\n\n println!(\"get_command\");\n\n\n\n let mut response = MemPacket::new(false);\n\n response.header.with_opcode(request.header.opcode);\n\n\n\n if request.has_extras() || request.has_value() {\n\n response.header.with_status(0x0004);\n\n return Some(response) ;\n\n }\n\n\n\n let key_bytes = request.key.clone().into_bytes();\n\n let extra_bytes = vec![0; 0];\n\n \n\n \n\n response.with_key(String::from_utf8_lossy(key_bytes.as_slice()).into_owned());\n\n response.with_extras(String::from_utf8_lossy(extra_bytes.as_slice()).into_owned());\n\n\n\n match cache.get(Key::new(request.key)) {\n\n Some(value) => {\n\n response.with_value(value.value.item.clone());\n\n },\n\n None => {\n\n response.header.with_status(0x0001);\n\n response.with_value(String::from(\"Not found\").clone());\n\n }\n\n };\n\n \n\n Some(response)\n\n}", "file_path": "src/commands/get.rs", 
"rank": 4, "score": 77438.88369521996 }, { "content": "pub fn replace_command<T: CacheStorageStructure, R: CacheReplacementPolicy>(request: MemPacket, cache: &mut Cache<T, R>) -> Option<MemPacket> {\n\n println!(\"replace_command\");\n\n\n\n let mut response = MemPacket::new(false);\n\n response.header.with_opcode(request.header.opcode);\n\n\n\n // TODO: Required\n\n // if !request.has_extras() || !request.has_key() {\n\n // response.header.with_status(0x0004);\n\n // return response;\n\n // }\n\n\n\n if !cache.contains(Key::new(request.key.clone())) {\n\n response.header.with_status(0x0005);\n\n return Some(response);\n\n }\n\n\n\n set(request, cache, &mut response);\n\n Some(response)\n\n}", "file_path": "src/commands/set.rs", "rank": 5, "score": 77438.88369521996 }, { "content": "fn handle_command<T: CacheStorageStructure, R: CacheReplacementPolicy>(packet: MemPacket, cache: &mut Cache<T, R>) -> Option<MemPacket> {\n\n println!(\"handle_command\");\n\n println!(\"{:?}\", packet.header);\n\n println!(\"{:?}\", packet);\n\n\n\n let mut response = MemPacket::new(false);\n\n\n\n if packet.header.magic != 0x80 {\n\n response.header.with_status(0x0084);\n\n return Some(response);\n\n }\n\n\n\n match packet.header.opcode {\n\n 0x00 => commands::get::get_command(packet, cache),\n\n 0x01 => commands::set::set_command(packet, cache),\n\n 0x02 => commands::set::add_command(packet, cache),\n\n 0x03 => commands::set::replace_command(packet, cache),\n\n 0x04 => commands::delete::delete_command(packet, cache),\n\n _ => {\n\n response.header.with_status(0x0081);\n\n Some(response) \n\n }\n\n }\n\n}\n\n\n", "file_path": "src/command.rs", "rank": 6, "score": 63829.07247521405 }, { "content": "fn set<T: CacheStorageStructure, R: CacheReplacementPolicy>(request: MemPacket, cache: &mut Cache<T, R>, response: &mut MemPacket) {\n\n // TODO: If the Data Version Check (CAS) is nonzero, the requested operation MUST only succeed if the item exists and has a CAS value identical to the 
provided value.\n\n \n\n match cache.set(Key::new(request.key), Value::new(request.value)) {\n\n Ok(val) => {\n\n response.header.with_status(0x0000);\n\n response.header.with_cas(0x0000000000000001);\n\n },\n\n Err(_) => {\n\n response.header.with_status(0x0084);\n\n }\n\n }\n\n \n\n}\n\n\n", "file_path": "src/commands/set.rs", "rank": 7, "score": 62156.30607442137 }, { "content": "pub trait CacheStorageStructure {\n\n fn new() -> Self;\n\n\n\n fn size(&self) -> usize;\n\n \n\n /**\n\n * Returns the index and entry if it exists\n\n */\n\n fn get(&mut self, key: Key) -> Option<(usize, DataEntry)>;\n\n fn get_index(&mut self, index: usize) -> Option<(usize, DataEntry)>;\n\n\n\n /**\n\n * Set a key, value pair and return the new index and the removed entry if it exists\n\n */\n\n fn set(&mut self, entry: DataEntry) -> (usize, Option<DataEntry>);\n\n fn set_index(&mut self, index: usize, entry: DataEntry) -> (usize, Option<DataEntry>);\n\n\n\n /**\n\n * Remove a key, value pair and return the old index and entry if it exists\n\n */\n", "file_path": "src/cache/storage_structure.rs", "rank": 8, "score": 54371.0986613978 }, { "content": "pub trait CacheReplacementPolicy {\n\n fn new() -> Self;\n\n fn update(&mut self, index: usize);\n\n fn remove(&mut self, index: usize);\n\n fn evict_next(&mut self) -> Result<usize, CacheError>;\n\n}\n\n\n\npub struct LRU {\n\n recently_used: VecDeque<usize>, \n\n}\n\n\n\npub struct Clock {\n\n hand: usize,\n\n referenced_list: Vec<bool>,\n\n}\n\n\n\npub struct LFU {\n\n frequency_list: Vec<usize>, // (index, hits)\n\n}\n\n\n", "file_path": "src/cache/replacement_policy.rs", "rank": 9, "score": 54371.0986613978 }, { "content": "fn main() {\n\n let listener = TcpListener::bind(\"127.0.0.1:4321\").unwrap();\n\n\n\n for stream in listener.incoming() {\n\n match stream {\n\n Ok(stream) => {\n\n println!(\"Established connection!\");\n\n handle_client(stream);\n\n }\n\n Err(e) => {\n\n panic!(\"Unable to establish connection: {}\", e);\n\n 
}\n\n }\n\n }\n\n}", "file_path": "src/main.rs", "rank": 10, "score": 41650.62441954296 }, { "content": "fn handle_client(mut stream: TcpStream) {\n\n let mut cache: Cache<_, _> = Cache::new(360, NaiveStorageStructure::new(), LFU::new());\n\n\n\n loop {\n\n let mut buffer = [0; 128];\n\n let len = stream.read(&mut buffer).unwrap();\n\n\n\n let string = match str::from_utf8(&buffer[0..len]) {\n\n Ok(s) => s,\n\n Err(e) => panic!(\"Invalid UTF-8 sequence: {}\", e)\n\n };\n\n println!(\"{}\", string);\n\n\n\n match command::parse_command(string, &mut cache) {\n\n Some(response) => {\n\n println!(\"{:?}\", response.header);\n\n println!(\"{:?}\", response);\n\n\n\n let _ = stream.write(response.bytes().as_slice());\n\n let _ = stream.write(b\"\\r\\n\");\n\n let _ = stream.flush();\n\n },\n\n None => {},\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 11, "score": 30517.744439712427 }, { "content": "#[derive(PartialEq, Eq, Hash, Debug, Clone)]\n\npub struct Value {\n\n // TODO: Vec<u8>\n\n pub item: String,\n\n pub cas: u64,\n\n}\n\n\n\nimpl Value {\n\n pub fn new(item: String) -> Value {\n\n Value { \n\n item: item, \n\n cas: 0 \n\n }\n\n }\n\n\n\n pub fn inc_cas(&mut self) {\n\n self.cas += 1;\n\n }\n\n\n\n pub fn len(&self) -> usize {\n\n self.item.len()\n\n }\n\n}", "file_path": "src/cache/value.rs", "rank": 19, "score": 18243.303153461085 }, { "content": "#[derive(PartialEq, Eq, Hash, Debug, Clone)]\n\npub struct Key {\n\n // TODO: Vec<u8>\n\n pub item: String,\n\n}\n\n\n\nimpl Key {\n\n pub fn new(item: String) -> Key {\n\n Key { item: item }\n\n }\n\n\n\n pub fn len(&self) -> usize {\n\n self.item.len()\n\n }\n\n}", "file_path": "src/cache/key.rs", "rank": 20, "score": 18241.94986036367 }, { "content": "pub enum CacheError {\n\n EvictionFailure,\n\n KeyNotFound,\n\n}", "file_path": "src/cache/error.rs", "rank": 21, "score": 18240.965396264455 }, { "content": "pub mod cache;\n\npub mod key;\n\npub mod value;\n\npub mod data_entry;\n\npub mod 
storage_structure;\n\npub mod replacement_policy;\n\npub mod error;", "file_path": "src/cache/mod.rs", "rank": 22, "score": 18238.609519269645 }, { "content": " let mut target_index: usize = 0;\n\n for iter_index in self.recently_used.iter() {\n\n if *iter_index == index {\n\n break;\n\n }\n\n target_index += 1;\n\n }\n\n self.recently_used.remove(target_index);\n\n }\n\n \n\n fn evict_next(&mut self) -> Result<usize, CacheError> {\n\n match self.recently_used.pop_front() {\n\n Some(index) => Ok(index),\n\n None => Err(CacheError::EvictionFailure)\n\n }\n\n }\n\n}\n\n\n\nimpl CacheReplacementPolicy for Clock {\n\n fn new() -> Self {\n", "file_path": "src/cache/replacement_policy.rs", "rank": 23, "score": 17275.827580714813 }, { "content": "use cache::key::Key;\n\nuse cache::value::Value;\n\n\n\n#[derive(PartialEq, Eq, Hash, Debug, Clone)]\n\npub struct DataEntry {\n\n pub key: Key,\n\n pub value: Value\n\n}\n\n\n\nimpl DataEntry {\n\n pub fn new(key: Key, value: Value) -> DataEntry {\n\n DataEntry { \n\n key: key,\n\n value: value\n\n }\n\n }\n\n\n\n pub fn len(&self) -> usize {\n\n self.key.len() + self.value.len()\n\n }\n\n}", "file_path": "src/cache/data_entry.rs", "rank": 24, "score": 17275.118566629753 }, { "content": "impl CacheReplacementPolicy for LRU {\n\n fn new() -> Self {\n\n LRU { \n\n recently_used: VecDeque::new()\n\n }\n\n }\n\n\n\n fn update(&mut self, index: usize) {\n\n let mut target_index: usize = 0;\n\n for iter_index in self.recently_used.iter() {\n\n if *iter_index == index {\n\n break;\n\n }\n\n target_index += 1;\n\n }\n\n self.recently_used.remove(target_index);\n\n self.recently_used.push_back(target_index);\n\n }\n\n\n\n fn remove(&mut self, index: usize) {\n", "file_path": "src/cache/replacement_policy.rs", "rank": 25, "score": 17273.993360751214 }, { "content": " None => false\n\n }\n\n }\n\n}\n\n\n\n// pub struct HashStorageStructure {\n\n// rid_map: HashMap<Key, usize>,\n\n// data: Vec<DataEntry>,\n\n// size: usize,\n\n// 
}\n\n\n\n// impl CacheStorageStructure for HashStorageStructure {\n\n// fn new() -> Self {\n\n// HashStorageStructure {\n\n// rid_map: HashMap::new(),\n\n// data: Vec::new(),\n\n// size: 0\n\n// }\n\n// } \n\n\n", "file_path": "src/cache/storage_structure.rs", "rank": 26, "score": 17273.29394431961 }, { "content": " fn remove(&mut self, key: Key) -> Option<(usize, DataEntry)>;\n\n fn remove_index(&mut self, index: usize) -> Option<(usize, DataEntry)>;\n\n\n\n fn contains(&mut self, key: Key) -> bool;\n\n}\n\n\n\n/**\n\n * A naive storage structure with O(n) insert, lookup, and delete.\n\n */\n\npub struct NaiveStorageStructure {\n\n data: Vec<DataEntry>,\n\n size: usize,\n\n}\n\n\n\nimpl CacheStorageStructure for NaiveStorageStructure {\n\n fn new() -> Self {\n\n NaiveStorageStructure {\n\n data: Vec::new(),\n\n size: 0,\n\n }\n", "file_path": "src/cache/storage_structure.rs", "rank": 27, "score": 17272.362035894337 }, { "content": "}\n\n\n\nimpl CacheReplacementPolicy for LFU {\n\n fn new() -> Self {\n\n LFU { \n\n frequency_list: Vec::new()\n\n }\n\n }\n\n\n\n fn update(&mut self, index: usize) {\n\n if index < self.frequency_list.len() {\n\n self.frequency_list[index] = self.frequency_list[index] + 1;\n\n } else {\n\n self.frequency_list.insert(index, 1);\n\n }\n\n }\n\n\n\n fn remove(&mut self, index: usize) {\n\n self.frequency_list.remove(index);\n\n }\n", "file_path": "src/cache/replacement_policy.rs", "rank": 28, "score": 17271.423578547092 }, { "content": " Clock {\n\n hand: 0,\n\n referenced_list: Vec::new(),\n\n }\n\n }\n\n\n\n fn update(&mut self, index: usize) {\n\n if index < self.referenced_list.len() {\n\n self.referenced_list[index] = true;\n\n } else {\n\n self.referenced_list.insert(index, true);\n\n }\n\n }\n\n\n\n fn remove(&mut self, index: usize) {\n\n self.referenced_list.remove(index);\n\n }\n\n \n\n fn evict_next(&mut self) -> Result<usize, CacheError> { \n\n 'outer: loop {\n", "file_path": "src/cache/replacement_policy.rs", "rank": 29, 
"score": 17271.030048630815 }, { "content": " \n\n fn evict_next(&mut self) -> Result<usize, CacheError> {\n\n if self.frequency_list.len() == 0 {\n\n return Err(CacheError::EvictionFailure);\n\n }\n\n\n\n let mut index = 0;\n\n let mut target_index = 0;\n\n for frequency in self.frequency_list.iter() {\n\n if *frequency < self.frequency_list[target_index] {\n\n target_index = index;\n\n }\n\n\n\n index += 1;\n\n }\n\n\n\n self.frequency_list.remove(target_index);\n\n Ok(target_index)\n\n }\n\n}", "file_path": "src/cache/replacement_policy.rs", "rank": 30, "score": 17269.662542777296 }, { "content": "use std::collections::{VecDeque};\n\n\n\nuse cache::error::CacheError;\n\n\n", "file_path": "src/cache/replacement_policy.rs", "rank": 31, "score": 17269.35272848408 }, { "content": "// fn size(&self) -> usize {\n\n// self.size\n\n// }\n\n\n\n// fn get(&mut self, key: Key) -> Option<DataEntry> {\n\n// match self.rid_map.get(&key).cloned() {\n\n// Some(index) => Some(self.data[index].clone()),\n\n// None => None\n\n// }\n\n// }\n\n\n\n// fn set(&mut self, key: Key, value: Value) -> Result<usize, CacheError> {\n\n// self.size += value.len(); \n\n \n\n// let new = DataEntry::new(key.clone(), value);\n\n// match self.rid_map.get(&key).cloned() {\n\n// Some(index) => {\n\n// self.data.remove(index);\n\n// self.data.insert(index, new);\n\n// Ok(index)\n", "file_path": "src/cache/storage_structure.rs", "rank": 32, "score": 17269.23804787813 }, { "content": "use cache::key::Key;\n\nuse cache::data_entry::DataEntry;\n\n\n\n/**\n\n * With the current layout there must be two highly associated data sructures for maintaining the\n\n * cache. The storage_structure maintains the data while the replacement_policy maintains indexed\n\n * data regarding the state of the entries in the cache. With some replacment policies it is\n\n * possible to obtain better storage space by removing the dependency on the replacement policy\n\n * structure (e.g. 
LRU reorders structure entries on insert and the policy simply returns 0).\n\n * However, this tends to make the dependencies between the two traits tricky to manage and for\n\n * more practical replacement policies the extra overhead is often needed regardless. Also keep in\n\n * mind that for different types of data structures the indexing scheme might change and may not\n\n * be able to support a reordering replacement policy.\n\n */\n", "file_path": "src/cache/storage_structure.rs", "rank": 33, "score": 17267.625989936063 }, { "content": "// fn move_entry(&mut self, key: Key, index: usize) -> Result<(), CacheError> {\n\n// match self.rid_map.get(&key).cloned() {\n\n// Some(index) => {\n\n// let removed = self.data.remove(index);\n\n// self.data.insert(index, removed);\n\n// Ok(())\n\n// },\n\n// None => Err(CacheError::KeyNotFound)\n\n// }\n\n// }\n\n// }", "file_path": "src/cache/storage_structure.rs", "rank": 34, "score": 17267.62673440864 }, { "content": " if self.referenced_list.len() == 0 {\n\n return Err(CacheError::EvictionFailure);\n\n }\n\n\n\n for value in self.referenced_list.iter_mut().skip(self.hand) {\n\n self.hand += 1;\n\n\n\n if *value {\n\n *value = false;\n\n } else {\n\n break 'outer;\n\n }\n\n }\n\n\n\n self.hand = 0;\n\n }\n\n\n\n self.referenced_list.remove(self.hand - 1);\n\n Ok(self.hand - 1)\n\n }\n", "file_path": "src/cache/replacement_policy.rs", "rank": 35, "score": 17267.3877683558 }, { "content": "// self.size -= removed.len();\n\n\n\n// Ok(removed)\n\n// },\n\n// None => Err(CacheError::KeyNotFound),\n\n// }\n\n// }\n\n\n\n// fn contains(&mut self, key: Key) -> bool {\n\n// self.rid_map.contains_key(&key)\n\n// }\n\n\n\n// fn get_index(&mut self, key: Key) -> Option<usize> {\n\n// self.rid_map.get(&key).cloned()\n\n// }\n\n\n\n// fn get_with_index(&mut self, index: usize) -> Option<DataEntry> {\n\n// self.data.get(index).cloned()\n\n// }\n\n\n", "file_path": "src/cache/storage_structure.rs", "rank": 36, "score": 
17267.255108203863 }, { "content": "// },\n\n// None => {\n\n// self.data.push(new);\n\n// self.rid_map.insert(key, self.data.len() - 1);\n\n// Ok(self.data.len() - 1)\n\n// },\n\n// }\n\n// }\n\n\n\n// fn remove(&mut self, key: Key) -> Result<DataEntry, CacheError> {\n\n// match self.rid_map.get(&key).cloned() {\n\n// Some(index) => {\n\n// // TODO: Figure out more efficient way\n\n// let removed = self.data.remove(index);\n\n// let start_index = self.rid_map.get(&key.clone()).cloned().unwrap();\n\n// for k in self.data.iter_mut().skip(start_index) {\n\n// let new_index = self.rid_map.get(&k.key).unwrap() - 1;\n\n// self.rid_map.insert(k.key.clone(), new_index);\n\n// }\n\n// self.rid_map.remove(&key);\n", "file_path": "src/cache/storage_structure.rs", "rank": 37, "score": 17267.06646117448 }, { "content": " }\n\n\n\n fn size(&self) -> usize {\n\n self.size\n\n }\n\n\n\n fn get(&mut self, key: Key) -> Option<(usize, DataEntry)> {\n\n let mut index: usize = 0;\n\n for entry in self.data.clone().into_iter() {\n\n if entry.key == key {\n\n return self.get_index(index);\n\n }\n\n index += 1;\n\n }\n\n\n\n None\n\n }\n\n\n\n fn get_index(&mut self, index: usize) -> Option<(usize, DataEntry)> {\n\n match self.data.get(index) {\n", "file_path": "src/cache/storage_structure.rs", "rank": 38, "score": 17266.640812754402 }, { "content": " fn set_index(&mut self, index: usize, entry: DataEntry) -> (usize, Option<DataEntry>) {\n\n unimplemented!()\n\n } \n\n\n\n fn remove(&mut self, key: Key) -> Option<(usize, DataEntry)> {\n\n match self.get(key) {\n\n Some((index, _)) => self.remove_index(index),\n\n None => None\n\n }\n\n }\n\n\n\n fn remove_index(&mut self, index: usize) -> Option<(usize, DataEntry)> {\n\n let removed = self.data.remove(index);\n\n self.size -= removed.len();\n\n Some((index, removed.clone()))\n\n }\n\n\n\n fn contains(&mut self, key: Key) -> bool {\n\n match self.get(key) {\n\n Some(_) => true,\n", "file_path": "src/cache/storage_structure.rs", "rank": 
39, "score": 17266.445396403953 }, { "content": " Some(entry) => Some((index, entry.clone())),\n\n None => None\n\n }\n\n } \n\n\n\n fn set(&mut self, entry: DataEntry) -> (usize, Option<DataEntry>) {\n\n self.size += entry.len(); \n\n\n\n match self.remove(entry.key.clone()) {\n\n Some((index, old_entry)) => {\n\n self.data.insert(index, entry);\n\n (index, Some(old_entry))\n\n },\n\n None => {\n\n self.data.push(entry);\n\n (self.data.len() - 1, None)\n\n }\n\n }\n\n }\n\n\n", "file_path": "src/cache/storage_structure.rs", "rank": 40, "score": 17265.360906793867 }, { "content": "use std::fmt;\n\n// TODO: String -> &str to have better cache locality\n\n\n\npub struct MemPacket {\n\n pub header: MemHeader,\n\n pub extras: String,\n\n pub key: String,\n\n pub value: String,\n\n}\n\n\n\nimpl MemPacket {\n\n pub fn from_bytes(bytes: &[u8]) -> MemPacket {\n\n let e_len: u8 = bytes[4];\n\n let k_len: u16;\n\n let v_len: u32;\n\n\n\n k_len = ((bytes[2] as u16) << 8) + ((bytes[2] as u16) << 0);\n\n v_len = ((bytes[8] as u32) << 24) + ((bytes[9] as u32) << 16) + ((bytes[10] as u32) << 8) + ((bytes[0] as u32) << 0);\n\n\n\n let e_start: usize = 24;\n", "file_path": "src/packet.rs", "rank": 41, "score": 8.18763754029986 }, { "content": " opaque: ((bytes[12] as u32) << 24)\n\n + ((bytes[13] as u32) << 16) \n\n + ((bytes[14] as u32) << 8) \n\n + ((bytes[15] as u32) << 0),\n\n cas: ((bytes[16] as u64) << 56) \n\n + ((bytes[17] as u64) << 48) \n\n + ((bytes[17] as u64) << 40) \n\n + ((bytes[19] as u64) << 32)\n\n + ((bytes[20] as u64) << 24) \n\n + ((bytes[21] as u64) << 16) \n\n + ((bytes[22] as u64) << 8) \n\n + ((bytes[23] as u64) << 0)\n\n },\n\n extras: String::from_utf8_lossy(if e_len > 0 { &bytes[e_start..e_end] } else { &[] }).into_owned(),\n\n key: String::from_utf8_lossy(if k_len > 0 { &bytes[k_start..k_end] } else { &[] }).into_owned(),\n\n value: String::from_utf8_lossy(if (v_len as usize - e_len as usize - k_len as usize) > 0 { &bytes[v_start..v_end] } else { &[] 
}).into_owned(),\n\n }\n\n }\n\n\n\n pub fn new(request: bool) -> MemPacket {\n", "file_path": "src/packet.rs", "rank": 42, "score": 7.953890080862015 }, { "content": "pub struct MemHeader {\n\n pub magic: u8,\n\n pub opcode: u8,\n\n pub key_length: u16,\n\n pub extras_length: u8,\n\n pub data_type: u8,\n\n pub status: u16,\n\n pub total_body_length: u32,\n\n pub opaque: u32,\n\n pub cas: u64,\n\n}\n\n\n\nimpl MemHeader {\n\n pub fn new(request: bool) -> MemHeader {\n\n MemHeader {\n\n magic: if request { 0x80 } else { 0x81 },\n\n opcode: 0x00,\n\n key_length: 0x0000,\n\n extras_length: 0x00,\n\n data_type: 0x00,\n", "file_path": "src/packet.rs", "rank": 43, "score": 7.196972710660244 }, { "content": "use packet::MemPacket;\n\n\n\nuse cache::cache::Cache;\n\nuse cache::key::Key;\n\nuse cache::value::Value;\n\nuse cache::storage_structure::CacheStorageStructure;\n\nuse cache::replacement_policy::CacheReplacementPolicy;\n\n\n", "file_path": "src/commands/set.rs", "rank": 44, "score": 6.176335653663576 }, { "content": "use std::borrow::Borrow;\n\nuse packet::MemPacket;\n\n\n\nuse cache::cache::Cache;\n\nuse cache::storage_structure::CacheStorageStructure;\n\nuse cache::replacement_policy::CacheReplacementPolicy;\n\n\n\nuse commands;\n\n\n", "file_path": "src/command.rs", "rank": 45, "score": 6.133023782686836 }, { "content": "use packet::MemPacket;\n\n\n\nuse cache::cache::Cache;\n\nuse cache::key::Key;\n\nuse cache::storage_structure::CacheStorageStructure;\n\nuse cache::replacement_policy::CacheReplacementPolicy;\n\n\n", "file_path": "src/commands/get.rs", "rank": 46, "score": 6.1300111087597084 }, { "content": "use packet::MemPacket;\n\n\n\nuse cache::cache::Cache;\n\nuse cache::key::Key;\n\nuse cache::storage_structure::CacheStorageStructure;\n\nuse cache::replacement_policy::CacheReplacementPolicy;\n\n\n", "file_path": "src/commands/delete.rs", "rank": 47, "score": 6.1300111087597084 }, { "content": "use std::str;\n\nuse std::io::prelude::*;\n\nuse 
std::net::{TcpListener, TcpStream};\n\n\n\nextern crate linked_hash_map;\n\n\n\nuse packet::MemPacket;\n\nmod packet;\n\n\n\nuse cache::cache::Cache;\n\nuse cache::storage_structure::{CacheStorageStructure, NaiveStorageStructure};\n\nuse cache::replacement_policy::{CacheReplacementPolicy, LRU, Clock, LFU};\n\nmod cache;\n\n\n\nmod command;\n\nmod commands;\n\n\n", "file_path": "src/main.rs", "rank": 48, "score": 5.794214368259504 }, { "content": " }\n\n\n\n pub fn with_extras_len(&mut self, extras_length: u8) -> &mut MemHeader {\n\n self.extras_length = extras_length;\n\n self\n\n }\n\n\n\n pub fn with_value_len(&mut self, value_length: u32) -> &mut MemHeader {\n\n self.total_body_length = value_length + (self.key_length as u32) + (self.extras_length as u32);\n\n self\n\n }\n\n\n\n pub fn with_cas(&mut self, cas: u64) -> &mut MemHeader {\n\n self.cas = cas;\n\n self\n\n }\n\n\n\n pub fn bytes(&self) -> Vec<u8> {\n\n let mut out = Vec::with_capacity(24);\n\n\n", "file_path": "src/packet.rs", "rank": 49, "score": 5.695122356463367 }, { "content": " MemPacket {\n\n header: MemHeader::new(request),\n\n key: String::new(),\n\n extras: String::new(),\n\n value: String::new(),\n\n }\n\n }\n\n\n\n pub fn with_key(&mut self, key: String) -> &mut MemPacket {\n\n self.header.with_key_len(key.len() as u16); \n\n self.key = key;\n\n self\n\n }\n\n\n\n pub fn with_extras(&mut self, extras: String) -> &mut MemPacket {\n\n self.header.with_extras_len(extras.len() as u8); \n\n self.extras = extras;\n\n self\n\n }\n\n\n", "file_path": "src/packet.rs", "rank": 50, "score": 4.750262342070247 }, { "content": " pub fn with_value(&mut self, value: String) -> &mut MemPacket {\n\n self.header.with_value_len(value.len() as u32);\n\n self.value = value;\n\n self\n\n }\n\n\n\n pub fn has_key(&self) -> bool {\n\n self.header.key_length > 0 && self.key.len() > 0\n\n }\n\n\n\n pub fn has_extras(&self) -> bool {\n\n self.header.extras_length > 0 && self.extras.len() > 0\n\n }\n\n\n\n pub fn 
has_value(&self) -> bool {\n\n self.header.total_body_length - self.header.key_length as u32 - self.header.extras_length as u32 > 0 && self.value.len() > 0\n\n }\n\n\n\n pub fn bytes(&self) -> Vec<u8> {\n\n let mut out = Vec::new();\n", "file_path": "src/packet.rs", "rank": 51, "score": 4.082600343456473 }, { "content": "\n\n out.extend(self.header.bytes());\n\n out.extend(self.extras.bytes());\n\n out.extend(self.key.bytes());\n\n out.extend(self.value.bytes());\n\n\n\n return out;\n\n }\n\n}\n\n\n\nimpl fmt::Debug for MemPacket {\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n\n fmt.debug_struct(\"MemPacket\")\n\n .field(\"extras\", &self.extras)\n\n .field(\"key\", &self.key)\n\n .field(\"value\", &self.value)\n\n .finish()\n\n }\n\n}\n\n\n", "file_path": "src/packet.rs", "rank": 52, "score": 3.463045555011283 }, { "content": " code = 0x1a;\n\n // TODO: \n\n },\n\n _ => {\n\n code = 0xFF;\n\n }\n\n }\n\n }\n\n None => {\n\n let mut response = MemPacket::new(false);\n\n response.header.with_status(0x0084);\n\n\n\n return Some(response);\n\n }\n\n }\n\n\n\n let mut request = MemPacket::new(true);\n\n request.header.with_opcode(code);\n\n request.with_key(String::from_utf8_lossy(key_bytes.as_slice()).into_owned()); \n\n request.with_extras(String::from_utf8_lossy(extra_bytes.as_slice()).into_owned());\n\n request.with_value(String::from_utf8_lossy(value_bytes.as_slice()).into_owned());\n\n\n\n handle_command(request, cache)\n\n}", "file_path": "src/command.rs", "rank": 53, "score": 3.168954541003968 }, { "content": "pub mod get;\n\npub mod set;\n\npub mod delete;", "file_path": "src/commands/mod.rs", "rank": 54, "score": 3.079822772074699 }, { "content": " let e_end: usize = e_start + (e_len as usize);\n\n\n\n let k_start: usize = e_end + if e_len > 0 { 1 } else { 0 };\n\n let k_end: usize = k_start + (k_len as usize);\n\n\n\n let v_start: usize = k_end + if k_len > 0 { 1 } else { 0 };\n\n let v_end: usize = v_start + (v_len as usize) - (k_len as 
usize) - (e_len as usize);\n\n\n\n assert!(v_len == bytes.len() as u32 - 24);\n\n\n\n MemPacket {\n\n header: MemHeader {\n\n magic: bytes[0],\n\n opcode: bytes[1],\n\n key_length: k_len,\n\n extras_length: e_len,\n\n data_type: bytes[5],\n\n status: ((bytes[6] as u16) << 8) \n\n + ((bytes[7] as u16) << 0),\n\n total_body_length: v_len,\n", "file_path": "src/packet.rs", "rank": 55, "score": 3.0318053443874553 }, { "content": " status: 0x0000,\n\n total_body_length: 0x00000000,\n\n opaque: 0x00000000,\n\n cas: 0x0000000000000000\n\n }\n\n }\n\n\n\n pub fn with_opcode(&mut self, opcode: u8) -> &mut MemHeader {\n\n self.opcode = opcode;\n\n self\n\n }\n\n\n\n pub fn with_status(&mut self, status: u16) -> &mut MemHeader {\n\n self.status = status;\n\n self\n\n }\n\n\n\n pub fn with_key_len(&mut self, key_length: u16) -> &mut MemHeader {\n\n self.key_length = key_length;\n\n self\n", "file_path": "src/packet.rs", "rank": 56, "score": 2.4818186806769105 }, { "content": "\n\n out.push(((self.cas >> 56) & 0xFF) as u8);\n\n out.push(((self.cas >> 48) & 0xFF) as u8);\n\n out.push(((self.cas >> 40) & 0xFF) as u8);\n\n out.push(((self.cas >> 32) & 0xFF) as u8);\n\n out.push(((self.cas >> 24) & 0xFF) as u8);\n\n out.push(((self.cas >> 16) & 0xFF) as u8);\n\n out.push(((self.cas >> 8) & 0xFF) as u8);\n\n out.push(((self.cas >> 0) & 0xFF) as u8);\n\n\n\n return out;\n\n }\n\n}\n\n\n\nimpl fmt::Debug for MemHeader {\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result {\n\n fmt.debug_struct(\"MemHeader\")\n\n .field(\"magic\", &self.magic)\n\n .field(\"opcode\", &self.opcode)\n\n .field(\"key_length\", &self.key_length)\n\n .field(\"extras_length\", &self.extras_length)\n\n .field(\"data_type\", &self.data_type)\n\n .field(\"status\", &self.status)\n\n .field(\"total_body_length\", &self.total_body_length)\n\n .field(\"opaque\", &self.opaque)\n\n .field(\"cas\", &self.cas)\n\n .finish()\n\n }\n\n}", "file_path": "src/packet.rs", "rank": 57, "score": 2.2814261410843577 } ]
Rust
generator/src/block_data.rs
Redrield/feather
8614692f8e0a24979853e29d41567b5a82249831
use super::WriteExt; use byteorder::{LittleEndian, WriteBytesExt}; use failure::Error; use indexmap::IndexMap; use std::collections::HashMap; use std::fs::File; use std::io::{BufReader, BufWriter, Write}; pub const DEFAULT_STATE_ID: u16 = 1; #[derive(Clone, Debug, Deserialize, Deref, DerefMut)] pub struct BlockReport { #[serde(flatten)] pub blocks: IndexMap<String, Block>, } #[derive(Clone, Debug, Deserialize)] pub struct Block { pub states: Vec<State>, pub properties: Option<BlockProperties>, } #[derive(Clone, Debug, Deserialize, Deref, DerefMut)] pub struct BlockProperties { #[serde(flatten)] pub props: HashMap<String, Vec<String>>, } #[derive(Clone, Debug, Deserialize)] pub struct State { pub id: u16, #[serde(default)] pub default: bool, pub properties: Option<StateProperties>, } #[derive(Clone, Debug, Deserialize, Deref, DerefMut, Default)] pub struct StateProperties { #[serde(flatten)] pub props: HashMap<String, String>, } pub fn generate_mappings_file( input: &str, output: &str, native_input: &str, proto: u32, version: &str, ) -> Result<(), Error> { info!( "Generating mappings file {} using input report {} and native report {}", output, input, native_input ); let in_file = File::open(input)?; let out_file = File::create(output)?; let native_file = File::open(native_input)?; info!("Parsing data files"); let report: BlockReport = serde_json::from_reader(BufReader::new(&in_file))?; let native_report: BlockReport = serde_json::from_reader(BufReader::new(&native_file))?; info!("Parsing successful"); let mut out = BufWriter::new(&out_file); write_header(&mut out, version, proto, false)?; let mut state_bufs = vec![]; for (string_id, block) in &native_report.blocks { for state in &block.states { let mut state_buf = vec![]; let props = state.properties.clone().unwrap_or_default(); let props = props.props; let state_id = find_state_in_report(&report, string_id.as_str(), &props) .unwrap_or(DEFAULT_STATE_ID); state_buf.write_u16::<LittleEndian>(state.id)?; 
state_buf.write_u16::<LittleEndian>(state_id)?; state_bufs.push(state_buf); } } out.write_u32::<LittleEndian>(state_bufs.len() as u32)?; for buf in state_bufs { out.write_all(&buf)?; } out.flush()?; info!("Mappings file generated successfully"); Ok(()) } pub fn generate_native_mappings_file( input: &str, output: &str, proto: u32, version: &str, ) -> Result<(), Error> { info!( "Generating native mappings file {} using input report {}", output, input ); let in_file = File::open(input)?; let out_file = File::create(output)?; info!("Parsing data file"); let report: BlockReport = serde_json::from_reader(BufReader::new(&in_file))?; info!("Parsing successful"); let mut out = BufWriter::new(&out_file); write_header(&mut out, version, proto, true)?; let mut count = 0; let mut buf = vec![]; for (block_name, block) in &report.blocks { for state in &block.states { buf.write_string(block_name.as_str())?; let len = { if let Some(props) = state.properties.as_ref() { props.props.len() } else { 0 } }; buf.write_u32::<LittleEndian>(len as u32)?; if let Some(props) = state.properties.as_ref() { for (name, value) in &props.props { buf.write_string(name.as_str())?; buf.write_string(value.as_str())?; } } buf.write_u16::<LittleEndian>(state.id)?; count += 1; } } out.write_u32::<LittleEndian>(count)?; out.write_all(&buf)?; info!("Mappings file generated successfully"); Ok(()) } fn find_state_in_report( report: &BlockReport, name: &str, props: &HashMap<String, String>, ) -> Option<u16> { let block = report.blocks.get(name)?; let state = block.states.iter().find(|state| match &state.properties { None => props.is_empty(), Some(state_props) => props == &state_props.props, })?; Some(state.id) } fn write_header<W: Write>( out: &mut W, version: &str, proto: u32, native: bool, ) -> Result<(), Error> { out.write_all(b"FEATHER_BLOCK_DATA_FILE")?; out.write_string(version)?; out.write_u32::<LittleEndian>(proto)?; out.write_u8(native as u8)?; Ok(()) }
use super::WriteExt; use byteorder::{LittleEndian, WriteBytesExt}; use failure::Error; use indexmap::IndexMap; use std::collections::HashMap; use std::fs::File; use std::io::{BufReader, BufWriter, Write}; pub const DEFAULT_STATE_ID: u16 = 1; #[derive(Clone, Debug, Deserialize, Deref, DerefMut)] pub struct BlockReport { #[serde(flatten)] pub blocks: IndexMap<String, Block>, } #[derive(Clone, Debug, Deserialize)] pub struct Block { pub states: Vec<State>, pub properties: Option<BlockProperties>, } #[derive(Clone, Debug, Deserialize, Deref, DerefMut)] pub struct BlockProperties { #[serde(flatten)] pub props: HashMap<String, Vec<String>>, } #[derive(Clone, Debug, Deserialize)] pub struct State { pub id: u16, #[serde(default)] pub default: bool, pub properties: Option<StateProperties>, } #[derive(Clone, Debug, Deserialize, Deref, DerefMut, Default)] pub struct StateProperties { #[serde(flatten)] pub props: HashMap<String, String>, } pub fn generate_mappings_file( input: &str, output: &str, native_input: &str, proto: u32, version: &str, ) -> Result<(), Error> { info!( "Generating mappings file {} using input report {} and native report {}", output, input, native_input ); let in_file = File::open(input)?; let out_file = File::create(output)?; let native_file = File::open(native_input)?; info!("Parsing data files"); let report: BlockReport = serde_json::from_reader(BufReader::new(&in_file))?; let native_report: BlockReport = serde_json::from_reader(BufReader::new(&native_file))?; info!("Parsing successful"); let mut out = BufWriter::new(&out_file); write_header(&mut out, version, proto, false)?; let mut state_bufs = vec![]; for (string_id, block) in &native_report.blocks { for state in &block.states { let mut state_buf = vec![]; let props = state.properties.clone().unwrap_or_default(); let props = props.props; let state_id = find_state_in_report(&report, string_id.as_str(), &props) .unwrap_or(DEFAULT_STATE_ID); state_buf.write_u16::<LittleEndian>(state.id)?; 
state_buf.write_u16::<LittleEndian>(state_id)?; state_bufs.push(state_buf); } } out.write_u32::<LittleEndian>(state_bufs.len() as u32)?; for buf in state_bufs { out.write_all(&buf)?; } out.flush()?; info!("Mappings file generated successfully"); Ok(()) } pub fn generate_native_mappings_file( input: &str, output: &str, proto: u32, version: &str, ) -> Result<(), Error> { info!( "Generating native mappings file {} using input report {}", output, input ); let in_file = File::open(input)?; let out_file = File::create(output)?;
fn find_state_in_report( report: &BlockReport, name: &str, props: &HashMap<String, String>, ) -> Option<u16> { let block = report.blocks.get(name)?; let state = block.states.iter().find(|state| match &state.properties { None => props.is_empty(), Some(state_props) => props == &state_props.props, })?; Some(state.id) } fn write_header<W: Write>( out: &mut W, version: &str, proto: u32, native: bool, ) -> Result<(), Error> { out.write_all(b"FEATHER_BLOCK_DATA_FILE")?; out.write_string(version)?; out.write_u32::<LittleEndian>(proto)?; out.write_u8(native as u8)?; Ok(()) }
// Body continuation of `generate_native_mappings_file`.
    info!("Parsing data file");
    let report: BlockReport = serde_json::from_reader(BufReader::new(&in_file))?;
    info!("Parsing successful");

    let mut out = BufWriter::new(&out_file);

    // `true`: mark this file as a native mappings file in the header.
    write_header(&mut out, version, proto, true)?;

    // Number of state records written; emitted before the payload below.
    let mut count = 0;
    // Payload is staged in memory so `count` can precede it in the file.
    let mut buf = vec![];

    for (block_name, block) in &report.blocks {
        for state in &block.states {
            // Record layout per state: block identifier, property count
            // (u32 LE), (name, value) string pairs, then state ID (u16 LE).
            buf.write_string(block_name.as_str())?;

            let len = {
                if let Some(props) = state.properties.as_ref() {
                    props.props.len()
                } else {
                    0
                }
            };
            buf.write_u32::<LittleEndian>(len as u32)?;

            if let Some(props) = state.properties.as_ref() {
                // NOTE(review): iterating a HashMap — pair order is not
                // deterministic across runs; confirm consumers don't rely
                // on a stable property ordering.
                for (name, value) in &props.props {
                    buf.write_string(name.as_str())?;
                    buf.write_string(value.as_str())?;
                }
            }

            buf.write_u16::<LittleEndian>(state.id)?;
            count += 1;
        }
    }

    out.write_u32::<LittleEndian>(count)?;
    out.write_all(&buf)?;

    info!("Mappings file generated successfully");
    Ok(())
}
function_block-function_prefix_line
[ { "content": "pub fn generate_mappings_file(input: &str, output: &str) -> Result<(), Error> {\n\n info!(\"Parsing data file\");\n\n let report = load_report(input)?;\n\n info!(\"Data file parsed successfully\");\n\n\n\n info!(\"Generating mappings file {}\", output);\n\n\n\n let buf = mappings::generate_mappings_file(report, true)?;\n\n let mut file = File::create(output)?;\n\n file.write_all(&buf)?;\n\n\n\n info!(\"Success\");\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "generator/src/item/mod.rs", "rank": 0, "score": 536482.0885012661 }, { "content": "pub fn generate_rust(input: &str, output: &str) -> Result<(), Error> {\n\n let report = load_report(input)?;\n\n\n\n let mut enum_variants = vec![];\n\n let mut to_protocol_id_match_arms = vec![];\n\n let mut from_protocol_id_match_arms = vec![];\n\n let mut to_identifier_match_arms = vec![];\n\n let mut from_identifier_match_arms = vec![];\n\n\n\n // These biomes don't exist in 1.13.2, only in 1.14.\n\n let exclude = [\"minecraft:bamboo_jungle\", \"minecraft:bamboo_jungle_hills\"];\n\n\n\n for (name, biome) in report.biomes {\n\n if exclude.iter().any(|e| e == &name) {\n\n continue;\n\n }\n\n let protocol_id = biome.protocol_id;\n\n\n\n let ident = Ident::new(&name[10..].to_camel_case(), Span::call_site());\n\n\n", "file_path": "generator/src/biome.rs", "rank": 1, "score": 478118.8390997624 }, { "content": "pub fn generate_rust(input: &str, output: &str) -> Result<(), Error> {\n\n info!(\"Parsing data file\");\n\n let report = load_report(input)?;\n\n info!(\"Data file parsed successfully\");\n\n\n\n info!(\"Generating Rust code\");\n\n let buf = rust::generate_rust(report)?;\n\n let mut file = File::create(output)?;\n\n file.write_all(buf.as_bytes())?;\n\n info!(\"Generated code\");\n\n\n\n info!(\"Formatting code with rustfmt\");\n\n Command::new(\"rustfmt\").arg(output).output()?;\n\n info!(\"Success\");\n\n\n\n Ok(())\n\n}\n", "file_path": "generator/src/item/mod.rs", "rank": 2, "score": 471166.32398454985 }, { 
"content": "pub fn generate_rust_code(input: &str, output: &str) -> Result<(), Error> {\n\n info!(\n\n \"Writing Rust `Block` enum and data structs to {} using native input report {}\",\n\n output, input,\n\n );\n\n\n\n let in_file = File::open(input)?;\n\n let mut out_file = File::create(output)?;\n\n\n\n info!(\"Parsing data file\");\n\n let report: BlockReport = serde_json::from_reader(BufReader::new(&in_file))?;\n\n info!(\"Parsing successful\");\n\n\n\n let mut enum_entries = vec![];\n\n //let mut name_fn_entries = vec![];\n\n //let mut from_name_and_props_fn_entries = vec![];\n\n let mut data_structs = vec![];\n\n let mut property_enums = vec![];\n\n\n\n let mut native_type_id_entries = vec![];\n", "file_path": "generator/src/rust.rs", "rank": 3, "score": 471166.32398454985 }, { "content": "/// Given a block report and an item report, generates\n\n/// mappings from items to blocks and writes them to\n\n/// the file with the given path.\n\npub fn generate_mappings(blocks: &str, items: &str, output_path: &str) -> Result<(), Error> {\n\n let blocks = {\n\n let mut file = File::open(blocks)?;\n\n let mut string = String::new();\n\n file.read_to_string(&mut string)?;\n\n serde_json::from_str(&string)?\n\n };\n\n\n\n let items = {\n\n let mut file = File::open(items)?;\n\n let mut string = String::new();\n\n file.read_to_string(&mut string)?;\n\n serde_json::from_str(&string)?\n\n };\n\n\n\n let mut output = File::create(output_path)?;\n\n\n\n _internal_generate_mappings(&blocks, &items, &mut output)?;\n\n\n\n Command::new(\"rustfmt\").arg(output_path).output()?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "generator/src/item_to_block/mod.rs", "rank": 4, "score": 434493.1328708237 }, { "content": "fn load_block_ids(block: &BlockDefinition) -> Vec<(Vec<(String, String)>, u16)> {\n\n let mut res: Vec<(Vec<(String, String)>, u16)> = vec![];\n\n\n\n for state in &block.states {\n\n let properties = state.properties.clone().into_iter().collect();\n\n\n\n 
res.push((properties, state.id));\n\n }\n\n\n\n res\n\n}\n\n\n", "file_path": "core/blocks/generator/src/load.rs", "rank": 5, "score": 409360.5472309775 }, { "content": "pub fn generate_rust(report: ItemReport) -> Result<String, Error> {\n\n let mut enum_variants = vec![];\n\n let mut from_identifier_arms = vec![];\n\n let mut to_identifier_arms = vec![];\n\n\n\n for (identifier, _) in report.mappings {\n\n let variant_name = ident(&variant_name(&identifier));\n\n enum_variants.push(quote! {\n\n #variant_name\n\n });\n\n\n\n from_identifier_arms.push(quote! {\n\n #identifier => Some(Item::#variant_name)\n\n });\n\n\n\n to_identifier_arms.push(quote! {\n\n Item::#variant_name => #identifier\n\n });\n\n }\n\n\n", "file_path": "generator/src/item/rust.rs", "rank": 6, "score": 402402.63194290304 }, { "content": "pub fn load_report(path: &str) -> Result<ItemReport, Error> {\n\n let mut file = File::open(path)?;\n\n\n\n let mut string = String::new();\n\n file.read_to_string(&mut string)?;\n\n\n\n let report = serde_json::from_str(&string)?;\n\n\n\n Ok(report)\n\n}\n\n\n", "file_path": "generator/src/item/mod.rs", "rank": 7, "score": 372031.9203993039 }, { "content": "fn run_rustfmt(file: &str) -> Result<(), Error> {\n\n Command::new(\"rustfmt\").args(&[file]).output()?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "generator/src/rust.rs", "rank": 8, "score": 361286.5580485942 }, { "content": "/// Reads the region header from the given file.\n\nfn read_header(file: &mut File) -> Result<RegionHeader, Error> {\n\n let len = {\n\n let metadata = file.metadata().map_err(Error::Io)?;\n\n metadata.len()\n\n };\n\n\n\n // The header consists of 8 KiB of data, so\n\n // we can return an error early if it's too small.\n\n if len < 8192 {\n\n return Err(Error::Header(\"The region header is too small.\"));\n\n }\n\n\n\n let mut header = RegionHeader {\n\n locations: vec![],\n\n timestamps: vec![],\n\n };\n\n\n\n // The first 4 KiB contains the location\n\n // and sector length data. 
The first three\n\n // bytes of a 4-byte value contain the offset,\n", "file_path": "core/anvil/src/region/mod.rs", "rank": 10, "score": 343913.1595303882 }, { "content": "fn update_properties_complex_state(c: &mut Criterion) {\n\n feather_blocks::init();\n\n c.bench_function(\"update_properties_complex_state\", |b| {\n\n b.iter(|| {\n\n BlockId::redstone_wire()\n\n .with_east_wire(EastWire::Up)\n\n .with_west_wire(WestWire::Side)\n\n .with_power(15)\n\n });\n\n });\n\n}\n\n\n\ncriterion_group!(\n\n benches,\n\n update_properties_complex_state,\n\n to_id_complex_state,\n\n from_id_complex_state\n\n);\n\ncriterion_main!(benches);\n", "file_path": "core/blocks/benches/block_id_mappings.rs", "rank": 11, "score": 332553.6908242747 }, { "content": "fn load_block_properties(block: &BlockDefinition) -> Vec<String> {\n\n let mut props = vec![];\n\n\n\n for identifier in block.properties.keys() {\n\n props.push(identifier.to_owned());\n\n }\n\n\n\n props\n\n}\n\n\n", "file_path": "core/blocks/generator/src/load.rs", "rank": 12, "score": 326577.59591562644 }, { "content": "fn decrypt_using_rsa(data: &[u8], key: &RSAPrivateKey) -> Result<Vec<u8>, Error> {\n\n let buf = key\n\n .decrypt(PaddingScheme::PKCS1v15, data)\n\n .map_err(|_| Error::BadEncryption)?;\n\n\n\n Ok(buf)\n\n}\n\n\n", "file_path": "server/network/src/initial_handler.rs", "rank": 13, "score": 325350.6396945913 }, { "content": "fn load_report(path: &str) -> Result<BiomeReport, Error> {\n\n let mut file = File::open(path)?;\n\n\n\n let json: Value = {\n\n let mut string = String::new();\n\n file.read_to_string(&mut string)?;\n\n serde_json::from_str(&string)?\n\n };\n\n\n\n // Hack to get around the format of the registries.json\n\n // file.\n\n let biome_report: BiomeReport = {\n\n let top = &json[\"minecraft:biome\"];\n\n let entries = &top[\"entries\"];\n\n\n\n let as_string = serde_json::to_string(entries)?;\n\n serde_json::from_str(&as_string)?\n\n };\n\n\n\n Ok(biome_report)\n\n}\n\n\n", "file_path": 
"generator/src/biome.rs", "rank": 14, "score": 324337.58576145215 }, { "content": "/// Generates code for the block report.\n\npub fn generate() -> anyhow::Result<Output> {\n\n let blocks = load::load()?;\n\n\n\n let mut output = Output::default();\n\n\n\n output.kind.push_str(&generate_kind(&blocks).to_string());\n\n let table_src = generate_table(&blocks);\n\n output.block_table.push_str(&table_src.to_string());\n\n let block_fns_src = generate_block_fns(&blocks);\n\n output.block_fns.push_str(&block_fns_src.to_string());\n\n\n\n output.block_table_serialized = serialize_block_table(&blocks);\n\n output.vanilla_ids_serialized = serialized_vanilla_ids(&blocks);\n\n\n\n Ok(output)\n\n}\n\n\n", "file_path": "core/blocks/generator/src/lib.rs", "rank": 15, "score": 314593.48004131595 }, { "content": "fn from_id_complex_state(c: &mut Criterion) {\n\n feather_blocks::init();\n\n c.bench_function(\"from_id_complex_state\", |b| {\n\n b.iter(|| BlockId::from_vanilla_id(black_box(7198)));\n\n });\n\n}\n\n\n", "file_path": "core/blocks/benches/block_id_mappings.rs", "rank": 16, "score": 314134.86173475604 }, { "content": "fn to_id_complex_state(c: &mut Criterion) {\n\n feather_blocks::init();\n\n let block = BlockId::redstone_wire()\n\n .with_east_wire(EastWire::Up)\n\n .with_power(15);\n\n c.bench_function(\"to_id_complex_state\", |b| {\n\n b.iter(|| black_box(block).vanilla_id());\n\n });\n\n}\n\n\n", "file_path": "core/blocks/benches/block_id_mappings.rs", "rank": 17, "score": 314134.86173475604 }, { "content": "/// Generates the `from_name_and_default_props` function.\n\nfn generate_from_and_default_props_fn(report: &BlockReport) -> TokenStream {\n\n // More duplicate code than ever before!\n\n // This entire file should probably be rewritten at some point.\n\n let mut match_arms = vec![];\n\n\n\n for (block_name, block) in &report.blocks {\n\n let variant_name = block_name[10..].to_camel_case();\n\n let variant_ident = Ident::new(&variant_name, 
Span::call_site());\n\n\n\n if block.properties.is_some() {\n\n let data_struct_str = format!(\"{}Data\", variant_name);\n\n let data_struct_ident = Ident::new(&data_struct_str, Span::call_site());\n\n\n\n match_arms.push(quote! {\n\n #block_name => {\n\n let data = #data_struct_ident::default();\n\n Some(Block::#variant_ident(data))\n\n }\n\n });\n\n } else {\n", "file_path": "generator/src/rust.rs", "rank": 19, "score": 307175.2536930347 }, { "content": "fn fix_property_names(report: &mut BlocksReport) -> PropertyStore {\n\n let mut store = PropertyStore::default();\n\n\n\n for block in report.blocks.values() {\n\n for (property_name, possible_values) in &block.properties {\n\n let property_name = fix_keywords(property_name);\n\n\n\n store.register(property_name.to_owned(), possible_values.clone());\n\n }\n\n }\n\n\n\n // Correct block property names\n\n let result = store.clone().finish();\n\n\n\n for block in report.blocks.values_mut() {\n\n let block: &mut BlockDefinition = block;\n\n let mut overrides = vec![];\n\n for (property_name, possible_values) in &mut block.properties {\n\n let name_fixed = fix_keywords(property_name);\n\n if result.get(property_name).is_none() {\n", "file_path": "core/blocks/generator/src/load.rs", "rank": 20, "score": 304748.4171152989 }, { "content": "fn guess_property_kind(possible_values: &[String], property_struct_name: &str) -> PropertyKind {\n\n let first = &possible_values[0];\n\n\n\n if i32::from_str(first).is_ok() {\n\n // integer\n\n let as_integer: Vec<_> = possible_values\n\n .iter()\n\n .map(|x| i32::from_str(x).unwrap())\n\n .collect();\n\n\n\n let min = *as_integer.iter().min().unwrap();\n\n let max = *as_integer.iter().max().unwrap();\n\n\n\n PropertyKind::Integer { range: min..=max }\n\n } else if bool::from_str(first).is_ok() {\n\n // boolean\n\n PropertyKind::Boolean\n\n } else {\n\n // enum\n\n let name = ident(property_struct_name);\n\n let variants: Vec<_> = possible_values\n\n .iter()\n\n .map(|variant| 
variant.to_camel_case())\n\n .map(ident)\n\n .collect();\n\n PropertyKind::Enum { name, variants }\n\n }\n\n}\n\n\n", "file_path": "core/blocks/generator/src/load.rs", "rank": 21, "score": 303675.7643121244 }, { "content": "/// Generates the `from_internal_state_id` function.\n\nfn generate_from_internal_state_id_fn(report: &BlockReport) -> TokenStream {\n\n let mut match_arms = vec![];\n\n\n\n let mut count = 0;\n\n for (block_name, block) in &report.blocks {\n\n let variant_name = block_name[10..].to_camel_case();\n\n let variant_ident = Ident::new(&variant_name, Span::call_site());\n\n\n\n if block.properties.is_some() {\n\n let range_start = count;\n\n let range_end = range_start + block.states.len() - 1;\n\n\n\n let data_struct_str = format!(\"{}Data\", variant_name);\n\n let data_struct_ident = Ident::new(&data_struct_str, Span::call_site());\n\n\n\n match_arms.push(quote! {\n\n #range_start..=#range_end => {\n\n let offset = id - #range_start;\n\n let data = #data_struct_ident::from_value(offset)?;\n\n Some(Block::#variant_ident(data))\n", "file_path": "generator/src/rust.rs", "rank": 22, "score": 302338.77999030123 }, { "content": "/// Generates the function which retrieves the offset\n\n/// from the block type's internal ID to the block state\n\n/// internal ID.\n\nfn generate_internal_id_data_offset_fn(report: &BlockReport) -> TokenStream {\n\n let mut match_arms = vec![];\n\n\n\n for (block_name, block) in &report.blocks {\n\n let ident = Ident::new(&block_name[10..].to_camel_case(), Span::call_site());\n\n match_arms.push(if block.properties.is_some() {\n\n quote! {\n\n Block::#ident(data) => data.value()\n\n }\n\n } else {\n\n quote! {\n\n Block::#ident => 0\n\n }\n\n });\n\n }\n\n\n\n let result = quote! 
{\n\n fn internal_id_data_offset(&self) -> usize {\n\n match self {\n\n #(#match_arms ,)*\n\n }\n\n }\n\n };\n\n result\n\n}\n\n\n", "file_path": "generator/src/rust.rs", "rank": 23, "score": 297727.0710671165 }, { "content": "/// Generates `from_name_and_props` and `to_name_and_props`.\n\nfn generate_name_and_props_mappings(report: &BlockReport) -> TokenStream {\n\n let mut from_name_and_props_match_arms = vec![];\n\n let mut to_name_and_props_match_arms = vec![];\n\n\n\n for (block_name, block) in &report.blocks {\n\n let variant_name = block_name[10..].to_camel_case();\n\n let variant_ident = Ident::new(&variant_name, Span::call_site());\n\n\n\n if let Some(props) = &block.properties {\n\n let data_struct_str = format!(\"{}Data\", variant_name);\n\n let data_struct_ident = Ident::new(&data_struct_str, Span::call_site());\n\n\n\n from_name_and_props_match_arms.push(quote! {\n\n #block_name => {\n\n let data = #data_struct_ident::from_map(props)?;\n\n Some(Block::#variant_ident(data))\n\n }\n\n });\n\n\n\n // to_name_and_props: properties\n", "file_path": "generator/src/rust.rs", "rank": 24, "score": 293740.2629911649 }, { "content": "pub fn block_to_item(block: BlockId) -> Option<Item> {\n\n match block.kind() {\n\n BlockKind::Air => Some(Item::Air),\n\n BlockKind::Stone => Some(Item::Stone),\n\n BlockKind::Granite => Some(Item::Granite),\n\n BlockKind::PolishedGranite => Some(Item::PolishedGranite),\n\n BlockKind::Diorite => Some(Item::Diorite),\n\n BlockKind::PolishedDiorite => Some(Item::PolishedDiorite),\n\n BlockKind::Andesite => Some(Item::Andesite),\n\n BlockKind::PolishedAndesite => Some(Item::PolishedAndesite),\n\n BlockKind::GrassBlock => Some(Item::GrassBlock),\n\n BlockKind::Dirt => Some(Item::Dirt),\n\n BlockKind::CoarseDirt => Some(Item::CoarseDirt),\n\n BlockKind::Podzol => Some(Item::Podzol),\n\n BlockKind::Cobblestone => Some(Item::Cobblestone),\n\n BlockKind::OakPlanks => Some(Item::OakPlanks),\n\n BlockKind::SprucePlanks => 
Some(Item::SprucePlanks),\n\n BlockKind::BirchPlanks => Some(Item::BirchPlanks),\n\n BlockKind::JunglePlanks => Some(Item::JunglePlanks),\n\n BlockKind::AcaciaPlanks => Some(Item::AcaciaPlanks),\n", "file_path": "core/item_block/src/mappings.rs", "rank": 25, "score": 284938.6107486842 }, { "content": "fn property_value_as_u16(value: &str, index: usize, kind: &PropertyKind) -> u16 {\n\n let start = match kind {\n\n PropertyKind::Integer { range } => *range.start() as u16,\n\n _ => 0,\n\n };\n\n\n\n if let Ok(x) = i32::from_str(value) {\n\n x as u16 - start\n\n } else if let Ok(x) = bool::from_str(value) {\n\n x as u16\n\n } else {\n\n index as u16\n\n }\n\n}\n", "file_path": "core/blocks/generator/src/lib.rs", "rank": 26, "score": 273359.7786403183 }, { "content": "fn load_block(identifier: &str, block: &BlockDefinition) -> anyhow::Result<Option<Block>> {\n\n let identifier = strip_prefix(identifier)?;\n\n\n\n let name_camel_case = identifier.to_camel_case();\n\n\n\n let properties = load_block_properties(block);\n\n\n\n let index_parameters = load_block_index_parameters(block, &properties);\n\n\n\n let ids = load_block_ids(block);\n\n\n\n let default_state = block\n\n .states\n\n .iter()\n\n .find(|state| state.default)\n\n .map(|state| state.properties.clone())\n\n .unwrap_or_default()\n\n .into_iter()\n\n .collect();\n\n\n", "file_path": "core/blocks/generator/src/load.rs", "rank": 27, "score": 272422.48598898464 }, { "content": "fn data_exists(path: &Path) -> anyhow::Result<bool> {\n\n Ok(File::open(path.join(\"server.jar\")).is_ok()\n\n && File::open(path.join(\"assets\")).is_ok()\n\n && File::open(path.join(\"data\")).is_ok()\n\n && File::open(path.join(\"generated\")).is_ok())\n\n}\n\n\n", "file_path": "data/build.rs", "rank": 28, "score": 263081.60863192 }, { "content": "/// Creates a new player from the given `NewClientInfo`.\n\n///\n\n/// This function also triggers events for the player join.\n\npub fn create(game: &mut Game, world: &mut World, info: 
NewClientInfo) -> Entity {\n\n // TODO: blocked on https://github.com/TomGillen/legion/issues/36\n\n let entity = info.entity;\n\n world.add(entity, EntityId(entity::new_id())).unwrap();\n\n world.add(entity, info.position).unwrap();\n\n world.add(entity, PreviousPosition(info.position)).unwrap();\n\n world.add(entity, info.uuid).unwrap();\n\n world\n\n .add(\n\n entity,\n\n Network {\n\n tx: info.sender,\n\n rx: info.receiver.into(),\n\n },\n\n )\n\n .unwrap();\n\n world.add(entity, info.ip).unwrap();\n\n world.add(entity, ProfileProperties(info.profile)).unwrap();\n\n world.add(entity, Name(info.username)).unwrap();\n\n world.add(entity, ChunkHolder::default()).unwrap();\n", "file_path": "server/player/src/lib.rs", "rank": 29, "score": 261494.26738043316 }, { "content": "/// Creates the region file at the given region position and initializes\n\n/// a handle.\n\n///\n\n/// The world directory should be the root directory\n\n/// of the world, e.g. `${SERVER_DIR}/world` for\n\n/// normal servers.\n\n///\n\n/// # Warning\n\n/// If the region file already exist, it will be __overwritten__.\n\n/// Care must be taken to ensure that this function is only called\n\n/// for nonexistent regions.\n\npub fn create_region(dir: &PathBuf, pos: RegionPosition) -> Result<RegionHandle, Error> {\n\n create_region_dir(dir).map_err(Error::Io)?;\n\n let mut file = {\n\n let buf = region_file_path(dir, pos);\n\n\n\n open_opts().create(true).open(buf.as_path())\n\n }\n\n .map_err(Error::Io)?;\n\n\n\n let header = RegionHeader::default();\n\n header.write_to(&mut file).map_err(Error::Io)?;\n\n\n\n let allocator = SectorAllocator::new(&header, 2);\n\n Ok(RegionHandle {\n\n file,\n\n header,\n\n allocator,\n\n })\n\n}\n\n\n", "file_path": "core/anvil/src/region/mod.rs", "rank": 30, "score": 261242.2355549076 }, { "content": "/// Loads the region at the specified position\n\n/// from the specified world directory.\n\n///\n\n/// The world directory should be the root directory\n\n/// of the 
world, e.g. `${SERVER_DIR}/world` for\n\n/// normal servers.\n\n///\n\n/// This function does not actually load all the chunks\n\n/// in the region into memory; it only reads the file's\n\n/// header so that chunks can be retrieved later.\n\npub fn load_region(dir: &PathBuf, pos: RegionPosition) -> Result<RegionHandle, Error> {\n\n let mut file = {\n\n let buf = region_file_path(dir, pos);\n\n\n\n open_opts()\n\n .create(false)\n\n .open(buf.as_path())\n\n .map_err(Error::Io)?\n\n };\n\n\n\n let header = read_header(&mut file)?;\n\n\n\n let num_sectors = file.metadata().map_err(Error::Io)?.len() / SECTOR_BYTES as u64;\n\n\n\n let allocator = SectorAllocator::new(&header, num_sectors as u32);\n\n\n\n Ok(RegionHandle {\n\n file,\n\n header,\n\n allocator,\n\n })\n\n}\n\n\n", "file_path": "core/anvil/src/region/mod.rs", "rank": 31, "score": 261241.0333832187 }, { "content": "/// Strips the minecraft: prefix from a block identifier.\n\nfn strip_prefix(x: &str) -> anyhow::Result<&str> {\n\n const PREFIX: &str = \"minecraft:\";\n\n\n\n if x.len() <= PREFIX.len() {\n\n anyhow::bail!(\"missing minecraft: prefix for block {}\", x);\n\n }\n\n\n\n Ok(&x[PREFIX.len()..])\n\n}\n\n\n", "file_path": "core/blocks/generator/src/load.rs", "rank": 32, "score": 260420.21541716403 }, { "content": "fn write_to_file(path: impl AsRef<str>, s: impl AsRef<str>) {\n\n File::create(path.as_ref())\n\n .unwrap()\n\n .write_all(s.as_ref().as_bytes())\n\n .unwrap();\n\n}\n", "file_path": "core/blocks/build.rs", "rank": 33, "score": 257057.3923860939 }, { "content": "/// Generates the global internal ID offsets array\n\n/// which contains mappings from internal block type\n\n/// IDs to their respective offsets.\n\n/// To calculate the internal state ID of a block state,\n\n/// add the block type ID offset from this from this array\n\n/// to the internal_id_data_offset generated above.\n\nfn generate_internal_id_offsets(report: &BlockReport) -> TokenStream {\n\n let mut entries = vec![];\n\n\n\n 
let mut count = 0usize;\n\n for (_, block) in &report.blocks {\n\n entries.push(quote! {\n\n #count\n\n });\n\n\n\n count += block.states.len();\n\n }\n\n\n\n let amnt = report.blocks.len();\n\n\n\n let result = quote! {\n\n const INTERNAL_ID_OFFSETS: [usize; #amnt] = [\n\n #(#entries ,)*\n\n ];\n\n };\n\n result\n\n}\n\n\n", "file_path": "generator/src/rust.rs", "rank": 34, "score": 255869.42052691418 }, { "content": "pub fn item_to_block(item: Item) -> Option<BlockId> {\n\n match item {\n\n Item::Air => Some(BlockId::air()),\n\n Item::Stone => Some(BlockId::stone()),\n\n Item::Granite => Some(BlockId::granite()),\n\n Item::PolishedGranite => Some(BlockId::polished_granite()),\n\n Item::Diorite => Some(BlockId::diorite()),\n\n Item::PolishedDiorite => Some(BlockId::polished_diorite()),\n\n Item::Andesite => Some(BlockId::andesite()),\n\n Item::PolishedAndesite => Some(BlockId::polished_andesite()),\n\n Item::GrassBlock => Some(BlockId::grass_block()),\n\n Item::Dirt => Some(BlockId::dirt()),\n\n Item::CoarseDirt => Some(BlockId::coarse_dirt()),\n\n Item::Podzol => Some(BlockId::podzol()),\n\n Item::Cobblestone => Some(BlockId::cobblestone()),\n\n Item::OakPlanks => Some(BlockId::oak_planks()),\n\n Item::SprucePlanks => Some(BlockId::spruce_planks()),\n\n Item::BirchPlanks => Some(BlockId::birch_planks()),\n\n Item::JunglePlanks => Some(BlockId::jungle_planks()),\n\n Item::AcaciaPlanks => Some(BlockId::acacia_planks()),\n", "file_path": "core/item_block/src/mappings.rs", "rank": 35, "score": 249435.63423345913 }, { "content": "/// Returns the bounding box for the given block.\n\n///\n\n/// Non-solid blocks have no bounding box,\n\n/// and the bounding box for a non-solid block\n\n/// is undefined.\n\npub fn bbox_for_block(block: BlockId) -> AABB<f64> {\n\n match block.kind() {\n\n BlockKind::WhiteBed\n\n | BlockKind::OrangeBed\n\n | BlockKind::MagentaBed\n\n | BlockKind::LightBlueBed\n\n | BlockKind::YellowBed\n\n | BlockKind::LimeBed\n\n | BlockKind::PinkBed\n\n | 
BlockKind::GrayBed\n\n | BlockKind::LightGrayBed\n\n | BlockKind::CyanBed\n\n | BlockKind::PurpleBed\n\n | BlockKind::BlueBed\n\n | BlockKind::BrownBed\n\n | BlockKind::GreenBed\n\n | BlockKind::RedBed\n\n | BlockKind::BlackBed\n\n | BlockKind::PrismarineSlab\n\n | BlockKind::PrismarineBrickSlab\n", "file_path": "server/physics/src/block_bboxes.rs", "rank": 36, "score": 248093.28368489048 }, { "content": "/// Returns an `ncollide` `Cuboid` corresponding to the given block.\n\npub fn block_shape(block: BlockId) -> Cuboid<f64> {\n\n let bbox = bbox_for_block(block);\n\n Cuboid::new(bbox.half_extents())\n\n}\n\n\n", "file_path": "server/physics/src/math.rs", "rank": 37, "score": 247886.57428554824 }, { "content": "fn try_get_entry<B>(buf: &mut B) -> anyhow::Result<MetaEntry>\n\nwhere\n\n B: Buf + McTypeRead,\n\n{\n\n let id = buf.try_get_var_int()?;\n\n\n\n Ok(match id {\n\n 0 => MetaEntry::Byte(buf.try_get_i8()?),\n\n 1 => MetaEntry::VarInt(buf.try_get_var_int()?),\n\n 2 => MetaEntry::Float(buf.try_get_f32()?),\n\n 3 => MetaEntry::String(buf.try_get_string()?),\n\n 4 => MetaEntry::Chat(buf.try_get_string()?),\n\n 5 => MetaEntry::OptChat(if buf.try_get_bool()? 
{\n\n Some(buf.try_get_string()?)\n\n } else {\n\n None\n\n }),\n\n 6 => MetaEntry::Slot(buf.try_get_slot()?),\n\n 7 => MetaEntry::Boolean(buf.try_get_bool()?),\n\n 8 => MetaEntry::Rotation(buf.try_get_f32()?, buf.try_get_f32()?, buf.try_get_f32()?),\n", "file_path": "core/network/src/mctypes.rs", "rank": 38, "score": 245853.94654782588 }, { "content": "fn handle_request(ih: &mut InitialHandler, packet: &Request) -> Result<(), Error> {\n\n check_stage(ih, Stage::AwaitRequest, packet.ty())?;\n\n let server_icon = (*ih.server_icon).clone().unwrap_or_default();\n\n\n\n // Send response packet\n\n let json = serde_json::json!({\n\n \"version\": {\n\n \"name\": SERVER_VERSION,\n\n \"protocol\": PROTOCOL_VERSION,\n\n },\n\n \"players\": {\n\n \"max\": ih.config.server.max_players,\n\n \"online\": ih.player_count.load(Ordering::SeqCst),\n\n },\n\n \"description\": {\n\n \"text\": ih.config.server.motd,\n\n },\n\n \"favicon\": server_icon,\n\n });\n\n\n\n let response = Response {\n\n json_response: json.to_string(),\n\n };\n\n send_packet(ih, response);\n\n\n\n ih.stage = Stage::AwaitPing;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "server/network/src/initial_handler.rs", "rank": 39, "score": 245591.31752581772 }, { "content": "fn handle_handshake(ih: &mut InitialHandler, packet: &Handshake) -> Result<(), Error> {\n\n check_stage(ih, Stage::AwaitHandshake, packet.ty())?;\n\n\n\n ih.stage = match packet.next_state {\n\n HandshakeState::Status => {\n\n ih.action_queue.push(Action::SetStage(PacketStage::Status));\n\n Stage::AwaitRequest\n\n }\n\n HandshakeState::Login => {\n\n // While status requests can use differing\n\n // protocol versions, a client\n\n // needs to have a matching protocol version\n\n // to log in.\n\n if packet.protocol_version != PROTOCOL_VERSION {\n\n return Err(Error::InvalidProtocol(packet.protocol_version));\n\n }\n\n\n\n // If the server has BungeeCord proxy mode enabled, extract the data that is submitted\n\n // by BungeeCord if IP forwarding is 
enabled.\n\n if ih.config.proxy.proxy_mode == ProxyMode::BungeeCord {\n", "file_path": "server/network/src/initial_handler.rs", "rank": 40, "score": 245591.31752581772 }, { "content": "fn handle_ping(ih: &mut InitialHandler, packet: &Ping) -> Result<(), Error> {\n\n check_stage(ih, Stage::AwaitPing, packet.ty())?;\n\n\n\n let pong = Pong {\n\n payload: packet.payload,\n\n };\n\n send_packet(ih, pong);\n\n\n\n // After sending pong, we should disconnect.\n\n ih.action_queue.push(Action::Disconnect);\n\n ih.stage = Stage::Finished;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "server/network/src/initial_handler.rs", "rank": 41, "score": 245591.31752581772 }, { "content": "pub fn generate_mappings_file(\n\n report: ItemReport,\n\n write_string_ids: bool,\n\n) -> Result<Vec<u8>, Error> {\n\n let mut buf = Vec::new();\n\n\n\n buf.write_all(b\"FEATHER_ITEM_DATA_FILE\")?;\n\n\n\n // TODO handle non-native files\n\n assert!(write_string_ids, \"unimplemented\");\n\n\n\n let len = report.mappings.len();\n\n buf.write_u32::<LittleEndian>(len as u32)?;\n\n\n\n for (item_name, item) in report.mappings {\n\n let id = item.protocol_id;\n\n buf.write_string(&item_name)?;\n\n buf.write_i32::<LittleEndian>(id)?;\n\n }\n\n\n\n Ok(buf)\n\n}\n", "file_path": "generator/src/item/mappings.rs", "rank": 43, "score": 243613.17905483127 }, { "content": "#[fecs::system]\n\npub fn spawn_falling_blocks(game: &mut Game, world: &mut World) {\n\n let mut actions = BumpVec::new_in(game.bump());\n\n\n\n actions.extend(\n\n <(Read<BlockNotifyBlock>, Read<BlockNotifyPosition>)>::query()\n\n .filter(component::<BlockNotifyFallingBlock>())\n\n .iter_entities(world.inner())\n\n .map(|(entity, (block, position))| {\n\n let builder = if game.block_at(position.0 - BlockPosition::new(0, 1, 0))\n\n == Some(BlockId::air())\n\n {\n\n Some(\n\n create(block.0, position.0)\n\n .with(position.0.position() + position!(0.5, 0.0, 0.5)),\n\n )\n\n } else {\n\n None\n\n };\n\n\n\n (entity, builder, position.0)\n", 
"file_path": "server/entity/src/object/falling_block.rs", "rank": 44, "score": 242981.23189409412 }, { "content": "fn read_section_into_chunk(section: &LevelSection, chunk: &mut Chunk) -> Result<(), Error> {\n\n let data = &section.states;\n\n\n\n // Create palette\n\n let mut palette = vec![];\n\n for entry in &section.palette {\n\n // Construct properties map\n\n let mut props = BTreeMap::new();\n\n if let Some(entry_props) = entry.props.as_ref() {\n\n props.extend(\n\n entry_props\n\n .props\n\n .iter()\n\n .map(|(k, v)| (k.clone().into_owned(), v.clone().into_owned())),\n\n );\n\n }\n\n\n\n // Attempt to get block from the given values\n\n let block = BlockId::from_identifier_and_properties(&entry.name, &props)\n\n .ok_or_else(|| Error::InvalidBlock(entry.name.deref().to_owned()))?;\n", "file_path": "core/anvil/src/region/mod.rs", "rank": 45, "score": 242157.20627215528 }, { "content": "fn handle_login_start(ih: &mut InitialHandler, packet: &LoginStart) -> Result<(), Error> {\n\n check_stage(ih, Stage::AwaitLoginStart, packet.ty())?;\n\n\n\n if ih.player_count.load(Ordering::Acquire) >= ih.config.server.max_players as u32 {\n\n disconnect_login(ih, \"Server is full!\");\n\n return Ok(());\n\n }\n\n\n\n // If in online mode, encryption needs to be enabled,\n\n // and authentication needs to be performed.\n\n // If not in online mode, the login sequence is\n\n // already finished, so we can call `finish` after\n\n // setting the player's info.\n\n if ih.config.server.online_mode {\n\n use num_bigint_dig::{BigInt, Sign::Plus};\n\n // Start enabling encryption\n\n let der = der::public_key_to_der(\n\n &BigInt::from_biguint(Plus, RSA_KEY.n().clone()).to_signed_bytes_be(),\n\n &BigInt::from_biguint(Plus, RSA_KEY.e().clone()).to_signed_bytes_be(),\n\n );\n", "file_path": "server/network/src/initial_handler.rs", "rank": 47, "score": 238887.82566744785 }, { "content": "/// Tries to extract the player information that is sent in the `server_address` field of a\n\n/// 
Handshake packet that originates from a BungeeCord style proxy. This is used to enable IP\n\n/// forwarding for BungeeCord style proxies.\n\n///\n\n/// The server address field should have 4 parts if a client is connecting via BungeeCord. The field\n\n/// has the following format:\n\n///\n\n/// format!(\"{}\\0{}\\0{}\\0{}\", host, address, uuid, mojang_response);\n\n///\n\n/// | Variable | Definition |\n\n/// |-----------------|-----------------------------------------------------|\n\n/// | Host | The IP address of the BungeeCord instance |\n\n/// | Address | The IP address of the connecting client |\n\n/// | UUID | The UUID that is associated to the clients account |\n\n/// | Mojang response | A JSON formatted version of the `properties` field\n\n/// in [Mojangs response](https://wiki.vg/Protocol_Encryption#Server) |\n\nfn extract_bungeecord_data(packet: &Handshake) -> Result<BungeeCordData, Error> {\n\n let bungee_information: Vec<&str> = packet.server_address.split('\\0').collect();\n\n Ok(BungeeCordData::from_vec(&bungee_information)?)\n\n}\n\n\n", "file_path": "server/network/src/initial_handler.rs", "rank": 48, "score": 238456.5791984623 }, { "content": "fn write_entry_to_buf<B>(entry: &MetaEntry, buf: &mut B)\n\nwhere\n\n B: BytesMutExt + McTypeWrite,\n\n{\n\n match entry {\n\n MetaEntry::Byte(x) => buf.push_i8(*x),\n\n MetaEntry::VarInt(x) => {\n\n buf.push_var_int(*x);\n\n }\n\n MetaEntry::Float(x) => buf.push_f32(*x),\n\n MetaEntry::String(x) => buf.push_string(x),\n\n MetaEntry::Chat(x) => buf.push_string(x),\n\n MetaEntry::OptChat(ox) => {\n\n if let Some(x) = ox {\n\n buf.push_bool(true);\n\n buf.push_string(x);\n\n } else {\n\n buf.push_bool(false);\n\n }\n\n }\n", "file_path": "core/network/src/mctypes.rs", "rank": 49, "score": 238044.23922179808 }, { "content": "fn parse_report() -> anyhow::Result<BlocksReport> {\n\n let report = serde_json::from_slice(feather_data::minecraft::BLOCKS)?;\n\n\n\n Ok(report)\n\n}\n", "file_path": 
"core/blocks/generator/src/load.rs", "rank": 50, "score": 236846.39394932962 }, { "content": "#[fecs::event_handler]\n\npub fn on_block_update_broadcast(event: &BlockUpdateEvent, game: &mut Game, world: &mut World) {\n\n // Broadcast Block Change packet.\n\n let packet = BlockChange {\n\n location: event.pos,\n\n block_id: event.new.vanilla_id() as i32,\n\n };\n\n game.broadcast_chunk_update(world, packet, event.pos.into(), None);\n\n}\n", "file_path": "server/player/src/broadcasters/block.rs", "rank": 51, "score": 235156.03295191756 }, { "content": "fn run() -> Result<(), Error> {\n\n let yaml = load_yaml!(\"cli.yml\");\n\n let matches = App::from_yaml(yaml).get_matches();\n\n\n\n match matches.subcommand_name() {\n\n Some(\"block-mappings\") => {\n\n let args = matches.subcommand_matches(\"block-mappings\").unwrap();\n\n block_data::generate_mappings_file(\n\n args.value_of(\"input\").unwrap(),\n\n args.value_of(\"output\").unwrap(),\n\n args.value_of(\"native\").unwrap(),\n\n u32::from_str(args.value_of(\"proto\").unwrap())?,\n\n args.value_of(\"ver\").unwrap(),\n\n )?;\n\n }\n\n Some(\"native-block-mappings\") => {\n\n let args = matches.subcommand_matches(\"native-block-mappings\").unwrap();\n\n block_data::generate_native_mappings_file(\n\n args.value_of(\"input\").unwrap(),\n\n args.value_of(\"output\").unwrap(),\n", "file_path": "generator/src/main.rs", "rank": 52, "score": 234122.61109868248 }, { "content": "/// Runs the main game loop.\n\nfn run_loop(state: &mut FullState) {\n\n let mut loop_helper = LoopHelper::builder().build_with_target_rate(TPS as f64);\n\n loop {\n\n if state.shutdown_rx.try_recv().is_ok() {\n\n // Shut down\n\n return;\n\n }\n\n\n\n loop_helper.loop_start();\n\n\n\n // Execute all systems\n\n state\n\n .executor\n\n .execute(state.resources.deref(), &mut state.world);\n\n // Clean up world\n\n state.world.defrag(Some(256)); // should this be done at an interval rate?\n\n\n\n loop_helper.loop_sleep();\n\n }\n\n}\n", "file_path": 
"server/src/lib.rs", "rank": 53, "score": 229832.79536730616 }, { "content": "pub fn ident(x: impl AsRef<str>) -> Ident {\n\n Ident::new(x.as_ref(), Span::call_site()) // span doesn't matter as this is not a proc macro\n\n}\n\n\n", "file_path": "core/blocks/generator/src/load.rs", "rank": 54, "score": 229610.14884167793 }, { "content": "fn create_region_dir(dir: &PathBuf) -> Result<(), io::Error> {\n\n let mut dir = dir.clone();\n\n dir.push(\"region\");\n\n fs::create_dir_all(dir.as_path())\n\n}\n\n\n", "file_path": "core/anvil/src/region/mod.rs", "rank": 55, "score": 228469.87442569024 }, { "content": "pub fn correct_variable_name(name: &str) -> &str {\n\n match name {\n\n \"type\" => \"ty\",\n\n \"in\" => \"_in\",\n\n name => name,\n\n }\n\n}\n\n\n\n/// A property value type.\n\n#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]\n\npub enum PropValueType {\n\n Enum,\n\n I32,\n\n Bool,\n\n}\n\n\n\nimpl PropValueType {\n\n pub fn guess_from_value(value: &str) -> Self {\n\n if i32::from_str(value).is_ok() {\n\n PropValueType::I32\n\n } else if bool::from_str(value).is_ok() {\n\n PropValueType::Bool\n\n } else {\n\n PropValueType::Enum // Custom enum\n\n }\n\n }\n\n}\n\n\n", "file_path": "generator/src/rust.rs", "rank": 56, "score": 227812.29071377768 }, { "content": "#[proc_macro]\n\npub fn include_data(input: proc_macro::TokenStream) -> proc_macro::TokenStream {\n\n let input: LitStr = parse_macro_input!(input as LitStr);\n\n let build_dir = env::var(\"OUT_DIR\").unwrap();\n\n\n\n let path = PathBuf::from(build_dir).join(input.value());\n\n if !path.exists() {\n\n panic!(\"Path \\\"{}\\\" does not exist.\", path.display());\n\n }\n\n let (dirs_files, _) = include_dirs_files(path);\n\n dirs_files.into()\n\n}\n\n\n", "file_path": "data/macro/src/lib.rs", "rank": 57, "score": 227744.31670034886 }, { "content": "/// Returns the serialized state ID map.\n\nfn serialized_vanilla_ids(blocks: &Blocks) -> Vec<u8> {\n\n let table = 
VanillaStateIdSerialize::new(blocks);\n\n\n\n bincode::serialize(&table).expect(\"bincode failed to serialize vanilla ID table\")\n\n}\n\n\n\n/// Serializable state ID table.\n", "file_path": "core/blocks/generator/src/lib.rs", "rank": 58, "score": 223196.19137920628 }, { "content": "pub fn save_player_data(game: &Game, world: &World) -> anyhow::Result<()> {\n\n <Read<Player>>::query().for_each_entities(&world.inner(), |(player, _)| {\n\n feather_server_chunk::save_player_data(game, world, player);\n\n });\n\n\n\n Ok(())\n\n}\n\n\n\npub async fn wait_for_task_completion(game: &Game) -> anyhow::Result<()> {\n\n game.running_tasks.wait().await;\n\n Ok(())\n\n}\n\n\n", "file_path": "server/src/shutdown.rs", "rank": 59, "score": 221224.57854483108 }, { "content": "/// Strips away the \"minecraft:\" prefix from a item string ID.\n\nfn strip_prefix(val: &str) -> String {\n\n val[10..].to_string()\n\n}\n\n\n", "file_path": "generator/src/item/rust.rs", "rank": 60, "score": 217563.6326795378 }, { "content": "/// Returns the enum variant name for the given item identifier.\n\nfn variant_name(identifier: &str) -> String {\n\n strip_prefix(identifier).to_camel_case()\n\n}\n\n\n", "file_path": "generator/src/item/rust.rs", "rank": 61, "score": 217553.1922083596 }, { "content": "fn load(data: EntityData) -> anyhow::Result<EntityBuilder> {\n\n match data {\n\n EntityData::Item(data) => {\n\n let pos = data.entity.read_position()?;\n\n let vel = data.entity.read_velocity()?;\n\n\n\n let stack = ItemStack::new(\n\n Item::from_identifier(&data.item.item)\n\n .ok_or_else(|| anyhow::anyhow!(\"invalid item {}\", data.item.item))?,\n\n data.item.count,\n\n );\n\n\n\n let collectable_at = data.pickup_delay;\n\n\n\n Ok(create(stack, collectable_at as u64)\n\n .with(pos)\n\n .with(Velocity(glm::vec3(vel.x, vel.y, vel.z))))\n\n }\n\n _ => panic!(\"attempted to use item::load to load a non-item\"),\n\n }\n\n}\n", "file_path": "server/entity/src/object/item.rs", "rank": 62, "score": 
209070.11945997254 }, { "content": "/// Saves the chunk at the specified position.\n\nfn save_chunk(worker: &mut ChunkWorker, chunk: &Chunk, entities: Vec<EntityData>) {\n\n let rpos = RegionPosition::from_chunk(chunk.position());\n\n\n\n let file = worker_region(&mut worker.open_regions, &worker.dir, rpos);\n\n\n\n file.handle.save_chunk(chunk, entities).unwrap();\n\n worker\n\n .sender\n\n .send(Reply::SavedChunk(chunk.position()))\n\n .unwrap();\n\n}\n\n\n", "file_path": "server/chunk/src/chunk_worker.rs", "rank": 63, "score": 207321.74550417112 }, { "content": "#[fecs::system]\n\npub fn entity_physics(game: &mut Game, world: &mut World) {\n\n // Go through entities and update their positions according\n\n // to their velocities.\n\n let land_events = Mutex::new(vec![]);\n\n\n\n let query = <(Write<Position>, Write<Velocity>, Read<Physics>)>::query();\n\n query.par_entities_for_each_mut(\n\n world.inner_mut(),\n\n |(entity, (mut position, mut velocity, physics))| {\n\n let mut pending_position = *position + velocity.0;\n\n\n\n // Check for blocks along path between old position and pending position.\n\n // This prevents entities from flying through blocks when their\n\n // velocity is sufficiently high.\n\n let origin = (*position).into();\n\n let direction = (pending_position - *position).into();\n\n let distance_squared = pending_position.distance_squared_to(*position);\n\n\n\n if let Some(impacted) = block_impacted_by_ray(game, origin, direction, distance_squared)\n\n {\n", "file_path": "server/physics/src/entity.rs", "rank": 64, "score": 207213.00655715755 }, { "content": "#[fecs::system]\n\npub fn update_weather(game: &mut Game, world: &mut World) {\n\n if game.level.clear_weather_time >= 0 {\n\n game.level.clear_weather_time -= 1;\n\n return;\n\n }\n\n\n\n let from = get_weather(game);\n\n\n\n game.level.rain_time -= 1;\n\n let mut to = if game.level.rain_time <= 0 {\n\n if game.level.raining {\n\n Weather::Clear\n\n } else {\n\n Weather::Rain\n\n }\n\n } 
else {\n\n from\n\n };\n\n\n\n game.level.thunder_time -= 1;\n", "file_path": "server/weather/src/lib.rs", "rank": 65, "score": 207213.00655715755 }, { "content": "/// Returns the set of block positions adjacent to a given position.\n\npub fn adjacent_blocks(pos: BlockPosition) -> ArrayVec<[BlockPosition; 6]> {\n\n [\n\n pos + BlockPosition::new(1, 0, 0),\n\n pos + BlockPosition::new(0, 1, 0),\n\n pos + BlockPosition::new(0, 0, 1),\n\n pos + BlockPosition::new(-1, 0, 0),\n\n pos + BlockPosition::new(0, -1, 0),\n\n pos + BlockPosition::new(0, 0, -1),\n\n ]\n\n .iter()\n\n .filter(|pos| pos.y >= 0 && pos.y < 256)\n\n .copied()\n\n .collect()\n\n}\n\n\n", "file_path": "server/util/src/lib.rs", "rank": 66, "score": 206826.40771301236 }, { "content": "fn handle_block_update(worker: &mut Worker, pos: BlockPosition, old: BlockId, new: BlockId) {\n\n let mut ctx = match Context::new(&worker.chunk_map, pos.chunk()) {\n\n Some(ctx) => ctx,\n\n None => return, // Unloaded chunk\n\n };\n\n\n\n // Determine which algorithm to use.\n\n if old.light_emission() < new.light_emission() {\n\n ctx.set_block_light_at(pos, new.light_emission());\n\n emitting_creation(&mut ctx, pos);\n\n } else if new.light_emission() == 0 && old.light_emission() > 0 {\n\n ctx.set_block_light_at(pos, 0);\n\n emitting_removal(&mut ctx, &worker.lights, pos, old);\n\n } else if old.is_opaque() && !new.is_opaque() {\n\n opaque_non_emitting_removal(&mut ctx, pos);\n\n } else {\n\n opaque_non_emitting_creation(&mut ctx, &worker.lights, pos, new);\n\n }\n\n\n\n // Update `ChunkLights`.\n", "file_path": "server/lighting/src/lib.rs", "rank": 67, "score": 206548.38263423878 }, { "content": "#[fecs::system]\n\npub fn broadcast_velocity(world: &mut World, game: &mut Game) {\n\n <(Read<Velocity>, Read<PreviousVelocity>, Read<EntityId>)>::query().par_entities_for_each(\n\n world.inner(),\n\n |(entity, (vel, prev_vel, entity_id))| {\n\n let entity_id = entity_id.0;\n\n\n\n if vel.0 == prev_vel.0 {\n\n return;\n\n 
}\n\n\n\n let (velocity_x, velocity_y, velocity_z) = protocol_velocity(vel.0);\n\n\n\n if velocity_x == 0 && velocity_y == 0 && velocity_z == 0 {\n\n return;\n\n }\n\n\n\n let packet = EntityVelocity {\n\n entity_id,\n\n velocity_x,\n\n velocity_y,\n\n velocity_z,\n\n };\n\n game.broadcast_entity_update(world, packet, entity, None);\n\n },\n\n );\n\n}\n\n\n\n/// Returns the packet needed to notify a client\n\n/// of a position update, from the old position to the new one.\n", "file_path": "server/entity/src/broadcasters/movement.rs", "rank": 68, "score": 204428.7388257918 }, { "content": "#[fecs::system]\n\npub fn check_crossed_chunks(world: &mut World, game: &mut Game) {\n\n let mut crossed = BumpVec::new_in(game.bump());\n\n for (entity, (pos, prev_pos)) in\n\n <(Read<Position>, Read<PreviousPosition>)>::query().iter_entities(world.inner())\n\n {\n\n if pos.chunk() != prev_pos.0.chunk() {\n\n crossed.push((entity, pos.chunk(), prev_pos.0.chunk()));\n\n }\n\n }\n\n\n\n for (entity, new, old) in crossed {\n\n game.handle(\n\n world,\n\n ChunkCrossEvent {\n\n entity,\n\n old: Some(old),\n\n new,\n\n },\n\n );\n\n }\n\n}\n\n\n\n/// Triggers a chunk cross when a new player joins.\n", "file_path": "server/player/src/view.rs", "rank": 69, "score": 204428.73882579184 }, { "content": "#[fecs::system]\n\npub fn broadcast_movement(game: &mut Game, world: &mut World) {\n\n <(Read<Position>, Read<PreviousPosition>, Read<EntityId>)>::query().par_entities_for_each(\n\n world.inner(),\n\n |(entity, (pos, prev_pos, id))| {\n\n let pos: Position = *pos;\n\n let prev_pos: Position = prev_pos.0;\n\n\n\n if pos == prev_pos {\n\n return;\n\n }\n\n\n\n let entity_id = id.0;\n\n\n\n let chunk = pos.chunk();\n\n let players = game.chunk_holders.holders_for(chunk);\n\n\n\n for player in players.iter().filter(|player| **player != entity) {\n\n if let Some(network) = world.try_get::<Network>(*player) {\n\n let last_known_positions = world.get::<LastKnownPositions>(*player);\n\n let 
last_known_positions = last_known_positions.deref();\n", "file_path": "server/entity/src/broadcasters/movement.rs", "rank": 70, "score": 204428.7388257918 }, { "content": "#[fecs::system]\n\npub fn item_collect(game: &mut Game, world: &mut World) {\n\n // run every 1/10 second\n\n if game.tick_count % (TPS / 10) != 0 {\n\n return;\n\n }\n\n\n\n let items_to_remove = Mutex::new(vec![]);\n\n let inventory_update_events = Mutex::new(vec![]);\n\n let item_collect_events = Mutex::new(vec![]);\n\n\n\n // For each player, check for nearby items and try collecting them\n\n // Safety: we only iterate over entities which are players,\n\n // and we only access item entities inside the loop. As such,\n\n // we will not have multiple mutable references to the same component.\n\n unsafe {\n\n <(Read<Position>, Write<Inventory>)>::query()\n\n .filter(component::<Player>())\n\n .par_entities_for_each_unchecked(world.inner(), |(player, (pos, mut inventory))| {\n\n let inventory: &mut Inventory = &mut *inventory;\n\n\n", "file_path": "server/entity/src/object/item.rs", "rank": 71, "score": 204428.73882579184 }, { "content": "#[fecs::system]\n\npub fn poll_player_disconnect(game: &mut Game, world: &mut World) {\n\n // For each player with a Network component,\n\n // check their channel for disconnects.\n\n let mut to_despawn = BumpVec::new_in(game.bump());\n\n <Read<Network>>::query()\n\n .iter_entities(world.inner())\n\n .for_each(|(entity, network)| {\n\n while let Ok(msg) = network.rx.lock().try_recv() {\n\n match msg {\n\n WorkerToServerMessage::NotifyDisconnected { reason } => {\n\n to_despawn.push((entity, reason));\n\n }\n\n }\n\n }\n\n });\n\n\n\n to_despawn.into_iter().for_each(|(player, reason)| {\n\n game.disconnect(player, world, reason);\n\n });\n\n}\n\n\n\n/// System which polls for new clients from the listener task.\n", "file_path": "server/player/src/join.rs", "rank": 72, "score": 204428.7388257918 }, { "content": "#[fecs::system]\n\npub fn increment_time(game: &mut 
Game) {\n\n game.time.0 += 1;\n\n}\n\n\n\n/// Event handler for sending world time to players.\n", "file_path": "server/util/src/time.rs", "rank": 73, "score": 203976.16835332988 }, { "content": "#[allow(unused)]\n\npub fn clear_weather(game: &mut Game) {\n\n let duration = game\n\n .rng()\n\n .gen_range(TICKS_HALF_DAY, TICKS_WEEK + TICKS_HALF_DAY);\n\n set_weather(game, Weather::Clear, duration);\n\n}\n\n\n", "file_path": "server/weather/src/lib.rs", "rank": 74, "score": 203976.16835332982 }, { "content": "#[derive(Debug)]\n\nstruct VanillaStateIdSerialize {\n\n ids: Vec<Vec<u16>>, // indexed by [kind as u16 as usize][state as usize]\n\n}\n\n\n\nimpl Serialize for VanillaStateIdSerialize {\n\n fn serialize<S>(&self, serializer: S) -> Result<<S as Serializer>::Ok, <S as Serializer>::Error>\n\n where\n\n S: Serializer,\n\n {\n\n let mut state = serializer.serialize_seq(Some(self.ids.len()))?;\n\n\n\n for id in &self.ids {\n\n state.serialize_element(id)?;\n\n }\n\n\n\n state.end()\n\n }\n\n}\n\n\n\nimpl VanillaStateIdSerialize {\n", "file_path": "core/blocks/generator/src/lib.rs", "rank": 75, "score": 202482.39317301233 }, { "content": "fn run() -> anyhow::Result<()> {\n\n let path = format!(\"{}/minecraft\", env::var(\"OUT_DIR\").unwrap());\n\n let path = Path::new(&path);\n\n let path_server = path.join(\"server.jar\");\n\n\n\n if data_exists(path).unwrap_or(false) {\n\n println!(\"cargo:rerun-if-changed={}\", &path.display());\n\n return Ok(());\n\n }\n\n\n\n let _ = fs::remove_dir_all(&path);\n\n fs::create_dir_all(&path).context(\"failed to create target directory for downloaded data\")?;\n\n\n\n download(&path_server).context(\"failed to download vanilla server JAR\")?;\n\n generate(&path).context(\n\n \"failed to generate vanilla server reports. (is Java installed and in your PATH?)\",\n\n )?;\n\n extract(&path).context(\"failed to extract vanilla assets. 
(are the Java developer tools (`jar`) installed and in your PATH?)\")?;\n\n\n\n println!(\"cargo:rerun-if-changed={}\", &path.display());\n\n Ok(())\n\n}\n\n\n", "file_path": "data/build.rs", "rank": 76, "score": 202228.84650684704 }, { "content": "pub fn charge_from_ticks_held(ticks: u32) -> f32 {\n\n let ticks = ticks as f32;\n\n\n\n let mut unbounded_force = (ticks * (ticks + 40.0)) / 400.0;\n\n\n\n if unbounded_force > 3.0 {\n\n unbounded_force = 3.0\n\n }\n\n\n\n unbounded_force\n\n}\n\n\n", "file_path": "server/util/src/lib.rs", "rank": 77, "score": 201824.995633727 }, { "content": "#[proc_macro]\n\npub fn entity_metadata(input: TokenStream) -> TokenStream {\n\n entity_metadata::entity_metadata(input)\n\n}\n", "file_path": "codegen/src/lib.rs", "rank": 78, "score": 201692.48097632302 }, { "content": "/// Renames Rust keywords to alternative identifiers.\n\nfn fix_keywords(x: &str) -> &str {\n\n match x {\n\n \"type\" => \"kind\",\n\n x => x,\n\n }\n\n}\n\n\n", "file_path": "core/blocks/generator/src/load.rs", "rank": 79, "score": 201650.18779367558 }, { "content": "pub fn disconnect_players(world: &World) -> anyhow::Result<()> {\n\n <Read<Network>>::query().for_each(world.inner(), |network| {\n\n let packet = DisconnectPlay {\n\n reason: TextRoot::from(Text::from(\"Server closed\")).into(),\n\n };\n\n\n\n network.send(packet);\n\n });\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "server/src/shutdown.rs", "rank": 80, "score": 200670.90582901594 }, { "content": "#[fecs::system]\n\npub fn reset_bump_allocators(game: &mut Game) {\n\n game.bump.iter_mut().for_each(Bump::reset);\n\n}\n\n\n", "file_path": "server/types/src/game.rs", "rank": 81, "score": 200631.76196140604 }, { "content": "#[fecs::system]\n\npub fn chunk_optimize(game: &mut Game) {\n\n // Only run every CHUNK_OPTIMIZE_INTERVAL ticks\n\n if game.tick_count % CHUNK_OPTIMIZE_INTERVAL != 0 {\n\n return;\n\n }\n\n\n\n log::debug!(\"Optimizing chunks\");\n\n\n\n let start_time = current_time_in_millis();\n\n 
let count = AtomicU32::new(0);\n\n\n\n game.chunk_map.0.par_values().for_each(|chunk| {\n\n count.fetch_add(chunk.write().optimize(), Ordering::Relaxed);\n\n });\n\n\n\n let end_time = current_time_in_millis();\n\n let elapsed = end_time - start_time;\n\n\n\n log::debug!(\n\n \"Optimized {} chunk sections (took {}ms - {:.2}ms/section)\",\n\n count.load(Ordering::Relaxed),\n\n elapsed,\n\n elapsed as f64 / f64::from(count.load(Ordering::Relaxed))\n\n );\n\n}\n\n\n", "file_path": "server/chunk/src/chunk_manager.rs", "rank": 82, "score": 200631.76196140604 }, { "content": "#[fecs::system]\n\npub fn increment_tick_count(game: &mut Game) {\n\n game.tick_count += 1;\n\n}\n", "file_path": "server/types/src/game.rs", "rank": 83, "score": 200631.76196140604 }, { "content": "fn create_block_data_struct(\n\n variant_name: &str,\n\n block: &Block,\n\n property_enums: &mut Vec<TokenStream>,\n\n data_structs: &mut Vec<TokenStream>,\n\n) {\n\n let mut data_struct_entries = vec![];\n\n let mut from_map_entries = vec![];\n\n let mut to_map_entries = vec![];\n\n let mut default_impl_entries = vec![];\n\n\n\n let props = &block.properties.as_ref().unwrap();\n\n let states = &block.states;\n\n\n\n for (prop_name_str, possible_values) in &props.props {\n\n let ty = PropValueType::guess_from_value(&possible_values[0]);\n\n\n\n // If type is a custom enum, create the enum type\n\n if ty == PropValueType::Enum {\n\n create_property_enum(variant_name, prop_name_str, possible_values, property_enums);\n", "file_path": "generator/src/rust.rs", "rank": 84, "score": 200498.16127136283 }, { "content": "#[fecs::event_handler]\n\npub fn on_player_leave_save_data(event: &PlayerLeaveEvent, game: &Game, world: &mut World) {\n\n save_player_data(game, world, event.player);\n\n}\n\n\n", "file_path": "server/chunk/src/save.rs", "rank": 85, "score": 200479.26164667 }, { "content": "#[proc_macro_derive(FromSnakeCase)]\n\npub fn derive_from_snake_case(input: TokenStream) -> TokenStream {\n\n let input: 
syn::DeriveInput = syn::parse(input).unwrap();\n\n let name = &input.ident;\n\n\n\n let mut match_arms = vec![];\n\n\n\n match &input.data {\n\n syn::Data::Enum(en) => {\n\n for variant in &en.variants {\n\n let snake_case = variant.ident.to_string().to_snake_case();\n\n let ident = &variant.ident;\n\n match_arms.push(quote! {\n\n #snake_case => Some(#name::#ident)\n\n });\n\n }\n\n }\n\n _ => panic!(\"Can only derive `FromSnakeCase` on enums\"),\n\n }\n\n\n\n let result = quote! {\n", "file_path": "codegen/src/lib.rs", "rank": 86, "score": 198527.19423811126 }, { "content": "#[allow(clippy::cognitive_complexity)] // FIXME: clean this function up\n\npub fn entity_metadata(input: TokenStream) -> TokenStream {\n\n let input: EntityMetadata = syn::parse_macro_input!(input);\n\n\n\n let mut structs = vec![];\n\n let mut enum_variants = vec![];\n\n\n\n let mut to_raw_metadata_arms = vec![];\n\n let mut to_full_raw_metadata_arms = vec![];\n\n\n\n let enum_ident = input.ident.clone();\n\n\n\n for variant in input.variants.values() {\n\n let entries = get_metadata_entries(&input, variant.clone());\n\n\n\n let variant_ident = &variant.ident;\n\n\n\n let mut struct_fields = vec![];\n\n let mut struct_impl = vec![];\n\n let mut to_raw_metadata = vec![];\n\n let mut to_full_raw_metadata = vec![];\n", "file_path": "codegen/src/entity_metadata.rs", "rank": 87, "score": 198527.19423811126 }, { "content": "#[proc_macro_derive(ToSnakeCase)]\n\npub fn derive_to_snake_case(input: TokenStream) -> TokenStream {\n\n let input: syn::DeriveInput = syn::parse(input).unwrap();\n\n let name = &input.ident;\n\n\n\n let mut match_arms = vec![];\n\n\n\n match &input.data {\n\n syn::Data::Enum(en) => {\n\n for variant in &en.variants {\n\n let snake_case = variant.ident.to_string().to_snake_case();\n\n let ident = &variant.ident;\n\n match_arms.push(quote! 
{\n\n #name::#ident => #snake_case.to_string()\n\n });\n\n }\n\n }\n\n _ => panic!(\"Can only derive `ToSnakeCase` on enums\"),\n\n }\n\n\n\n let result = quote! {\n", "file_path": "codegen/src/lib.rs", "rank": 88, "score": 198527.19423811126 }, { "content": "#[fecs::system]\n\npub fn previous_position_velocity_reset(world: &mut World) {\n\n <(Read<Position>, Write<PreviousPosition>)>::query().par_for_each_mut(\n\n world.inner_mut(),\n\n |(pos, mut previous_pos)| {\n\n previous_pos.0 = *pos;\n\n },\n\n );\n\n <(Read<Velocity>, Write<PreviousVelocity>)>::query().par_for_each_mut(\n\n world.inner_mut(),\n\n |(vel, mut previous_vel)| {\n\n previous_vel.0 = vel.0;\n\n },\n\n );\n\n}\n\n\n", "file_path": "server/entity/src/lib.rs", "rank": 89, "score": 197469.64357426163 }, { "content": "/// Returns an `EntityBuilder` for a falling block of the given type.\n\npub fn create(ty: BlockId, spawn_pos: BlockPosition) -> EntityBuilder {\n\n let meta =\n\n EntityMetadata::entity_base().with(META_INDEX_FALLING_BLOCK_SPAWN_POSITION, spawn_pos);\n\n\n\n crate::base()\n\n .with(FallingBlock)\n\n .with(FallingBlockType(ty))\n\n .with(SpawnPacketCreator(&create_spawn_packet))\n\n .with(\n\n PhysicsBuilder::new()\n\n .bbox(0.98, 0.98, 0.98)\n\n .drag(0.98)\n\n .gravity(-0.04)\n\n .build(),\n\n )\n\n .with(meta)\n\n}\n\n\n", "file_path": "server/entity/src/object/falling_block.rs", "rank": 90, "score": 197083.7814197443 }, { "content": "#[fecs::system]\n\npub fn poll_new_clients(game: &mut Game, world: &mut World, io_handle: &mut NetworkIoManager) {\n\n while let Ok(msg) = io_handle.rx.lock().try_recv() {\n\n match msg {\n\n ListenerToServerMessage::NewClient(info) => {\n\n crate::create(game, world, info);\n\n }\n\n ListenerToServerMessage::RequestEntity => {\n\n let entity = world.spawn(iter::once(()))[0];\n\n let _ = io_handle.tx.send(ServerToListenerMessage::Entity(entity));\n\n }\n\n ListenerToServerMessage::DeleteEntity(entity) => {\n\n // no need to use `Game::despawn` here 
as\n\n // the entity hasn't actually \"existed\" yet;\n\n // it has no components\n\n world.despawn(entity);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "server/player/src/join.rs", "rank": 91, "score": 195056.9186806674 }, { "content": "/// Disconnects the initial handler, sending\n\n/// a disconnect packet containing the reason.\n\nfn disconnect_login(ih: &mut InitialHandler, reason: &str) {\n\n let json = serde_json::json!({\n\n \"text\": reason,\n\n })\n\n .to_string();\n\n\n\n let packet = DisconnectLogin { reason: json };\n\n send_packet(ih, packet);\n\n\n\n ih.action_queue.push(Action::Disconnect);\n\n}\n\n\n", "file_path": "server/network/src/initial_handler.rs", "rank": 92, "score": 194703.65673170154 }, { "content": "pub fn chunk_relative_pos(block_pos: BlockPosition) -> (usize, usize, usize) {\n\n (\n\n block_pos.x as usize & 0xf,\n\n block_pos.y as usize,\n\n block_pos.z as usize & 0xf,\n\n )\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn chunk_map_out_of_bounds() {\n\n let mut map = ChunkMap::new();\n\n map.insert(Chunk::new(ChunkPosition::new(0, 0)));\n\n\n\n assert!(map.block_at(BlockPosition::new(0, -1, 0)).is_none());\n\n assert!(map.block_at(BlockPosition::new(0, 0, 0)).is_some());\n\n }\n\n}\n", "file_path": "core/chunk_map/src/lib.rs", "rank": 93, "score": 193353.69039370085 }, { "content": "#[fecs::event_handler]\n\npub fn on_block_update_notify_adjacent(\n\n event: &BlockUpdateEvent,\n\n game: &mut Game,\n\n world: &mut World,\n\n) {\n\n adjacent_blocks(event.pos)\n\n .into_iter()\n\n .chain(iter::once(event.pos))\n\n .filter_map(|adjacent_pos| {\n\n if let Some(adjacent_block) = game.block_at(adjacent_pos) {\n\n Some((adjacent_block, adjacent_pos))\n\n } else {\n\n None\n\n }\n\n })\n\n .filter_map(|(adjacent_block, adjacent_pos)| {\n\n notify_entity_for_block(adjacent_block, adjacent_pos)\n\n })\n\n .for_each(|builder| {\n\n builder.build().spawn_in(world);\n\n })\n\n}\n", "file_path": 
"server/util/src/block.rs", "rank": 94, "score": 192820.58242092846 }, { "content": "/// Can be called at startup to pre-initialize the global block table.\n\npub fn init() {\n\n Lazy::force(&FROM_VANILLA_ID_TABLE);\n\n Lazy::force(&BLOCK_TABLE);\n\n}\n\n\n\nuse once_cell::sync::Lazy;\n\n\n\npub use crate::generated::table::*;\n\npub use crate::generated::BlockKind;\n\n\n\nuse std::collections::HashSet;\n\n\n\nimpl Default for BlockKind {\n\n fn default() -> Self {\n\n BlockKind::Air\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, Default)]\n\npub struct BlockId {\n", "file_path": "core/blocks/src/lib.rs", "rank": 95, "score": 192513.16307923733 }, { "content": "fn enum_ident(variant_name: &str, prop_name: &str) -> Ident {\n\n Ident::new(\n\n &format!(\"{}{}\", variant_name, prop_name.to_camel_case()),\n\n Span::call_site(),\n\n )\n\n}\n\n\n", "file_path": "generator/src/rust.rs", "rank": 96, "score": 190593.13171775878 }, { "content": "fn raw_palette_to_palette_entries(palette: &[BlockId]) -> Vec<LevelPaletteEntry> {\n\n palette\n\n .iter()\n\n .map(|block| {\n\n let props = block.to_properties_map();\n\n let identifier = block.identifier();\n\n\n\n LevelPaletteEntry {\n\n name: identifier.into(),\n\n props: Some(LevelProperties {\n\n props: props\n\n .into_iter()\n\n .map(|(k, v)| (Cow::from(k), Cow::from(v)))\n\n .collect(),\n\n }),\n\n }\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "core/anvil/src/region/mod.rs", "rank": 97, "score": 190006.96446215097 }, { "content": "fn convert_palette(section: &mut ChunkSection) -> Vec<LevelPaletteEntry> {\n\n section.convert_palette_to_section();\n\n raw_palette_to_palette_entries(section.palette().unwrap())\n\n}\n\n\n", "file_path": "core/anvil/src/region/mod.rs", "rank": 98, "score": 189252.06450711933 }, { "content": "/// Returns the serialized `BlockTable`.\n\nfn serialize_block_table(blocks: &Blocks) -> Vec<u8> {\n\n let table = BlockTableSerialize::new(&blocks.blocks, 
&blocks.property_types);\n\n\n\n bincode::serialize(&table).expect(\"bincode failed to serialize block table\")\n\n}\n\n\n\n/// Serializable form of the generated `BlockTable`.\n", "file_path": "core/blocks/generator/src/lib.rs", "rank": 99, "score": 189051.49384355452 } ]
Rust
src/input/wacom.rs
vlisivka/libremarkable
5f87bd1ec152fab94a2ed93b41ee4f1219de2a9d
use atomic::Atomic; use evdev::raw::input_event; use input::{InputDeviceState, InputEvent}; use std; use std::sync::atomic::{AtomicU16, Ordering}; use framebuffer::cgmath; use framebuffer::common::{DISPLAYHEIGHT, DISPLAYWIDTH, WACOMHEIGHT, WACOMWIDTH}; const WACOM_HSCALAR: f32 = (DISPLAYWIDTH as f32) / (WACOMWIDTH as f32); const WACOM_VSCALAR: f32 = (DISPLAYHEIGHT as f32) / (WACOMHEIGHT as f32); const EV_SYNC: u16 = 0; const EV_KEY: u16 = 1; const EV_ABS: u16 = 3; const WACOM_EVCODE_PRESSURE: u16 = 24; const WACOM_EVCODE_DISTANCE: u16 = 25; const WACOM_EVCODE_XTILT: u16 = 26; const WACOM_EVCODE_YTILT: u16 = 27; const WACOM_EVCODE_XPOS: u16 = 0; const WACOM_EVCODE_YPOS: u16 = 1; pub struct WacomState { last_x: AtomicU16, last_y: AtomicU16, last_xtilt: AtomicU16, last_ytilt: AtomicU16, last_dist: AtomicU16, last_pressure: AtomicU16, last_tool: Atomic<Option<WacomPen>>, } impl ::std::default::Default for WacomState { fn default() -> Self { WacomState { last_x: AtomicU16::new(0), last_y: AtomicU16::new(0), last_xtilt: AtomicU16::new(0), last_ytilt: AtomicU16::new(0), last_dist: AtomicU16::new(0), last_pressure: AtomicU16::new(0), last_tool: Atomic::new(None), } } } #[repr(u16)] #[derive(PartialEq, Copy, Clone, Debug)] pub enum WacomPen { ToolPen = 320, ToolRubber = 321, Touch = 330, Stylus = 331, Stylus2 = 332, } #[derive(PartialEq, Copy, Clone)] pub enum WacomEventType { InstrumentChange, Hover, Draw, Unknown, } #[derive(PartialEq, Copy, Clone)] pub enum WacomEvent { InstrumentChange { pen: WacomPen, state: bool, }, Hover { position: cgmath::Point2<f32>, distance: u16, tilt: cgmath::Vector2<u16>, }, Draw { position: cgmath::Point2<f32>, pressure: u16, tilt: cgmath::Vector2<u16>, }, Unknown, } pub fn decode(ev: &input_event, outer_state: &InputDeviceState) -> Option<InputEvent> { let state = match outer_state { InputDeviceState::WacomState(ref state_arc) => state_arc, _ => unreachable!(), }; match ev._type { EV_SYNC => match state.last_tool.load(Ordering::Relaxed) { 
Some(WacomPen::ToolPen) => Some(InputEvent::WacomEvent { event: WacomEvent::Hover { position: cgmath::Point2 { x: (f32::from(state.last_x.load(Ordering::Relaxed)) * WACOM_HSCALAR), y: (f32::from(state.last_y.load(Ordering::Relaxed)) * WACOM_VSCALAR), }, distance: state.last_dist.load(Ordering::Relaxed) as u16, tilt: cgmath::Vector2 { x: state.last_xtilt.load(Ordering::Relaxed), y: state.last_ytilt.load(Ordering::Relaxed), }, }, }), Some(WacomPen::Touch) => Some(InputEvent::WacomEvent { event: WacomEvent::Draw { position: cgmath::Point2 { x: (f32::from(state.last_x.load(Ordering::Relaxed)) * WACOM_HSCALAR), y: (f32::from(state.last_y.load(Ordering::Relaxed)) * WACOM_VSCALAR), }, pressure: state.last_pressure.load(Ordering::Relaxed), tilt: cgmath::Vector2 { x: state.last_xtilt.load(Ordering::Relaxed), y: state.last_ytilt.load(Ordering::Relaxed), }, }, }), _ => None, }, EV_KEY => { /* key (device detected - device out of range etc.) */ if ev.code < WacomPen::ToolPen as u16 || ev.code > WacomPen::Stylus2 as u16 { return None; } let pen: WacomPen = unsafe { std::mem::transmute_copy(&ev.code) }; state.last_tool.store(Some(pen), Ordering::Relaxed); Some(InputEvent::WacomEvent { event: WacomEvent::InstrumentChange { pen, state: ev.value != 0, }, }) } EV_ABS => { match ev.code { WACOM_EVCODE_DISTANCE => { if state.last_pressure.load(Ordering::Relaxed) == 0 { state.last_dist.store(ev.value as u16, Ordering::Relaxed); state .last_tool .store(Some(WacomPen::ToolPen), Ordering::Relaxed); } else { state .last_pressure .fetch_add(ev.value as u16, Ordering::Relaxed); state .last_tool .store(Some(WacomPen::Touch), Ordering::Relaxed); } } WACOM_EVCODE_XTILT => { state.last_xtilt.store(ev.value as u16, Ordering::Relaxed); } WACOM_EVCODE_YTILT => { state.last_ytilt.store(ev.value as u16, Ordering::Relaxed); } WACOM_EVCODE_PRESSURE => { state .last_pressure .store(ev.value as u16, Ordering::Relaxed);; } WACOM_EVCODE_XPOS => { let val = ev.value as u16; state.last_y.store(WACOMHEIGHT - 
val, Ordering::Relaxed); } WACOM_EVCODE_YPOS => { state.last_x.store(ev.value as u16, Ordering::Relaxed); } _ => { debug!( "Unknown absolute event code for Wacom [type: {0} code: {1} value: {2}]", ev._type, ev.code, ev.value ); } } None } _ => { debug!( "Unknown event TYPE for Wacom [type: {0} code: {1} value: {2}]", ev._type, ev.code, ev.value ); None } } }
use atomic::Atomic;
use evdev::raw::input_event;
use input::{InputDeviceState, InputEvent};
use std;
use std::sync::atomic::{AtomicU16, Ordering};

use framebuffer::cgmath;
use framebuffer::common::{DISPLAYHEIGHT, DISPLAYWIDTH, WACOMHEIGHT, WACOMWIDTH};

// Scale factors from the digitizer's coordinate space to the display's.
// The Wacom sensor has a higher resolution than the framebuffer, so raw
// positions are multiplied by these before being reported upstream.
const WACOM_HSCALAR: f32 = (DISPLAYWIDTH as f32) / (WACOMWIDTH as f32);
const WACOM_VSCALAR: f32 = (DISPLAYHEIGHT as f32) / (WACOMHEIGHT as f32);

// Linux evdev event types handled by this decoder
// (numeric values of EV_SYN / EV_KEY / EV_ABS).
const EV_SYNC: u16 = 0;
const EV_KEY: u16 = 1;
const EV_ABS: u16 = 3;

// EV_ABS event codes emitted by the Wacom digitizer.
const WACOM_EVCODE_PRESSURE: u16 = 24;
const WACOM_EVCODE_DISTANCE: u16 = 25;
const WACOM_EVCODE_XTILT: u16 = 26;
const WACOM_EVCODE_YTILT: u16 = 27;
const WACOM_EVCODE_XPOS: u16 = 0;
const WACOM_EVCODE_YPOS: u16 = 1;

/// Accumulates the partial state carried by individual EV_ABS/EV_KEY
/// packets between EV_SYNC report boundaries. All fields are atomics so
/// the state can be shared across threads without a lock.
pub struct WacomState {
    // Last seen position, already axis-swapped into display orientation
    // (see the XPOS/YPOS handling in `decode`).
    last_x: AtomicU16,
    last_y: AtomicU16,
    last_xtilt: AtomicU16,
    last_ytilt: AtomicU16,
    // Hover distance reported while the pen is in proximity.
    last_dist: AtomicU16,
    last_pressure: AtomicU16,
    // Which tool the digitizer last announced; `None` until an EV_KEY
    // instrument event arrives.
    last_tool: Atomic<Option<WacomPen>>,
}

impl ::std::default::Default for WacomState {
}

/// Tool codes announced by the digitizer via EV_KEY. The discriminants are
/// the raw Linux input key codes (BTN_TOOL_PEN=320, BTN_TOOL_RUBBER=321,
/// BTN_TOUCH=330, BTN_STYLUS=331, BTN_STYLUS2=332).
#[repr(u16)]
#[derive(PartialEq, Copy, Clone, Debug)]
pub enum WacomPen {
    ToolPen = 320,
    ToolRubber = 321,
    Touch = 330,
    Stylus = 331,
    Stylus2 = 332,
}

/// Discriminant-only mirror of `WacomEvent`'s variants.
#[derive(PartialEq, Copy, Clone)]
pub enum WacomEventType {
    InstrumentChange,
    Hover,
    Draw,
    Unknown,
}

/// High-level event produced from a completed Wacom report.
#[derive(PartialEq, Copy, Clone)]
pub enum WacomEvent {
    /// A tool came into / left proximity (`state` is true on arrival).
    InstrumentChange {
        pen: WacomPen,
        state: bool,
    },
    /// Pen in proximity but not touching; position is in display space.
    Hover {
        position: cgmath::Point2<f32>,
        distance: u16,
        tilt: cgmath::Vector2<u16>,
    },
    /// Pen touching the surface; position is in display space.
    Draw {
        position: cgmath::Point2<f32>,
        pressure: u16,
        tilt: cgmath::Vector2<u16>,
    },
    Unknown,
}

/// Decodes a raw evdev `input_event` from the Wacom digitizer.
///
/// EV_ABS and EV_KEY packets only update the shared `WacomState`; the
/// accumulated state is turned into a `Hover` or `Draw` event on the
/// EV_SYNC packet that terminates each hardware report. Returns `None`
/// for packets that merely update internal state or are not understood.
pub fn decode(ev: &input_event, outer_state: &InputDeviceState) -> Option<InputEvent> {
    let state = match outer_state {
        InputDeviceState::WacomState(ref state_arc) => state_arc,
        _ => unreachable!(),
    };
    match ev._type {
        EV_SYNC => match state.last_tool.load(Ordering::Relaxed) {
            // Pen in proximity but not touching: report a hover using the
            // digitizer coordinates scaled up into display space.
            Some(WacomPen::ToolPen) => Some(InputEvent::WacomEvent {
                event: WacomEvent::Hover {
                    position: cgmath::Point2 {
                        x: f32::from(state.last_x.load(Ordering::Relaxed)) * WACOM_HSCALAR,
                        y: f32::from(state.last_y.load(Ordering::Relaxed)) * WACOM_VSCALAR,
                    },
                    distance: state.last_dist.load(Ordering::Relaxed),
                    tilt: cgmath::Vector2 {
                        x: state.last_xtilt.load(Ordering::Relaxed),
                        y: state.last_ytilt.load(Ordering::Relaxed),
                    },
                },
            }),
            // Pen touching the surface: report a draw with pressure.
            Some(WacomPen::Touch) => Some(InputEvent::WacomEvent {
                event: WacomEvent::Draw {
                    position: cgmath::Point2 {
                        x: f32::from(state.last_x.load(Ordering::Relaxed)) * WACOM_HSCALAR,
                        y: f32::from(state.last_y.load(Ordering::Relaxed)) * WACOM_VSCALAR,
                    },
                    pressure: state.last_pressure.load(Ordering::Relaxed),
                    tilt: cgmath::Vector2 {
                        x: state.last_xtilt.load(Ordering::Relaxed),
                        y: state.last_ytilt.load(Ordering::Relaxed),
                    },
                },
            }),
            _ => None,
        },
        EV_KEY => {
            /* key (device detected - device out of range etc.) */
            // Map the raw key code onto a known WacomPen variant. The raw
            // range 320..=332 also contains kernel codes (BTN_TOOL_BRUSH
            // 322 through 329) that are NOT WacomPen discriminants, so the
            // previous range-check + transmute_copy was undefined behavior
            // for those codes. Match explicitly and ignore anything we do
            // not model.
            let pen = match ev.code {
                c if c == WacomPen::ToolPen as u16 => WacomPen::ToolPen,
                c if c == WacomPen::ToolRubber as u16 => WacomPen::ToolRubber,
                c if c == WacomPen::Touch as u16 => WacomPen::Touch,
                c if c == WacomPen::Stylus as u16 => WacomPen::Stylus,
                c if c == WacomPen::Stylus2 as u16 => WacomPen::Stylus2,
                _ => return None,
            };
            state.last_tool.store(Some(pen), Ordering::Relaxed);
            Some(InputEvent::WacomEvent {
                event: WacomEvent::InstrumentChange {
                    pen,
                    state: ev.value != 0,
                },
            })
        }
        EV_ABS => {
            match ev.code {
                WACOM_EVCODE_DISTANCE => {
                    // NOTE(review): while pressure is non-zero the distance
                    // value is folded into the pressure instead — presumably
                    // an intentional quirk of this digitizer's reports;
                    // preserved as-is.
                    if state.last_pressure.load(Ordering::Relaxed) == 0 {
                        state.last_dist.store(ev.value as u16, Ordering::Relaxed);
                        state
                            .last_tool
                            .store(Some(WacomPen::ToolPen), Ordering::Relaxed);
                    } else {
                        state
                            .last_pressure
                            .fetch_add(ev.value as u16, Ordering::Relaxed);
                        state
                            .last_tool
                            .store(Some(WacomPen::Touch), Ordering::Relaxed);
                    }
                }
                WACOM_EVCODE_XTILT => {
                    state.last_xtilt.store(ev.value as u16, Ordering::Relaxed);
                }
                WACOM_EVCODE_YTILT => {
                    state.last_ytilt.store(ev.value as u16, Ordering::Relaxed);
                }
                WACOM_EVCODE_PRESSURE => {
                    state
                        .last_pressure
                        .store(ev.value as u16, Ordering::Relaxed);
                }
                WACOM_EVCODE_XPOS => {
                    // The digitizer's X axis maps onto the display's flipped
                    // Y axis (the sensor is rotated relative to the screen).
                    let val = ev.value as u16;
                    state.last_y.store(WACOMHEIGHT - val, Ordering::Relaxed);
                }
                WACOM_EVCODE_YPOS => {
                    state.last_x.store(ev.value as u16, Ordering::Relaxed);
                }
                _ => {
                    debug!(
                        "Unknown absolute event code for Wacom [type: {0} code: {1} value: {2}]",
                        ev._type, ev.code, ev.value
                    );
                }
            }
            None
        }
        _ => {
            debug!(
                "Unknown event TYPE for Wacom [type: {0} code: {1} value: {2}]",
                ev._type, ev.code, ev.value
            );
            None
        }
    }
}
fn default() -> Self {
    // Fresh state: no tool active until an EV_KEY instrument event
    // arrives, and every coordinate/tilt/distance/pressure reading
    // starts at zero.
    WacomState {
        last_tool: Atomic::new(None),
        last_pressure: AtomicU16::new(0),
        last_dist: AtomicU16::new(0),
        last_ytilt: AtomicU16::new(0),
        last_xtilt: AtomicU16::new(0),
        last_y: AtomicU16::new(0),
        last_x: AtomicU16::new(0),
    }
}
function_block-full_function
[ { "content": "struct thrinit {\n\n int sid;\n\n int tid;\n\n int *data;\n\n};\n\n\n\n\n\nextern \"C\" {\n\n #include \"libremarkable/lib.h\"\n\n #include \"libremarkable/bitmap.h\"\n\n #include \"libremarkable/shapes.h\"\n\n}\n\n\n\n#define BITS_PER_LONG (sizeof(long) * 8)\n\n#define NBITS(x) ((((x)-1)/BITS_PER_LONG)+1)\n\n#define OFF(x) ((x)%BITS_PER_LONG)\n\n#define BIT(x) (1UL<<OFF(x))\n\n#define LONG(x) ((x)/BITS_PER_LONG)\n\n#define test_bit(bit, array)\t((array[LONG(bit)] >> OFF(bit)) & 1)\n\n\n", "file_path": "legacy-c-impl/poc.cc", "rank": 0, "score": 110609.62411948261 }, { "content": "\tunsigned int biClrUsed;\n", "file_path": "legacy-c-impl/libremarkable/bitmap.h", "rank": 1, "score": 71728.45090134024 }, { "content": "#define EPDC_FLAG_USE_CMAP 0x0004\n\n\n", "file_path": "legacy-c-impl/libremarkable/lib.h", "rank": 2, "score": 69667.75148526621 }, { "content": "#define EPDC_FLAG_USE_ALT_BUFFER 0x0100\n\n\n", "file_path": "legacy-c-impl/libremarkable/lib.h", "rank": 3, "score": 67714.74480380476 }, { "content": "#[derive(Debug)]\n\n#[repr(C)]\n\nstruct ioctl_intercept_event {\n\n fd: libc::c_int,\n\n request: NativeWidthType,\n\n p1: intptr_t,\n\n p2: intptr_t,\n\n p3: intptr_t,\n\n p4: intptr_t,\n\n ret: libc::c_int,\n\n}\n\n\n", "file_path": "examples/spy.rs", "rank": 4, "score": 67523.08762641044 }, { "content": "pub trait FramebufferRefresh {\n\n /// Refreshes the entire screen with the provided parameters. If `wait_completion` is\n\n /// set to true, doesn't return before the refresh has been completed. Returns the marker.\n\n fn full_refresh(\n\n &self,\n\n waveform_mode: common::waveform_mode,\n\n temperature: common::display_temp,\n\n dither_mode: common::dither_mode,\n\n quant_bit: i32,\n\n wait_completion: bool,\n\n ) -> u32;\n\n\n\n /// Refreshes the given `region` with the provided parameters. If `mode` is `DryRun` or\n\n /// `Wait`, this function won't return before the `DryRun`'s collision_test or\n\n /// refresh has been completed. 
In `Async` mode, this function will return immediately\n\n /// and return a `marker` which can then later be fed to `wait_refresh_complete` to wait\n\n /// for its completion. In `DryRun`, it will return the `collision_test` result.\n\n ///\n\n /// `force_full_refresh` allows rare cases where you may want to do a full refresh on a\n\n /// partial region. 99.9% of of the time, you want this set to `false`.\n", "file_path": "src/framebuffer/mod.rs", "rank": 5, "score": 60781.09942595832 }, { "content": "pub trait FramebufferDraw {\n\n /// Draws `img` at `pos` with 1:1 scaling\n\n fn draw_image(&mut self, img: &image::RgbImage, pos: cgmath::Point2<i32>)\n\n -> common::mxcfb_rect;\n\n /// Draws a straight line\n\n fn draw_line(\n\n &mut self,\n\n start: cgmath::Point2<i32>,\n\n end: cgmath::Point2<i32>,\n\n width: u32,\n\n v: common::color,\n\n ) -> common::mxcfb_rect;\n\n /// Draws a circle using Bresenham circle algorithm\n\n fn draw_circle(\n\n &mut self,\n\n pos: cgmath::Point2<i32>,\n\n rad: u32,\n\n c: common::color,\n\n ) -> common::mxcfb_rect;\n\n /// Fills a circle\n", "file_path": "src/framebuffer/mod.rs", "rank": 6, "score": 60781.09942595832 }, { "content": "pub trait FramebufferIO {\n\n /// Writes an arbitrary length frame into the framebuffer\n\n fn write_frame(&mut self, frame: &[u8]);\n\n /// Writes a single pixel at `pos` with value `v`\n\n fn write_pixel(&mut self, pos: cgmath::Point2<i32>, v: common::color);\n\n /// Reads the value of the pixel at `pos`\n\n fn read_pixel(&self, pos: cgmath::Point2<u32>) -> common::color;\n\n /// Reads the value at offset `ofst` from the mmapp'ed framebuffer region\n\n fn read_offset(&self, ofst: isize) -> u8;\n\n /// Dumps the contents of the specified rectangle into a `Vec<u8>` from which\n\n /// you can later create a CompressedCanvasState or pass to restore_region().\n\n /// The pixel format is rgb565_le.\n\n fn dump_region(&self, rect: common::mxcfb_rect) -> Result<Vec<u8>, &'static str>;\n\n /// Restores into 
the framebuffer the contents of the specified rectangle from a u8 slice\n\n fn restore_region(\n\n &mut self,\n\n rect: common::mxcfb_rect,\n\n data: &[u8],\n\n ) -> Result<u32, &'static str>;\n\n}\n\n\n\nmod graphics;\n\n\n\npub mod draw;\n", "file_path": "src/framebuffer/mod.rs", "rank": 7, "score": 60781.09942595832 }, { "content": "pub fn lua_refresh(\n\n y: hlua::AnyLuaValue,\n\n x: hlua::AnyLuaValue,\n\n height: hlua::AnyLuaValue,\n\n width: hlua::AnyLuaValue,\n\n deep: hlua::AnyLuaValue,\n\n wait: hlua::AnyLuaValue,\n\n) {\n\n if let (\n\n hlua::AnyLuaValue::LuaNumber(ny),\n\n hlua::AnyLuaValue::LuaNumber(nx),\n\n hlua::AnyLuaValue::LuaNumber(nheight),\n\n hlua::AnyLuaValue::LuaNumber(nwidth),\n\n hlua::AnyLuaValue::LuaBoolean(bdeep),\n\n hlua::AnyLuaValue::LuaBoolean(bwait),\n\n ) = (y, x, height, width, deep, wait)\n\n {\n\n let framebuffer = get_current_framebuffer!();\n\n let rect = mxcfb_rect {\n\n top: ny as u32,\n", "file_path": "src/ui_extensions/luaext.rs", "rank": 8, "score": 59530.240165514784 }, { "content": "pub fn lua_clear() {\n\n let framebuffer = get_current_framebuffer!();\n\n framebuffer.clear();\n\n framebuffer.full_refresh(\n\n waveform_mode::WAVEFORM_MODE_INIT,\n\n display_temp::TEMP_USE_AMBIENT,\n\n dither_mode::EPDC_FLAG_USE_DITHERING_PASSTHROUGH,\n\n 0,\n\n true,\n\n );\n\n}\n", "file_path": "src/ui_extensions/luaext.rs", "rank": 9, "score": 59530.240165514784 }, { "content": "pub fn lua_draw_text(\n\n y: hlua::AnyLuaValue,\n\n x: hlua::AnyLuaValue,\n\n text: hlua::AnyLuaValue,\n\n size: hlua::AnyLuaValue,\n\n color: hlua::AnyLuaValue,\n\n) {\n\n if let (\n\n hlua::AnyLuaValue::LuaNumber(ny),\n\n hlua::AnyLuaValue::LuaNumber(nx),\n\n hlua::AnyLuaValue::LuaString(stext),\n\n hlua::AnyLuaValue::LuaNumber(nsize),\n\n hlua::AnyLuaValue::LuaNumber(ncolor),\n\n ) = (y, x, text, size, color)\n\n {\n\n let framebuffer = get_current_framebuffer!();\n\n // TODO: Expose the drawn region to Lua so that it can be updated that's\n\n // returned 
from this draw_text function.\n\n framebuffer.draw_text(\n\n cgmath::Point2 {\n", "file_path": "src/ui_extensions/luaext.rs", "rank": 10, "score": 58355.788491842075 }, { "content": "pub trait FramebufferBase<'a> {\n\n /// Creates a new instance of Framebuffer\n\n fn new(path_to_device: &str) -> core::Framebuffer;\n\n /// Toggles the EPD Controller (see https://wiki.mobileread.com/wiki/EPD_controller)\n\n fn set_epdc_access(&mut self, state: bool);\n\n /// Toggles autoupdate mode\n\n fn set_autoupdate_mode(&mut self, mode: u32);\n\n /// Toggles update scheme\n\n fn set_update_scheme(&mut self, scheme: u32);\n\n /// Creates a FixScreeninfo struct and fills it using ioctl\n\n fn get_fix_screeninfo(device: &std::fs::File) -> screeninfo::FixScreeninfo;\n\n /// Creates a VarScreeninfo struct and fills it using ioctl\n\n fn get_var_screeninfo(device: &std::fs::File) -> screeninfo::VarScreeninfo;\n\n /// Makes the proper ioctl call to set the VarScreenInfo.\n\n /// You must first update the contents of self.var_screen_info\n\n /// and then call this function.\n\n fn put_var_screeninfo(\n\n device: &std::fs::File,\n\n var_screen_info: &mut screeninfo::VarScreeninfo,\n\n ) -> bool;\n\n}\n\n\n\npub mod refresh;\n", "file_path": "src/framebuffer/mod.rs", "rank": 11, "score": 57306.77981183693 }, { "content": "pub fn draw_dynamic_bezier<F>(\n\n write_pixel: &mut F,\n\n startpt: (Point2<f32>, f32),\n\n ctrlpt: (Point2<f32>, f32),\n\n endpt: (Point2<f32>, f32),\n\n samples: i32,\n\n) -> mxcfb_rect\n\nwhere\n\n F: FnMut(Point2<i32>),\n\n{\n\n let mut left_edge = Vec::<Point2<i32>>::new();\n\n let mut right_edge = Vec::<Point2<i32>>::new();\n\n let mut prev_left_pt = Point2 {\n\n x: std::i32::MIN,\n\n y: std::i32::MIN,\n\n };\n\n let mut prev_right_pt = Point2 {\n\n x: std::i32::MIN,\n\n y: std::i32::MIN,\n\n };\n", "file_path": "src/framebuffer/graphics.rs", "rank": 12, "score": 54881.46887772069 }, { "content": "/// $ cat /sys/class/power_supply/bq27441/capacity\n\n/// 97\n\npub 
fn percentage() -> Result<i32, String> {\n\n let curr = read_attribute(\"capacity\")?;\n\n match curr.parse::<i32>() {\n\n Ok(r) => Ok(r),\n\n Err(_) => {\n\n Err(\"Unable to parse the contents of 'capacity' during a battery query\".to_owned())\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/battery.rs", "rank": 13, "score": 51691.56975071549 }, { "content": "/// $ cat /sys/class/power_supply/bq27441/current_now\n\n/// -132000\n\npub fn current() -> Result<i32, String> {\n\n let curr = read_attribute(\"current_now\")?;\n\n match curr.parse::<i32>() {\n\n Ok(r) => Ok(r),\n\n Err(_) => {\n\n Err(\"Unable to parse the contents of 'current_now' during a battery query\".to_owned())\n\n }\n\n }\n\n}\n", "file_path": "src/battery.rs", "rank": 14, "score": 51691.56975071549 }, { "content": "/// $ cat /sys/class/power_supply/bq27441/voltage_now\n\n/// 4164000\n\npub fn voltage() -> Result<i32, String> {\n\n let curr = read_attribute(\"voltage_now\")?;\n\n match curr.parse::<i32>() {\n\n Ok(r) => Ok(r),\n\n Err(_) => {\n\n Err(\"Unable to parse the contents of 'voltage_now' during a battery query\".to_owned())\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/battery.rs", "rank": 15, "score": 51691.56975071549 }, { "content": "/// $ cat /sys/class/power_supply/bq27441/temp\n\n/// 201\n\npub fn temperature() -> Result<i32, String> {\n\n let curr = read_attribute(\"temp\")?;\n\n match curr.parse::<i32>() {\n\n Ok(r) => Ok(r),\n\n Err(_) => {\n\n Err(\"Unable to parse the contents of 'current_now' during a battery query\".to_owned())\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/battery.rs", "rank": 16, "score": 51691.56975071549 }, { "content": "/// $ cat /sys/class/power_supply/bq27441/charge_now\n\n/// 1528000\n\npub fn charge() -> Result<i32, String> {\n\n let curr = read_attribute(\"charge_now\")?;\n\n match curr.parse::<i32>() {\n\n Ok(r) => Ok(r),\n\n Err(_) => {\n\n Err(\"Unable to parse the contents of 'charge_now' during a battery query\".to_owned())\n\n }\n\n }\n\n}\n\n\n", 
"file_path": "src/battery.rs", "rank": 17, "score": 51691.56975071549 }, { "content": "/// $ cat /sys/class/power_supply/bq27441/charge_full\n\n/// 1635000\n\npub fn charge_full() -> Result<i32, String> {\n\n let curr = read_attribute(\"charge_full\")?;\n\n match curr.parse::<i32>() {\n\n Ok(r) => Ok(r),\n\n Err(_) => {\n\n Err(\"Unable to parse the contents of 'charge_full' during a battery query\".to_owned())\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/battery.rs", "rank": 18, "score": 50517.11807704278 }, { "content": "/// $ cat /sys/class/power_supply/bq27441/charge_full_design\n\n/// 1340000\n\npub fn charge_full_design() -> Result<i32, String> {\n\n let curr = read_attribute(\"charge_full_design\")?;\n\n match curr.parse::<i32>() {\n\n Ok(r) => Ok(r),\n\n Err(_) => Err(\n\n \"Unable to parse the contents of 'charge_full_design' during a battery query\"\n\n .to_owned(),\n\n ),\n\n }\n\n}\n\n\n", "file_path": "src/battery.rs", "rank": 19, "score": 49412.2805451281 }, { "content": "/// $ cat /sys/class/power_supply/bq27441/status\n\n/// Discharging\n\npub fn human_readable_charging_status() -> Result<String, String> {\n\n Ok(read_attribute(\"status\")?)\n\n}\n\n\n", "file_path": "src/battery.rs", "rank": 20, "score": 48371.04587481176 }, { "content": "/// $ cat /sys/class/power_supply/bq27441/capacity_level\n\n/// Normal\n\npub fn human_readable_capacity_level() -> Result<String, String> {\n\n Ok(read_attribute(\"capacity_level\")?)\n\n}\n\n\n", "file_path": "src/battery.rs", "rank": 21, "score": 48371.04587481176 }, { "content": " EPDC_FLAG_USE_DITHERING_PASSTHROUGH,\n\n 0, // flags\n\n 0, // quant_bit\n\n NULL, /* alt_buffer_data (not very useful -- not even here as the phys_addr\n\n needs to be within finfo->smem) */\n\n /* In order to take advantage of this, we would need to use the other part of the framebuffer (the virtual part that's not displayed) */\n\n 320, 1720, // y, x\n\n 64, 64 * 4); // h: 64, w: 3 * 64 (4 shapes)\n\n}\n\n\n\nint demo(void* thrdata) 
{\n\n struct thrinit* initData = (struct thrinit*)thrdata;\n\n remarkable_framebuffer* fb = (remarkable_framebuffer*)initData->data;\n\n while (true) {\n\n random_rects((remarkable_framebuffer*)fb, 100);\n\n scanning_line((remarkable_framebuffer*)fb, 395);\n\n }\n\n}\n\n\n\nint main(void) {\n", "file_path": "legacy-c-impl/poc.cc", "rank": 22, "score": 43161.66240785501 }, { "content": " EPDC_FLAG_USE_DITHERING_DRAWING,\n\n 0, DRAWING_QUANT_BIT, NULL,\n\n tb.top - 10, tb.left,\n\n tb.height + 20, tb.width);\n\n remarkable_framebuffer_wait_refresh_marker(fb, refresh_marker);\n\n usleep(3500);\n\n }\n\n}\n\n\n\nvoid random_rects(remarkable_framebuffer* fb, unsigned iter) {\n\n if (fb == NULL)\n\n return;\n\n\n\n std::queue<mxcfb_rect> q;\n\n mxcfb_rect rect;\n\n uint32_t refresh_marker = 0;\n\n // for (unsigned i = 0; i < iter; i++) {\n\n while (iter--) {\n\n // Gives 2816px horizontally (res * 2)\n\n // And 3840px vertically (virtual res accounted for)\n", "file_path": "legacy-c-impl/poc.cc", "rank": 23, "score": 43160.27573000476 }, { "content": " WAVEFORM_MODE_GC16_FAST,\n\n TEMP_USE_MAX,\n\n EPDC_FLAG_USE_DITHERING_PASSTHROUGH,\n\n 0, // flags\n\n 0, // quant_bit\n\n NULL, /* alt_buffer_data (not very useful -- not even here as the phys_addr\n\n needs to be within finfo->smem) */\n\n 0, 0, // y, x\n\n YRES(fb), XRES(fb));\n\n\n\n}\n\n\n\nvoid clear_display(remarkable_framebuffer* fb) {\n\n if (fb == NULL)\n\n return;\n\n remarkable_framebuffer_fill(fb, REMARKABLE_BRIGHTEST);\n\n remarkable_framebuffer_refresh(fb, \n\n UPDATE_MODE_FULL,\n\n WAVEFORM_MODE_INIT,\n\n TEMP_USE_MAX,\n", "file_path": "legacy-c-impl/poc.cc", "rank": 24, "score": 43159.978878190406 }, { "content": " WAVEFORM_MODE_GC16_FAST,\n\n TEMP_USE_REMARKABLE_DRAW,\n\n EPDC_FLAG_USE_DITHERING_PASSTHROUGH,\n\n 0, // flags\n\n 0, // quant_bit\n\n NULL, // alt_buffer\n\n updated_rect.top, updated_rect.left,\n\n updated_rect.height, updated_rect.width);\n\n\n\n updated_rect = 
remarkable_framebuffer_draw_text(fb, mediumFont, \"The quick brown fox jumps over the lazy dog\", 1550, 900);\n\n remarkable_framebuffer_refresh(fb,\n\n UPDATE_MODE_PARTIAL,\n\n WAVEFORM_MODE_GC16_FAST,\n\n TEMP_USE_REMARKABLE_DRAW,\n\n EPDC_FLAG_USE_REMARKABLE_DITHER,\n\n 0, // flags\n\n 0, // quant_bit\n\n NULL, // alt_buffer\n\n updated_rect.top, updated_rect.left,\n\n updated_rect.height, updated_rect.width);\n", "file_path": "legacy-c-impl/poc.cc", "rank": 25, "score": 43159.60572094638 }, { "content": "\n\n // Kick off the dynamic demo -- avoiding using pthreads because of a GLIBC version mismatch\n\n struct thrinit demoThread = {1, 0, (int*)fb};\n\n unsigned char* demoThreadStack = (unsigned char*)malloc(SSZ);\n\n clone(demo, demoThreadStack + SSZ - 1, CLONE_VM | CLONE_SYSVSEM, &demoThread);\n\n\n\n // Read from Wacom\n\n evdraw(fb, \"/dev/input/event0\", mediumFont);\n\n\n\n remarkable_framebuffer_refresh(fb,\n\n UPDATE_MODE_PARTIAL,\n\n WAVEFORM_MODE_DU,\n\n TEMP_USE_MAX,\n\n EPDC_FLAG_USE_DITHERING_PASSTHROUGH,\n\n 0, // flags\n\n 0, // quant_bit\n\n NULL, // alt_buffer_data\n\n 0, 0, // y, x\n\n YRES(fb), XRES(fb));\n\n remarkable_framebuffer_destroy(fb);\n\n return 0;\n\n}\n", "file_path": "legacy-c-impl/poc.cc", "rank": 26, "score": 43159.30818056444 }, { "content": " uint32_t refresh_marker = remarkable_framebuffer_refresh(fb,\n\n UPDATE_MODE_PARTIAL,\n\n WAVEFORM_MODE_DU,\n\n TEMP_USE_REMARKABLE_DRAW,\n\n EPDC_FLAG_USE_DITHERING_DRAWING,\n\n 0, DRAWING_QUANT_BIT, NULL,\n\n rect.top, rect.left, rect.height, rect.width);\n\n remarkable_framebuffer_wait_refresh_marker(fb, refresh_marker);\n\n break;\n\n }\n\n }\n\n }\n\n usleep(1000 * 100);\n\n }\n\n\n\n}\n\n\n\nint get_random(int min, int max) {\n\n return min + rand() / (RAND_MAX / (max - min + 1) + 1);\n\n}\n", "file_path": "legacy-c-impl/poc.cc", "rank": 27, "score": 43159.09568755723 }, { "content": " EPDC_FLAG_USE_DITHERING_PASSTHROUGH,\n\n 0, // flags\n\n 0, // quant_bit\n\n NULL, // 
alt_buffer_data\n\n 0, 0, // y, x\n\n YRES(fb), XRES(fb));\n\n usleep(300 * 1000);\n\n}\n\n\n\nvoid draw_sample_shapes(remarkable_framebuffer* fb) {\n\n if (fb == NULL)\n\n return;\n\n remarkable_color* shapes[] = { rmShape_A, rmShape_B, rmShape_C, rmShape_smiley };\n\n for (unsigned i = 0; i < 4; i++) {\n\n remarkable_framebuffer_draw_shape(fb, shapes[i], 8, 8, 320, 1720 + (64 * i), 64, 64);\n\n }\n\n remarkable_framebuffer_refresh(fb, \n\n UPDATE_MODE_PARTIAL,\n\n WAVEFORM_MODE_DU,\n\n TEMP_USE_MAX,\n", "file_path": "legacy-c-impl/poc.cc", "rank": 28, "score": 43159.03689769736 }, { "content": " removal.height, removal.width);\n\n q.pop();\n\n }\n\n\n\n remarkable_framebuffer_wait_refresh_marker(fb, refresh_marker);\n\n\n\n // Partial refresh on the portion of the screen that contains the new rectangle\n\n refresh_marker = remarkable_framebuffer_refresh(fb, \n\n UPDATE_MODE_PARTIAL,\n\n WAVEFORM_MODE_DU,\n\n TEMP_USE_PAPYRUS,\n\n EPDC_FLAG_USE_DITHERING_PASSTHROUGH,\n\n 0, // flags\n\n 0, // quant_bit\n\n NULL, // alt_buffer_data\n\n rect.top, rect.left,\n\n rect.height, rect.width);\n\n remarkable_framebuffer_wait_refresh_marker(fb, refresh_marker);\n\n usleep(1000 * 100);\n\n }\n", "file_path": "legacy-c-impl/poc.cc", "rank": 29, "score": 43158.96188370934 }, { "content": " // TODO: Figure out the reason why this does it\n\n rect.left = get_random(0, to_remarkable_width(XRES(fb)));\n\n rect.top = get_random(500, YRES(fb) - 500);\n\n rect.height = 50;\n\n rect.width = 50;\n\n remarkable_framebuffer_draw_rect(fb, rect, REMARKABLE_DARKEST);\n\n q.push(rect);\n\n\n\n while (q.size() > 1) {\n\n mxcfb_rect &removal = q.front();\n\n remarkable_framebuffer_draw_rect(fb, removal, REMARKABLE_BRIGHTEST);\n\n refresh_marker = remarkable_framebuffer_refresh(fb,\n\n UPDATE_MODE_PARTIAL,\n\n WAVEFORM_MODE_DU,\n\n TEMP_USE_PAPYRUS,\n\n EPDC_FLAG_USE_DITHERING_PASSTHROUGH,\n\n 0, // flags\n\n 0, // quant_bit\n\n NULL, // alt_buffer_data\n\n removal.top, removal.left,\n", 
"file_path": "legacy-c-impl/poc.cc", "rank": 30, "score": 43158.75745230994 }, { "content": "\n\nvoid scanning_line(remarkable_framebuffer* fb, unsigned iter) {\n\n if (fb == NULL)\n\n return;\n\n mxcfb_rect tb = {450,0,XRES(fb),10};\n\n remarkable_framebuffer_draw_rect(fb, tb, REMARKABLE_DARKEST);\n\n int dir = 1;\n\n uint32_t refresh_marker = 0;\n\n for(unsigned i = 0; i < iter; i++) {\n\n remarkable_framebuffer_draw_rect(fb, tb, REMARKABLE_BRIGHTEST);\n\n\n\n if (tb.top > YRES(fb) - 450 || tb.top < 450)\n\n dir *= -1;\n\n tb.top += 5 * dir;\n\n remarkable_framebuffer_draw_rect(fb, tb, REMARKABLE_DARKEST);\n\n \n\n refresh_marker = remarkable_framebuffer_refresh(fb, \n\n UPDATE_MODE_PARTIAL,\n\n WAVEFORM_MODE_DU,\n\n TEMP_USE_REMARKABLE_DRAW,\n", "file_path": "legacy-c-impl/poc.cc", "rank": 31, "score": 43157.880358090915 }, { "content": " srand(time(NULL));\n\n\n\n remarkable_framebuffer* fb = remarkable_framebuffer_init(\"/dev/fb0\");\n\n if (fb == NULL) {\n\n printf(\"remarkable_framebuffer_init('/dev/fb0') returned NULL. 
Exiting.\\n\");\n\n exit(1);\n\n }\n\n\n\n clear_display(fb);\n\n\n\n // display_bmp(fb, \"/tmp/test.bmp\");\n\n\n\n draw_sample_shapes(fb);\n\n\n\n struct remarkable_font* largeFont = remarkable_framebuffer_font_init(fb, \"/usr/share/fonts/ttf/noto/NotoSans-Regular.ttf\", 640);\n\n struct remarkable_font* mediumFont = remarkable_framebuffer_font_init(fb, \"/usr/share/fonts/ttf/noto/NotoSansUI-Regular.ttf\", 180);\n\n\n\n mxcfb_rect updated_rect = remarkable_framebuffer_draw_text(fb, largeFont, \"ReMarkable\", 120, 900);\n\n remarkable_framebuffer_refresh(fb, \n\n UPDATE_MODE_PARTIAL,\n", "file_path": "legacy-c-impl/poc.cc", "rank": 32, "score": 43155.98569848118 }, { "content": "\n\n int x, y;\n\n for (unsigned i = 0; i < rd / sizeof(struct input_event); i++) {\n\n struct input_event& curr = ev[i];\n\n if (curr.type == EV_ABS) {\n\n if (curr.code == 0x00) {\n\n // wacom x\n\n x = curr.value;\n\n }\n\n else if (curr.code == 0x01) {\n\n // wacom y\n\n y = curr.value;\n\n char text[255] = {0};\n\n snprintf(text, 255, \"Wacom Input: y: %d | x: %d\", y, x);\n\n\n\n // Clear the output area\n\n remarkable_framebuffer_draw_rect(fb, rect, REMARKABLE_BRIGHTEST);\n\n\n\n // Draw the text and refresh\n\n rect = remarkable_framebuffer_draw_text(fb, font, text, 1750, 1100);\n", "file_path": "legacy-c-impl/poc.cc", "rank": 33, "score": 43155.98569848118 }, { "content": "\n\nvoid evdraw(remarkable_framebuffer* fb, const char* evDevicePath, remarkable_font* font) {\n\n if (fb == NULL || evDevicePath == NULL || font == NULL)\n\n return;\n\n\n\n int fd = open(evDevicePath, O_RDONLY);\n\n if (fd < 0) {\n\n printf(\"Unable to open %s\\n\", evDevicePath);\n\n \treturn;\n\n }\n\n\n\n int version;\n\n if (ioctl(fd, EVIOCGVERSION, &version)) {\n\n printf(\"evtest: can't get version\");\n\n return;\n\n }\n\n\n\n printf(\"Input driver version is %d.%d.%d\\n\", version >> 16,\n\n (version >> 8) & 0xff, version & 0xff);\n\n\n", "file_path": "legacy-c-impl/poc.cc", "rank": 34, "score": 
43155.98569848118 }, { "content": "\n\n unsigned short id[4];\n\n\n\n ioctl(fd, EVIOCGID, id);\n\n printf(\"Input device ID: bus 0x%x vendor 0x%x product 0x%x version 0x%x\\n\",\n\n id[ID_BUS], id[ID_VENDOR], id[ID_PRODUCT], id[ID_VERSION]);\n\n\n\n char name[256] = \"Unknown\";\n\n ioctl(fd, EVIOCGNAME(sizeof(name)), name);\n\n printf(\"Input device name: \\\"%s\\\"\\n\", name);\n\n\n\n int rd = 0;\n\n struct input_event ev[64];\n\n mxcfb_rect rect = {0};\n\n while (1) {\n\n rd = read(fd, ev, sizeof(struct input_event) * 64);\n\n if (rd < (int) sizeof(struct input_event)) {\n\n printf(\"evtest: error reading\");\n\n return;\n\n }\n", "file_path": "legacy-c-impl/poc.cc", "rank": 35, "score": 43155.98569848118 }, { "content": "#include <stdio.h>\n\n#include <fcntl.h>\n\n#include <unistd.h>\n\n#include <time.h> \n\n#include <stdlib.h>\n\n#include <linux/fb.h>\n\n#include <sys/ioctl.h>\n\n#include <queue>\n\n#include <list>\n\n#include <linux/input.h>\n\n\n\n\n\n#include <sys/types.h>\n\n#include <sys/ipc.h>\n\n#include <sys/sem.h>\n\n#include <sched.h>\n\n\n\n#define IDKEY 23003\n\n#define SSZ 16384\n\n\n", "file_path": "legacy-c-impl/poc.cc", "rank": 36, "score": 43155.98569848118 }, { "content": " \n\n}\n\n\n\nvoid display_bmp(remarkable_framebuffer* fb, const char* path) {\n\n bmp_img bitmap = { 0 };\n\n bmp_img_read(&bitmap, path);\n\n\n\n printf(\"Bitmap loaded [size=%u, width=%u, height=%u]\\n\", bitmap.img_header.bfSize, bitmap.img_header.biWidth, bitmap.img_header.biHeight);\n\n unsigned left = 2000;\n\n unsigned top = 200;\n\n for (unsigned y = 0; y < bitmap.img_header.biHeight; y++) {\n\n for (unsigned x = 0; x < bitmap.img_header.biWidth; x++) {\n\n unsigned char r = bitmap.img_pixels[y][x].red;\n\n unsigned char g = bitmap.img_pixels[y][x].green;\n\n unsigned char b = bitmap.img_pixels[y][x].blue;\n\n remarkable_framebuffer_set_pixel(fb, top + y, left + x, TO_REMARKABLE_COLOR(r, g, b));\n\n }\n\n }\n\n remarkable_framebuffer_refresh(fb,\n\n 
UPDATE_MODE_FULL,\n", "file_path": "legacy-c-impl/poc.cc", "rank": 37, "score": 43155.98569848118 }, { "content": "pub fn decode(ev: &input_event, outer_state: &InputDeviceState) -> Option<InputEvent> {\n\n let state = match outer_state {\n\n InputDeviceState::MultitouchState(ref state_arc) => state_arc,\n\n _ => unreachable!(),\n\n };\n\n match ev._type {\n\n 0 => {\n\n /* sync */\n\n None\n\n }\n\n 3 => {\n\n // Absolute\n\n match ev.code {\n\n 47 => {\n\n state\n\n .last_finger_id\n\n .store(ev.value as u16, Ordering::Relaxed);\n\n None\n\n }\n\n 53 => {\n", "file_path": "src/input/multitouch.rs", "rank": 38, "score": 39608.34094235138 }, { "content": "pub fn decode(ev: &input_event, outer_state: &InputDeviceState) -> Option<InputEvent> {\n\n let state = match outer_state {\n\n InputDeviceState::GPIOState(ref state_arc) => state_arc,\n\n _ => unreachable!(),\n\n };\n\n match ev._type {\n\n 0 => {\n\n /* safely ignored. sync event*/\n\n None\n\n }\n\n 1 => {\n\n let (p, before_state) = match ev.code {\n\n 102 => (\n\n PhysicalButton::MIDDLE,\n\n state.states[0].fetch_and(ev.value != 0, Ordering::Relaxed),\n\n ),\n\n 105 => (\n\n PhysicalButton::LEFT,\n\n state.states[1].fetch_and(ev.value != 0, Ordering::Relaxed),\n\n ),\n", "file_path": "src/input/gpio.rs", "rank": 39, "score": 39608.34094235138 }, { "content": "char *keys[KEY_MAX + 1] = {\n\n\t[0 ... 
KEY_MAX] = NULL,\n\n\t[KEY_RESERVED] = \"Reserved\",\t\t[KEY_ESC] = \"Esc\",\n\n\t[KEY_1] = \"1\",\t\t\t\t[KEY_2] = \"2\",\n\n\t[KEY_3] = \"3\",\t\t\t\t[KEY_4] = \"4\",\n\n\t[KEY_5] = \"5\",\t\t\t\t[KEY_6] = \"6\",\n\n\t[KEY_7] = \"7\",\t\t\t\t[KEY_8] = \"8\",\n\n\t[KEY_9] = \"9\",\t\t\t\t[KEY_0] = \"0\",\n\n\t[KEY_MINUS] = \"Minus\",\t\t\t[KEY_EQUAL] = \"Equal\",\n\n\t[KEY_BACKSPACE] = \"Backspace\",\t\t[KEY_TAB] = \"Tab\",\n\n\t[KEY_Q] = \"Q\",\t\t\t\t[KEY_W] = \"W\",\n\n\t[KEY_E] = \"E\",\t\t\t\t[KEY_R] = \"R\",\n\n\t[KEY_T] = \"T\",\t\t\t\t[KEY_Y] = \"Y\",\n\n\t[KEY_U] = \"U\",\t\t\t\t[KEY_I] = \"I\",\n\n\t[KEY_O] = \"O\",\t\t\t\t[KEY_P] = \"P\",\n\n\t[KEY_LEFTBRACE] = \"LeftBrace\",\t\t[KEY_RIGHTBRACE] = \"RightBrace\",\n\n\t[KEY_ENTER] = \"Enter\",\t\t\t[KEY_LEFTCTRL] = \"LeftControl\",\n\n\t[KEY_A] = \"A\",\t\t\t\t[KEY_S] = \"S\",\n\n\t[KEY_D] = \"D\",\t\t\t\t[KEY_F] = \"F\",\n\n\t[KEY_G] = \"G\",\t\t\t\t[KEY_H] = \"H\",\n\n\t[KEY_J] = \"J\",\t\t\t\t[KEY_K] = \"K\",\n\n\t[KEY_L] = \"L\",\t\t\t\t[KEY_SEMICOLON] = \"Semicolon\",\n\n\t[KEY_APOSTROPHE] = \"Apostrophe\",\t[KEY_GRAVE] = \"Grave\",\n\n\t[KEY_LEFTSHIFT] = \"LeftShift\",\t\t[KEY_BACKSLASH] = \"BackSlash\",\n\n\t[KEY_Z] = \"Z\",\t\t\t\t[KEY_X] = \"X\",\n\n\t[KEY_C] = \"C\",\t\t\t\t[KEY_V] = \"V\",\n\n\t[KEY_B] = \"B\",\t\t\t\t[KEY_N] = \"N\",\n\n\t[KEY_M] = \"M\",\t\t\t\t[KEY_COMMA] = \"Comma\",\n\n\t[KEY_DOT] = \"Dot\",\t\t\t[KEY_SLASH] = \"Slash\",\n\n\t[KEY_RIGHTSHIFT] = \"RightShift\",\t[KEY_KPASTERISK] = \"KPAsterisk\",\n\n\t[KEY_LEFTALT] = \"LeftAlt\",\t\t[KEY_SPACE] = \"Space\",\n\n\t[KEY_CAPSLOCK] = \"CapsLock\",\t\t[KEY_F1] = \"F1\",\n\n\t[KEY_F2] = \"F2\",\t\t\t[KEY_F3] = \"F3\",\n\n\t[KEY_F4] = \"F4\",\t\t\t[KEY_F5] = \"F5\",\n\n\t[KEY_F6] = \"F6\",\t\t\t[KEY_F7] = \"F7\",\n\n\t[KEY_F8] = \"F8\",\t\t\t[KEY_F9] = \"F9\",\n\n\t[KEY_F10] = \"F10\",\t\t\t[KEY_NUMLOCK] = \"NumLock\",\n\n\t[KEY_SCROLLLOCK] = \"ScrollLock\",\t[KEY_KP7] = \"KP7\",\n\n\t[KEY_KP8] = \"KP8\",\t\t\t[KEY_KP9] = 
\"KP9\",\n\n\t[KEY_KPMINUS] = \"KPMinus\",\t\t[KEY_KP4] = \"KP4\",\n\n\t[KEY_KP5] = \"KP5\",\t\t\t[KEY_KP6] = \"KP6\",\n\n\t[KEY_KPPLUS] = \"KPPlus\",\t\t[KEY_KP1] = \"KP1\",\n\n\t[KEY_KP2] = \"KP2\",\t\t\t[KEY_KP3] = \"KP3\",\n\n\t[KEY_KP0] = \"KP0\",\t\t\t[KEY_KPDOT] = \"KPDot\",\n\n\t[KEY_ZENKAKUHANKAKU] = \"Zenkaku/Hankaku\", [KEY_102ND] = \"102nd\",\n\n\t[KEY_F11] = \"F11\",\t\t\t[KEY_F12] = \"F12\",\n\n\t[KEY_RO] = \"RO\",\t\t\t[KEY_KATAKANA] = \"Katakana\",\n\n\t[KEY_HIRAGANA] = \"HIRAGANA\",\t\t[KEY_HENKAN] = \"Henkan\",\n\n\t[KEY_KATAKANAHIRAGANA] = \"Katakana/Hiragana\", [KEY_MUHENKAN] = \"Muhenkan\",\n\n\t[KEY_KPJPCOMMA] = \"KPJpComma\",\t\t[KEY_KPENTER] = \"KPEnter\",\n\n\t[KEY_RIGHTCTRL] = \"RightCtrl\",\t\t[KEY_KPSLASH] = \"KPSlash\",\n\n\t[KEY_SYSRQ] = \"SysRq\",\t\t\t[KEY_RIGHTALT] = \"RightAlt\",\n\n\t[KEY_LINEFEED] = \"LineFeed\",\t\t[KEY_HOME] = \"Home\",\n\n\t[KEY_UP] = \"Up\",\t\t\t[KEY_PAGEUP] = \"PageUp\",\n\n\t[KEY_LEFT] = \"Left\",\t\t\t[KEY_RIGHT] = \"Right\",\n\n\t[KEY_END] = \"End\",\t\t\t[KEY_DOWN] = \"Down\",\n\n\t[KEY_PAGEDOWN] = \"PageDown\",\t\t[KEY_INSERT] = \"Insert\",\n\n\t[KEY_DELETE] = \"Delete\",\t\t[KEY_MACRO] = \"Macro\",\n\n\t[KEY_MUTE] = \"Mute\",\t\t\t[KEY_VOLUMEDOWN] = \"VolumeDown\",\n\n\t[KEY_VOLUMEUP] = \"VolumeUp\",\t\t[KEY_POWER] = \"Power\",\n\n\t[KEY_KPEQUAL] = \"KPEqual\",\t\t[KEY_KPPLUSMINUS] = \"KPPlusMinus\",\n\n\t[KEY_PAUSE] = \"Pause\",\t\t\t[KEY_KPCOMMA] = \"KPComma\",\n\n\t[KEY_HANGUEL] = \"Hanguel\",\t\t[KEY_HANJA] = \"Hanja\",\n\n\t[KEY_YEN] = \"Yen\",\t\t\t[KEY_LEFTMETA] = \"LeftMeta\",\n\n\t[KEY_RIGHTMETA] = \"RightMeta\",\t\t[KEY_COMPOSE] = \"Compose\",\n\n\t[KEY_STOP] = \"Stop\",\t\t\t[KEY_AGAIN] = \"Again\",\n\n\t[KEY_PROPS] = \"Props\",\t\t\t[KEY_UNDO] = \"Undo\",\n\n\t[KEY_FRONT] = \"Front\",\t\t\t[KEY_COPY] = \"Copy\",\n\n\t[KEY_OPEN] = \"Open\",\t\t\t[KEY_PASTE] = \"Paste\",\n\n\t[KEY_FIND] = \"Find\",\t\t\t[KEY_CUT] = \"Cut\",\n\n\t[KEY_HELP] = \"Help\",\t\t\t[KEY_MENU] = 
\"Menu\",\n\n\t[KEY_CALC] = \"Calc\",\t\t\t[KEY_SETUP] = \"Setup\",\n\n\t[KEY_SLEEP] = \"Sleep\",\t\t\t[KEY_WAKEUP] = \"WakeUp\",\n\n\t[KEY_FILE] = \"File\",\t\t\t[KEY_SENDFILE] = \"SendFile\",\n\n\t[KEY_DELETEFILE] = \"DeleteFile\",\t[KEY_XFER] = \"X-fer\",\n\n\t[KEY_PROG1] = \"Prog1\",\t\t\t[KEY_PROG2] = \"Prog2\",\n\n\t[KEY_WWW] = \"WWW\",\t\t\t[KEY_MSDOS] = \"MSDOS\",\n\n\t[KEY_COFFEE] = \"Coffee\",\t\t[KEY_DIRECTION] = \"Direction\",\n\n\t[KEY_CYCLEWINDOWS] = \"CycleWindows\",\t[KEY_MAIL] = \"Mail\",\n\n\t[KEY_BOOKMARKS] = \"Bookmarks\",\t\t[KEY_COMPUTER] = \"Computer\",\n\n\t[KEY_BACK] = \"Back\",\t\t\t[KEY_FORWARD] = \"Forward\",\n\n\t[KEY_CLOSECD] = \"CloseCD\",\t\t[KEY_EJECTCD] = \"EjectCD\",\n\n\t[KEY_EJECTCLOSECD] = \"EjectCloseCD\",\t[KEY_NEXTSONG] = \"NextSong\",\n\n\t[KEY_PLAYPAUSE] = \"PlayPause\",\t\t[KEY_PREVIOUSSONG] = \"PreviousSong\",\n\n\t[KEY_STOPCD] = \"StopCD\",\t\t[KEY_RECORD] = \"Record\",\n\n\t[KEY_REWIND] = \"Rewind\",\t\t[KEY_PHONE] = \"Phone\",\n\n\t[KEY_ISO] = \"ISOKey\",\t\t\t[KEY_CONFIG] = \"Config\",\n\n\t[KEY_HOMEPAGE] = \"HomePage\",\t\t[KEY_REFRESH] = \"Refresh\",\n\n\t[KEY_EXIT] = \"Exit\",\t\t\t[KEY_MOVE] = \"Move\",\n\n\t[KEY_EDIT] = \"Edit\",\t\t\t[KEY_SCROLLUP] = \"ScrollUp\",\n\n\t[KEY_SCROLLDOWN] = \"ScrollDown\",\t[KEY_KPLEFTPAREN] = \"KPLeftParenthesis\",\n\n\t[KEY_KPRIGHTPAREN] = \"KPRightParenthesis\", [KEY_F13] = \"F13\",\n\n\t[KEY_F14] = \"F14\",\t\t\t[KEY_F15] = \"F15\",\n\n\t[KEY_F16] = \"F16\",\t\t\t[KEY_F17] = \"F17\",\n\n\t[KEY_F18] = \"F18\",\t\t\t[KEY_F19] = \"F19\",\n\n\t[KEY_F20] = \"F20\",\t\t\t[KEY_F21] = \"F21\",\n\n\t[KEY_F22] = \"F22\",\t\t\t[KEY_F23] = \"F23\",\n\n\t[KEY_F24] = \"F24\",\t\t\t[KEY_PLAYCD] = \"PlayCD\",\n\n\t[KEY_PAUSECD] = \"PauseCD\",\t\t[KEY_PROG3] = \"Prog3\",\n\n\t[KEY_PROG4] = \"Prog4\",\t\t\t[KEY_SUSPEND] = \"Suspend\",\n\n\t[KEY_CLOSE] = \"Close\",\t\t\t[KEY_PLAY] = \"Play\",\n\n\t[KEY_FASTFORWARD] = \"Fast Forward\",\t[KEY_BASSBOOST] = \"Bass Boost\",\n\n\t[KEY_PRINT] = 
\"Print\",\t\t\t[KEY_HP] = \"HP\",\n\n\t[KEY_CAMERA] = \"Camera\",\t\t[KEY_SOUND] = \"Sound\",\n\n\t[KEY_QUESTION] = \"Question\",\t\t[KEY_EMAIL] = \"Email\",\n\n\t[KEY_CHAT] = \"Chat\",\t\t\t[KEY_SEARCH] = \"Search\",\n\n\t[KEY_CONNECT] = \"Connect\",\t\t[KEY_FINANCE] = \"Finance\",\n\n\t[KEY_SPORT] = \"Sport\",\t\t\t[KEY_SHOP] = \"Shop\",\n\n\t[KEY_ALTERASE] = \"Alternate Erase\",\t[KEY_CANCEL] = \"Cancel\",\n\n\t[KEY_BRIGHTNESSDOWN] = \"Brightness down\", [KEY_BRIGHTNESSUP] = \"Brightness up\",\n\n\t[KEY_MEDIA] = \"Media\",\t\t\t[KEY_UNKNOWN] = \"Unknown\",\n\n\t[BTN_0] = \"Btn0\",\t\t\t[BTN_1] = \"Btn1\",\n\n\t[BTN_2] = \"Btn2\",\t\t\t[BTN_3] = \"Btn3\",\n\n\t[BTN_4] = \"Btn4\",\t\t\t[BTN_5] = \"Btn5\",\n\n\t[BTN_6] = \"Btn6\",\t\t\t[BTN_7] = \"Btn7\",\n\n\t[BTN_8] = \"Btn8\",\t\t\t[BTN_9] = \"Btn9\",\n\n\t[BTN_LEFT] = \"LeftBtn\",\t\t\t[BTN_RIGHT] = \"RightBtn\",\n\n\t[BTN_MIDDLE] = \"MiddleBtn\",\t\t[BTN_SIDE] = \"SideBtn\",\n\n\t[BTN_EXTRA] = \"ExtraBtn\",\t\t[BTN_FORWARD] = \"ForwardBtn\",\n\n\t[BTN_BACK] = \"BackBtn\",\t\t\t[BTN_TASK] = \"TaskBtn\",\n\n\t[BTN_TRIGGER] = \"Trigger\",\t\t[BTN_THUMB] = \"ThumbBtn\",\n\n\t[BTN_THUMB2] = \"ThumbBtn2\",\t\t[BTN_TOP] = \"TopBtn\",\n\n\t[BTN_TOP2] = \"TopBtn2\",\t\t\t[BTN_PINKIE] = \"PinkieBtn\",\n\n\t[BTN_BASE] = \"BaseBtn\",\t\t\t[BTN_BASE2] = \"BaseBtn2\",\n\n\t[BTN_BASE3] = \"BaseBtn3\",\t\t[BTN_BASE4] = \"BaseBtn4\",\n\n\t[BTN_BASE5] = \"BaseBtn5\",\t\t[BTN_BASE6] = \"BaseBtn6\",\n\n\t[BTN_DEAD] = \"BtnDead\",\t\t\t[BTN_A] = \"BtnA\",\n\n\t[BTN_B] = \"BtnB\",\t\t\t[BTN_C] = \"BtnC\",\n\n\t[BTN_X] = \"BtnX\",\t\t\t[BTN_Y] = \"BtnY\",\n\n\t[BTN_Z] = \"BtnZ\",\t\t\t[BTN_TL] = \"BtnTL\",\n\n\t[BTN_TR] = \"BtnTR\",\t\t\t[BTN_TL2] = \"BtnTL2\",\n\n\t[BTN_TR2] = \"BtnTR2\",\t\t\t[BTN_SELECT] = \"BtnSelect\",\n\n\t[BTN_START] = \"BtnStart\",\t\t[BTN_MODE] = \"BtnMode\",\n\n\t[BTN_THUMBL] = \"BtnThumbL\",\t\t[BTN_THUMBR] = \"BtnThumbR\",\n\n\t[BTN_TOOL_PEN] = \"ToolPen\",\t\t[BTN_TOOL_RUBBER] = 
\"ToolRubber\",\n\n\t[BTN_TOOL_BRUSH] = \"ToolBrush\",\t\t[BTN_TOOL_PENCIL] = \"ToolPencil\",\n\n\t[BTN_TOOL_AIRBRUSH] = \"ToolAirbrush\",\t[BTN_TOOL_FINGER] = \"ToolFinger\",\n\n\t[BTN_TOOL_MOUSE] = \"ToolMouse\",\t\t[BTN_TOOL_LENS] = \"ToolLens\",\n\n\t[BTN_TOUCH] = \"Touch\",\t\t\t[BTN_STYLUS] = \"Stylus\",\n\n\t[BTN_STYLUS2] = \"Stylus2\",\t\t[BTN_TOOL_DOUBLETAP] = \"Tool Doubletap\",\n\n\t[BTN_TOOL_TRIPLETAP] = \"Tool Tripletap\", [BTN_GEAR_DOWN] = \"WheelBtn\",\n\n\t[BTN_GEAR_UP] = \"Gear up\",\t\t[KEY_OK] = \"Ok\",\n\n\t[KEY_SELECT] = \"Select\",\t\t[KEY_GOTO] = \"Goto\",\n\n\t[KEY_CLEAR] = \"Clear\",\t\t\t[KEY_POWER2] = \"Power2\",\n\n\t[KEY_OPTION] = \"Option\",\t\t[KEY_INFO] = \"Info\",\n\n\t[KEY_TIME] = \"Time\",\t\t\t[KEY_VENDOR] = \"Vendor\",\n\n\t[KEY_ARCHIVE] = \"Archive\",\t\t[KEY_PROGRAM] = \"Program\",\n\n\t[KEY_CHANNEL] = \"Channel\",\t\t[KEY_FAVORITES] = \"Favorites\",\n\n\t[KEY_EPG] = \"EPG\",\t\t\t[KEY_PVR] = \"PVR\",\n\n\t[KEY_MHP] = \"MHP\",\t\t\t[KEY_LANGUAGE] = \"Language\",\n\n\t[KEY_TITLE] = \"Title\",\t\t\t[KEY_SUBTITLE] = \"Subtitle\",\n\n\t[KEY_ANGLE] = \"Angle\",\t\t\t[KEY_ZOOM] = \"Zoom\",\n\n\t[KEY_MODE] = \"Mode\",\t\t\t[KEY_KEYBOARD] = \"Keyboard\",\n\n\t[KEY_SCREEN] = \"Screen\",\t\t[KEY_PC] = \"PC\",\n\n\t[KEY_TV] = \"TV\",\t\t\t[KEY_TV2] = \"TV2\",\n\n\t[KEY_VCR] = \"VCR\",\t\t\t[KEY_VCR2] = \"VCR2\",\n\n\t[KEY_SAT] = \"Sat\",\t\t\t[KEY_SAT2] = \"Sat2\",\n\n\t[KEY_CD] = \"CD\",\t\t\t[KEY_TAPE] = \"Tape\",\n\n\t[KEY_RADIO] = \"Radio\",\t\t\t[KEY_TUNER] = \"Tuner\",\n\n\t[KEY_PLAYER] = \"Player\",\t\t[KEY_TEXT] = \"Text\",\n\n\t[KEY_DVD] = \"DVD\",\t\t\t[KEY_AUX] = \"Aux\",\n\n\t[KEY_MP3] = \"MP3\",\t\t\t[KEY_AUDIO] = \"Audio\",\n\n\t[KEY_VIDEO] = \"Video\",\t\t\t[KEY_DIRECTORY] = \"Directory\",\n\n\t[KEY_LIST] = \"List\",\t\t\t[KEY_MEMO] = \"Memo\",\n\n\t[KEY_CALENDAR] = \"Calendar\",\t\t[KEY_RED] = \"Red\",\n\n\t[KEY_GREEN] = \"Green\",\t\t\t[KEY_YELLOW] = \"Yellow\",\n\n\t[KEY_BLUE] = \"Blue\",\t\t\t[KEY_CHANNELUP] = 
\"ChannelUp\",\n\n\t[KEY_CHANNELDOWN] = \"ChannelDown\",\t[KEY_FIRST] = \"First\",\n\n\t[KEY_LAST] = \"Last\",\t\t\t[KEY_AB] = \"AB\",\n\n\t[KEY_NEXT] = \"Next\",\t\t\t[KEY_RESTART] = \"Restart\",\n\n\t[KEY_SLOW] = \"Slow\",\t\t\t[KEY_SHUFFLE] = \"Shuffle\",\n\n\t[KEY_BREAK] = \"Break\",\t\t\t[KEY_PREVIOUS] = \"Previous\",\n\n\t[KEY_DIGITS] = \"Digits\",\t\t[KEY_TEEN] = \"TEEN\",\n\n\t[KEY_TWEN] = \"TWEN\",\t\t\t[KEY_DEL_EOL] = \"Delete EOL\",\n\n\t[KEY_DEL_EOS] = \"Delete EOS\",\t\t[KEY_INS_LINE] = \"Insert line\",\n\n\t[KEY_DEL_LINE] = \"Delete line\",\n", "file_path": "legacy-c-impl/evtest.c", "rank": 41, "score": 38944.51647094092 }, { "content": "char *repeats[REP_MAX + 1] = {\n\n\t[0 ... REP_MAX] = NULL,\n\n\t[REP_DELAY] = \"Delay\",\t\t[REP_PERIOD] = \"Period\"\n", "file_path": "legacy-c-impl/evtest.c", "rank": 42, "score": 38944.51647094092 }, { "content": "int ioctl(int fd, int request, ...) {\n\n static int (*func)(int fd, int request, ...);\n\n if (!func) {\n\n printf(\"Hooking ioctl(...)\\n\");\n\n func = (int (*)(int d, int request, ...)) dlsym(RTLD_NEXT, \"ioctl\");\n\n }\n\n\n\n va_list args;\n\n va_start(args, request);\n\n void *p = va_arg(args, void *);\n\n va_end(args);\n\n\n\n if (fd == 3) {\n\n printf(\"ioctl(%d, 0x%x (addr: %p), %p (addr: %p)\", fd, request, &request, p, &p);\n\n\n\n struct fb_var_screeninfo* vinfo;\n\n switch (request) {\n\n case REMARKABLE_PREFIX(MXCFB_SEND_UPDATE):\n\n print_mxcfb_update_data((mxcfb_update_data*)p);\n\n // hexDump(\"mxcfb_update_data\", p, sizeof(mxcfb_update_data));\n\n break;\n\n case FBIOPUT_VSCREENINFO:\n\n printf(\"(FBIOPUT_VSCREENINFO)\\n\");\n\n print_vinfo((struct fb_var_screeninfo*)p);\n\n break;\n\n case FBIOGET_VSCREENINFO:\n\n // NOP because struct fb_var_screenfinfo is used as an output param\n\n break;\n\n case REMARKABLE_PREFIX(MXCFB_WAIT_FOR_UPDATE_COMPLETE):\n\n hexDump(\"MXCFB_WAIT_FOR_UPDATE_COMPLETE(mxcfb_update_marker_data)\", p, sizeof(mxcfb_update_marker_data));\n\n break;\n\n 
default:\n\n printf(\" (unknown)\");\n\n break;\n\n }\n\n }\n\n\n\n int rc = func(fd, request, p);\n\n if (fd == 3) {\n\n printf(\") == %d\\n\", rc);\n\n }\n\n\n\n if (request == FBIOGET_VSCREENINFO) {\n\n printf(\"(FBIOGET_VSCREENINFO)\\n\");\n\n print_vinfo((struct fb_var_screeninfo*)p);\n\n }\n\n return rc;\n", "file_path": "legacy-c-impl/spy.c", "rank": 43, "score": 38944.51647094092 }, { "content": "char *misc[MSC_MAX + 1] = {\n\n\t[ 0 ... MSC_MAX] = NULL,\n\n\t[MSC_SERIAL] = \"Serial\",\t[MSC_PULSELED] = \"Pulseled\",\n\n\t[MSC_GESTURE] = \"Gesture\",\t[MSC_RAW] = \"RawData\",\n\n\t[MSC_SCAN] = \"ScanCode\",\n", "file_path": "legacy-c-impl/evtest.c", "rank": 44, "score": 38944.51647094092 }, { "content": "char **names[EV_MAX + 1] = {\n\n\t[0 ... EV_MAX] = NULL,\n\n\t[EV_SYN] = events,\t\t\t[EV_KEY] = keys,\n\n\t[EV_REL] = relatives,\t\t\t[EV_ABS] = absolutes,\n\n\t[EV_MSC] = misc,\t\t\t[EV_LED] = leds,\n\n\t[EV_SND] = sounds,\t\t\t[EV_REP] = repeats,\n", "file_path": "legacy-c-impl/evtest.c", "rank": 45, "score": 38944.51647094092 }, { "content": "char *events[EV_MAX + 1] = {\n\n\t[0 ... EV_MAX] = NULL,\n\n\t[EV_SYN] = \"Sync\",\t\t\t[EV_KEY] = \"Key\",\n\n\t[EV_REL] = \"Relative\",\t\t\t[EV_ABS] = \"Absolute\",\n\n\t[EV_MSC] = \"Misc\",\t\t\t[EV_LED] = \"LED\",\n\n\t[EV_SND] = \"Sound\",\t\t\t[EV_REP] = \"Repeat\",\n\n\t[EV_FF] = \"ForceFeedback\",\t\t[EV_PWR] = \"Power\",\n\n\t[EV_FF_STATUS] = \"ForceFeedbackStatus\",\n", "file_path": "legacy-c-impl/evtest.c", "rank": 46, "score": 38944.51647094092 }, { "content": "char *relatives[REL_MAX + 1] = {\n\n\t[0 ... 
REL_MAX] = NULL,\n\n\t[REL_X] = \"X\",\t\t\t[REL_Y] = \"Y\",\n\n\t[REL_Z] = \"Z\",\t\t\t[REL_HWHEEL] = \"HWheel\",\n\n\t[REL_DIAL] = \"Dial\",\t\t[REL_WHEEL] = \"Wheel\", \n\n\t[REL_MISC] = \"Misc\",\t\n", "file_path": "legacy-c-impl/evtest.c", "rank": 47, "score": 38944.51647094092 }, { "content": "char *absval[5] = { \"Value\", \"Min \", \"Max \", \"Fuzz \", \"Flat \" };\n", "file_path": "legacy-c-impl/evtest.c", "rank": 48, "score": 38944.51647094092 }, { "content": "char *leds[LED_MAX + 1] = {\n\n\t[0 ... LED_MAX] = NULL,\n\n\t[LED_NUML] = \"NumLock\",\t\t[LED_CAPSL] = \"CapsLock\", \n\n\t[LED_SCROLLL] = \"ScrollLock\",\t[LED_COMPOSE] = \"Compose\",\n\n\t[LED_KANA] = \"Kana\",\t\t[LED_SLEEP] = \"Sleep\", \n\n\t[LED_SUSPEND] = \"Suspend\",\t[LED_MUTE] = \"Mute\",\n\n\t[LED_MISC] = \"Misc\",\n", "file_path": "legacy-c-impl/evtest.c", "rank": 49, "score": 38944.51647094092 }, { "content": "char *absolutes[ABS_MAX + 1] = {\n\n\t[0 ... ABS_MAX] = NULL,\n\n\t[ABS_X] = \"X\",\t\t\t[ABS_Y] = \"Y\",\n\n\t[ABS_Z] = \"Z\",\t\t\t[ABS_RX] = \"Rx\",\n\n\t[ABS_RY] = \"Ry\",\t\t[ABS_RZ] = \"Rz\",\n\n\t[ABS_THROTTLE] = \"Throttle\",\t[ABS_RUDDER] = \"Rudder\",\n\n\t[ABS_WHEEL] = \"Wheel\",\t\t[ABS_GAS] = \"Gas\",\n\n\t[ABS_BRAKE] = \"Brake\",\t\t[ABS_HAT0X] = \"Hat0X\",\n\n\t[ABS_HAT0Y] = \"Hat0Y\",\t\t[ABS_HAT1X] = \"Hat1X\",\n\n\t[ABS_HAT1Y] = \"Hat1Y\",\t\t[ABS_HAT2X] = \"Hat2X\",\n\n\t[ABS_HAT2Y] = \"Hat2Y\",\t\t[ABS_HAT3X] = \"Hat3X\",\n\n\t[ABS_HAT3Y] = \"Hat 3Y\",\t\t[ABS_PRESSURE] = \"Pressure\",\n\n\t[ABS_DISTANCE] = \"Distance\",\t[ABS_TILT_X] = \"XTilt\",\n\n\t[ABS_TILT_Y] = \"YTilt\",\t\t[ABS_TOOL_WIDTH] = \"Tool Width\",\n\n\t[ABS_VOLUME] = \"Volume\",\t[ABS_MISC] = \"Misc\",\n", "file_path": "legacy-c-impl/evtest.c", "rank": 50, "score": 38944.51647094092 }, { "content": "int main (int argc, char **argv)\n\n{\n\n\tint fd, rd, i, j, k;\n\n\tstruct input_event ev[64];\n\n\tint version;\n\n\tunsigned short id[4];\n\n\tunsigned long 
bit[EV_MAX][NBITS(KEY_MAX)];\n\n\tchar name[256] = \"Unknown\";\n\n\tint abs[5];\n\n\n\n\tif (argc < 2) {\n\n\t\tprintf(\"Usage: evtest /dev/input/eventX\\n\");\n\n\t\tprintf(\"Where X = input device number\\n\");\n\n\t\treturn 1;\n\n\t}\n\n\n\n\tif ((fd = open(argv[argc - 1], O_RDONLY)) < 0) {\n\n\t\tperror(\"evtest\");\n\n\t\treturn 1;\n\n\t}\n\n\n\n\tif (ioctl(fd, EVIOCGVERSION, &version)) {\n\n\t\tperror(\"evtest: can't get version\");\n\n\t\treturn 1;\n\n\t}\n\n\n\n\tprintf(\"Input driver version is %d.%d.%d\\n\",\n\n\t\tversion >> 16, (version >> 8) & 0xff, version & 0xff);\n\n\n\n\tioctl(fd, EVIOCGID, id);\n\n\tprintf(\"Input device ID: bus 0x%x vendor 0x%x product 0x%x version 0x%x\\n\",\n\n\t\tid[ID_BUS], id[ID_VENDOR], id[ID_PRODUCT], id[ID_VERSION]);\n\n\n\n\tioctl(fd, EVIOCGNAME(sizeof(name)), name);\n\n\tprintf(\"Input device name: \\\"%s\\\"\\n\", name);\n\n\n\n\tmemset(bit, 0, sizeof(bit));\n\n\tioctl(fd, EVIOCGBIT(0, EV_MAX), bit[0]);\n\n\tprintf(\"Supported events:\\n\");\n\n\n\n\tfor (i = 0; i < EV_MAX; i++)\n\n\t\tif (test_bit(i, bit[0])) {\n\n\t\t\tprintf(\" Event type %d (%s)\\n\", i, events[i] ? events[i] : \"?\");\n\n\t\t\tif (!i) continue;\n\n\t\t\tioctl(fd, EVIOCGBIT(i, KEY_MAX), bit[i]);\n\n\t\t\tfor (j = 0; j < KEY_MAX; j++) \n\n\t\t\t\tif (test_bit(j, bit[i])) {\n\n\t\t\t\t\tprintf(\" Event code %d (%s)\\n\", j, names[i] ? (names[i][j] ? names[i][j] : \"?\") : \"?\");\n\n\t\t\t\t\tif (i == EV_ABS) {\n\n\t\t\t\t\t\tioctl(fd, EVIOCGABS(j), abs);\n\n\t\t\t\t\t\tfor (k = 0; k < 5; k++)\n\n\t\t\t\t\t\t\tif ((k < 3) || abs[k])\n\n\t\t\t\t\t\t\t\tprintf(\" %s %6d\\n\", absval[k], abs[k]);\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t}\n\n\t\t\n\n\n\n\tprintf(\"Testing ... 
(interrupt to exit)\\n\");\n\n\n\n\twhile (1) {\n\n\t\trd = read(fd, ev, sizeof(struct input_event) * 64);\n\n\n\n\t\tif (rd < (int) sizeof(struct input_event)) {\n\n\t\t\tprintf(\"yyy\\n\");\n\n\t\t\tperror(\"\\nevtest: error reading\");\n\n\t\t\treturn 1;\n\n\t\t}\n\n\n\n\t\tfor (i = 0; i < rd / sizeof(struct input_event); i++)\n\n\n\n\t\t\tif (ev[i].type == EV_SYN) {\n\n\t\t\t\tprintf(\"Event: time %ld.%06ld, -------------- %s ------------\\n\",\n\n\t\t\t\t\tev[i].time.tv_sec, ev[i].time.tv_usec, ev[i].code ? \"Config Sync\" : \"Report Sync\" );\n\n\t\t\t} else if (ev[i].type == EV_MSC && (ev[i].code == MSC_RAW || ev[i].code == MSC_SCAN)) {\n\n\t\t\t\tprintf(\"Event: time %ld.%06ld, type %d (%s), code %d (%s), value %02x\\n\",\n\n\t\t\t\t\tev[i].time.tv_sec, ev[i].time.tv_usec, ev[i].type,\n\n\t\t\t\t\tevents[ev[i].type] ? events[ev[i].type] : \"?\",\n\n\t\t\t\t\tev[i].code,\n\n\t\t\t\t\tnames[ev[i].type] ? (names[ev[i].type][ev[i].code] ? names[ev[i].type][ev[i].code] : \"?\") : \"?\",\n\n\t\t\t\t\tev[i].value);\n\n\t\t\t} else {\n\n\t\t\t\tprintf(\"Event: time %ld.%06ld, type %d (%s), code %d (%s), value %d\\n\",\n\n\t\t\t\t\tev[i].time.tv_sec, ev[i].time.tv_usec, ev[i].type,\n\n\t\t\t\t\tevents[ev[i].type] ? events[ev[i].type] : \"?\",\n\n\t\t\t\t\tev[i].code,\n\n\t\t\t\t\tnames[ev[i].type] ? (names[ev[i].type][ev[i].code] ? names[ev[i].type][ev[i].code] : \"?\") : \"?\",\n\n\t\t\t\t\tev[i].value);\n\n\t\t\t}\t\n\n\n\n\t}\n", "file_path": "legacy-c-impl/evtest.c", "rank": 51, "score": 38944.51647094092 }, { "content": "char *sounds[SND_MAX + 1] = {\n\n\t[0 ... 
SND_MAX] = NULL,\n\n\t[SND_CLICK] = \"Click\",\t\t[SND_BELL] = \"Bell\",\n\n\t[SND_TONE] = \"Tone\"\n", "file_path": "legacy-c-impl/evtest.c", "rank": 52, "score": 38944.51647094092 }, { "content": "pub fn rgbimage_from_u8_slice(w: u32, h: u32, buff: &[u8]) -> Option<image::RgbImage> {\n\n // rgb565 is the input so it is 16bits (2 bytes) per pixel\n\n let input_bytespp = 2;\n\n let input_line_len = w * input_bytespp;\n\n if h * input_line_len != buff.len() as u32 {\n\n return None;\n\n }\n\n Some(image::ImageBuffer::from_fn(w, h, |x, y| {\n\n let in_index: usize = ((y * input_line_len) + ((input_bytespp * x) as u32)) as usize;\n\n let data = common::color::NATIVE_COMPONENTS(buff[in_index], buff[in_index + 1]).to_rgb8();\n\n image::Rgb(data)\n\n }))\n\n}\n", "file_path": "src/framebuffer/storage.rs", "rank": 53, "score": 38304.24475534554 }, { "content": "pub fn fill_polygon<F>(write_pixel: &mut F, points: &[Point2<i32>]) -> mxcfb_rect\n\nwhere\n\n F: FnMut(Point2<i32>),\n\n{\n\n // This implementation of polygon rasterisation is based on this article:\n\n // https://hackernoon.com/computer-graphics-scan-line-polygon-fill-algorithm-3cb47283df6\n\n\n\n // struct to hold edge data\n\n #[derive(Debug, Copy, Clone)]\n\n struct EdgeBucket {\n\n ymax: i32,\n\n ymin: i32,\n\n x: i32,\n\n sign: i32,\n\n direction: i32,\n\n dx: i32,\n\n dy: i32,\n\n sum: i32,\n\n };\n\n\n", "file_path": "src/framebuffer/graphics.rs", "rank": 54, "score": 38131.38203493596 }, { "content": " unsigned int flags; // 0x0000\n", "file_path": "legacy-c-impl/libremarkable/lib.h", "rank": 55, "score": 37721.64851875819 }, { "content": "\tunsigned char green;\n", "file_path": "legacy-c-impl/libremarkable/bitmap.h", "rank": 56, "score": 37717.59893239218 }, { "content": " uint32_t left;\n", "file_path": "legacy-c-impl/libremarkable/lib.h", "rank": 57, "score": 37717.59893239218 }, { "content": " int temp; // 0x1001 = TEMP_USE_PAPYRUS\n", "file_path": "legacy-c-impl/libremarkable/lib.h", "rank": 58, 
"score": 37717.59893239218 }, { "content": " unsigned len;\n", "file_path": "legacy-c-impl/libremarkable/lib.h", "rank": 59, "score": 37717.59893239218 }, { "content": " FT_Face face;\n", "file_path": "legacy-c-impl/libremarkable/freetype.c", "rank": 60, "score": 37717.59893239218 }, { "content": "#define B REMARKABLE_DARKEST\n", "file_path": "legacy-c-impl/libremarkable/shapes.h", "rank": 61, "score": 37717.59893239218 }, { "content": " uint32_t top;\n", "file_path": "legacy-c-impl/libremarkable/lib.h", "rank": 62, "score": 37717.59893239218 }, { "content": " struct fb_fix_screeninfo finfo;\n", "file_path": "legacy-c-impl/libremarkable/lib.h", "rank": 63, "score": 37717.59893239218 }, { "content": "unsigned gen = 1;\n", "file_path": "legacy-c-impl/libremarkable/refresh.c", "rank": 64, "score": 37717.59893239218 }, { "content": "\tuint32_t height;\t /* height of entire buffer */\n", "file_path": "legacy-c-impl/libremarkable/lib.h", "rank": 65, "score": 37717.59893239218 }, { "content": " struct fb_var_screeninfo vinfo;\n", "file_path": "legacy-c-impl/libremarkable/lib.h", "rank": 66, "score": 37717.59893239218 }, { "content": "#define LOGBUFSIZE 512\n\n\n", "file_path": "legacy-c-impl/libremarkable/serde.c", "rank": 67, "score": 37717.59893239218 }, { "content": "void hexDump (char *desc, void *addr, int len) {\n\n int i;\n\n unsigned char buff[17];\n\n unsigned char *pc = (unsigned char*)addr;\n\n\n\n // Output description if given.\n\n if (desc != NULL)\n\n printf (\"%s:\\n\", desc);\n\n\n\n if (len == 0) {\n\n printf(\" ZERO LENGTH\\n\");\n\n return;\n\n }\n\n if (len < 0) {\n\n printf(\" NEGATIVE LENGTH: %i\\n\",len);\n\n return;\n\n }\n\n\n\n // Process every byte in the data.\n\n for (i = 0; i < len; i++) {\n\n // Multiple of 16 means new line (with line offset).\n\n\n\n if ((i % 16) == 0) {\n\n // Just don't print ASCII for the zeroth line.\n\n if (i != 0)\n\n printf (\" %s\\n\", buff);\n\n\n\n // Output the offset.\n\n printf (\" %04x \", i);\n\n }\n\n\n\n 
// Now the hex code for the specific character.\n\n printf (\" %02x\", pc[i]);\n\n\n\n // And store a printable ASCII character for later.\n\n if ((pc[i] < 0x20) || (pc[i] > 0x7e))\n\n buff[i % 16] = '.';\n\n else\n\n buff[i % 16] = pc[i];\n\n buff[(i % 16) + 1] = '\\0';\n\n }\n\n\n\n // Pad out last line if not exactly 16 characters.\n\n while ((i % 16) != 0) {\n\n printf (\" \");\n\n i++;\n\n }\n\n\n\n // And print the final ASCII bit.\n\n printf (\" %s\\n\", buff);\n", "file_path": "legacy-c-impl/spy.c", "rank": 68, "score": 37717.59893239218 }, { "content": " FT_Library library;\n", "file_path": "legacy-c-impl/libremarkable/freetype.c", "rank": 69, "score": 37717.59893239218 }, { "content": "\tuint32_t width; /* width of entire buffer */\n", "file_path": "legacy-c-impl/libremarkable/lib.h", "rank": 70, "score": 37717.59893239218 }, { "content": "#define _GNU_SOURCE\n", "file_path": "legacy-c-impl/spy.c", "rank": 71, "score": 37717.59893239218 }, { "content": "\tunsigned char red;\n", "file_path": "legacy-c-impl/libremarkable/bitmap.h", "rank": 72, "score": 37717.59893239218 }, { "content": "void print_vinfo(struct fb_var_screeninfo* v) {\n\n if (v == NULL) {\n\n printf(\"vinfo is NULL\");\n\n return;\n\n }\n\n printf(\"xres\\t\\t= %u\\tyres\\t\\t= %u\\n\", v->xres, v->yres);\n\n printf(\"xres_virtual\\t= %u\\tyres_virtual\\t= %u\\n\", v->xres_virtual, v->yres_virtual);\n\n printf(\"xoffset\\t\\t= %u\\tyoffset\\t\\t= %u\\n\", v->xoffset, v->yoffset);\n\n printf(\"bits_per_pixel\\t= %u\\tgrayscale\\t= %u\\n\", v->bits_per_pixel, v->grayscale);\n\n printcolor(red);\n\n printcolor(green);\n\n printcolor(blue);\n\n printcolor(transp);\n\n printf(\"nonstd\\t\\t= %u\\n\", v->nonstd);\n\n printf(\"activate\\t= %u\\n\", v->activate);\n\n printf(\"height\\t\\t= 0x%x\\nwidth\\t\\t= 0x%x\\n\", v->height, v->width);\n\n printf(\"accel_flags(OBSOLETE) = %u\\n\", v->accel_flags);\n\n printf(\"pixclock\t= %u\\n\", v->pixclock);\n\n printf(\"left_margin\t= %u\\n\", 
v->left_margin);\n\n printf(\"right_margin\t= %u\\n\", v->right_margin);\n\n printf(\"upper_margin\t= %u\\n\", v->upper_margin);\n\n printf(\"lower_margin\t= %u\\n\", v->lower_margin);\n\n printf(\"hsync_len\t= %u\\nvsync_len = %u\\n\", v->hsync_len, v->vsync_len);\n\n printf(\"sync\t\t= %u\\n\", v->sync);\n\n printf(\"vmode\t\t= %u\\n\", v->vmode);\n\n printf(\"rotate\t\t= %u\\n\", v->rotate);\n\n printf(\"colorspace \t= %u\\n\", v->colorspace);\n", "file_path": "legacy-c-impl/spy.c", "rank": 73, "score": 37717.59893239218 }, { "content": "#define W REMARKABLE_BRIGHTEST\n\n\n", "file_path": "legacy-c-impl/libremarkable/shapes.h", "rank": 74, "score": 37717.59893239218 }, { "content": "\tunsigned char blue;\n", "file_path": "legacy-c-impl/libremarkable/bitmap.h", "rank": 75, "score": 37717.59893239218 }, { "content": " int fd;\n", "file_path": "legacy-c-impl/libremarkable/lib.h", "rank": 76, "score": 37717.59893239218 }, { "content": " int dither_mode;\n", "file_path": "legacy-c-impl/libremarkable/lib.h", "rank": 77, "score": 36567.160951314036 }, { "content": "\tunsigned int bfOffBits;\n", "file_path": "legacy-c-impl/libremarkable/bitmap.h", "rank": 78, "score": 36565.62652256566 }, { "content": "#define GRAYSCALE_4BIT 0x3\n", "file_path": "legacy-c-impl/libremarkable/lib.h", "rank": 79, "score": 36565.62652256566 }, { "content": " unsigned tOffsetX, tOffsetY;\n", "file_path": "legacy-c-impl/libremarkable/freetype.c", "rank": 80, "score": 36565.62652256566 }, { "content": "\tbmp_pixel **img_pixels;\n", "file_path": "legacy-c-impl/libremarkable/bitmap.h", "rank": 81, "score": 36565.62652256566 }, { "content": "struct remarkable_font {\n\n FT_Library library;\n\n FT_Face face;\n\n unsigned tOffsetX, tOffsetY;\n\n unsigned target_height;\n", "file_path": "legacy-c-impl/libremarkable/freetype.c", "rank": 82, "score": 36565.62652256566 }, { "content": "#define REMARKABLE_DARKEST 0x00\n", "file_path": "legacy-c-impl/libremarkable/lib.h", "rank": 83, "score": 
36565.62652256566 }, { "content": "#define GRAYSCALE_8BIT 0x1\n", "file_path": "legacy-c-impl/libremarkable/lib.h", "rank": 84, "score": 36565.62652256566 }, { "content": "struct remarkable_font;\n", "file_path": "legacy-c-impl/libremarkable/lib.h", "rank": 85, "score": 36565.62652256566 }, { "content": " unsigned target_height;\n", "file_path": "legacy-c-impl/libremarkable/freetype.c", "rank": 86, "score": 36565.62652256566 }, { "content": "\tunsigned short biPlanes;\n", "file_path": "legacy-c-impl/libremarkable/bitmap.h", "rank": 87, "score": 36565.62652256566 }, { "content": "\tunsigned int biCompression;\n", "file_path": "legacy-c-impl/libremarkable/bitmap.h", "rank": 88, "score": 36565.62652256566 }, { "content": "\tint biWidth;\n", "file_path": "legacy-c-impl/libremarkable/bitmap.h", "rank": 89, "score": 36565.62652256566 }, { "content": "\tint biHeight;\n", "file_path": "legacy-c-impl/libremarkable/bitmap.h", "rank": 90, "score": 36565.62652256566 }, { "content": "#define FREETYPE_RIGHTSHIFT 6\n\n\n", "file_path": "legacy-c-impl/libremarkable/freetype.c", "rank": 91, "score": 36565.62652256566 }, { "content": "\tbmp_header img_header;\n", "file_path": "legacy-c-impl/libremarkable/bitmap.h", "rank": 92, "score": 36565.62652256566 }, { "content": "\tunsigned int bfReserved;\n", "file_path": "legacy-c-impl/libremarkable/bitmap.h", "rank": 93, "score": 36565.62652256566 }, { "content": "#define BMP_MAGIC 19778\n\n\n", "file_path": "legacy-c-impl/libremarkable/bitmap.h", "rank": 94, "score": 36565.62652256566 }, { "content": "\tunsigned int biSize;\n", "file_path": "legacy-c-impl/libremarkable/bitmap.h", "rank": 95, "score": 36565.62652256566 }, { "content": "#define REMARKABLE_BRIGHTEST 0xFF\n", "file_path": "legacy-c-impl/libremarkable/lib.h", "rank": 96, "score": 36565.62652256566 }, { "content": "\tunsigned int bfSize;\n", "file_path": "legacy-c-impl/libremarkable/bitmap.h", "rank": 97, "score": 36565.62652256566 }, { "content": "use 
framebuffer::cgmath;\n\nuse framebuffer::common::{DISPLAYHEIGHT, DISPLAYWIDTH, MTHEIGHT, MTWIDTH};\n\n\n\nuse evdev::raw::input_event;\n\nuse input::{InputDeviceState, InputEvent};\n\nuse std::sync::atomic::{AtomicBool, AtomicU16, AtomicU8, Ordering};\n\n\n\nconst MT_HSCALAR: f32 = (DISPLAYWIDTH as f32) / (MTWIDTH as f32);\n\nconst MT_VSCALAR: f32 = (DISPLAYHEIGHT as f32) / (MTHEIGHT as f32);\n\n\n\npub struct MultitouchState {\n\n last_pressure: AtomicU8,\n\n last_touch_size: AtomicU8,\n\n last_touch_id: AtomicU16,\n\n last_x: AtomicU16,\n\n last_y: AtomicU16,\n\n last_finger_id: AtomicU16,\n\n currently_touching: AtomicBool,\n\n}\n\n\n", "file_path": "src/input/multitouch.rs", "rank": 99, "score": 23.443122329298703 } ]
Rust
crypto/src/verify.rs
AllSafeCybercurity/RClient
88aa5fe784621041b05038ae62139398a34b74bc
pub trait USizeExt { fn constrain_value(&self) -> usize; } pub trait SliceExt { fn constrain_value(&self) -> usize; } impl USizeExt for usize { fn constrain_value(&self) -> usize { *self } } impl<T: AsRef<[u8]>> SliceExt for T { fn constrain_value(&self) -> usize { self.as_ref().len() } } #[macro_export] macro_rules! verify_keygen { ($size:expr => $buf:expr) => {{ #[allow(unused_imports)] use $crate::verify::{SliceExt, USizeExt}; let error = if $buf.constrain_value() != $size { Err("Invalid buffer size") } else { Ok(()) }; error.map_err(|e| $crate::Error::CryptoError(e.into()))?; }}; } #[macro_export] macro_rules! verify_auth { ($key:expr => [$key_size:expr], => [$buf:expr, $tag_size:expr]) => {{ #[allow(unused_imports)] use $crate::verify::{SliceExt, USizeExt}; let error = if $key.constrain_value() != $key_size { Err("Invalid key length") } else if $buf.constrain_value() < $tag_size { Err("Buffer is too small") } else { Ok(()) }; error.map_err(|e| $crate::Error::CryptoError(e.into()))?; }}; } #[macro_export] macro_rules! verify_encrypt { ($key:expr => [$key_size:expr], $nonce:expr => [$nonce_size:expr], $plain:expr => [$buf:expr, $plain_limit:expr]) => {{ #[allow(unused_imports)] use $crate::verify::{SliceExt, USizeExt}; let error = if $key.constrain_value() != $key_size { Err("Invalid key length") } else if $nonce.constrain_value() != $nonce_size { Err("Invalid nonce length") } else if $plain.constrain_value() > $plain_limit { Err("Too much data") } else if $plain.constrain_value() > $buf.constrain_value() { Err("Buffer is too small") } else { Ok(()) }; error.map_err(|e| $crate::Error::CryptoError(e.into()))?; }}; } #[macro_export] macro_rules! 
verify_decrypt { ($key:expr => [$key_size:expr], $nonce:expr => [$nonce_size:expr], $cipher:expr => [$buf:expr, $cipher_limit:expr]) => {{ #[allow(unused_imports)] use $crate::verify::{SliceExt, USizeExt}; let error = if $key.constrain_value() != $key_size { Err("Invalid key length") } else if $nonce.constrain_value() != $nonce_size { Err("Invalid nonce length") } else if $cipher.constrain_value() > $cipher_limit { Err("Too much data") } else if $cipher.constrain_value() > $buf.constrain_value() { Err("Buffer is too small") } else { Ok(()) }; error.map_err(|e| $crate::Error::CryptoError(e.into()))?; }}; } #[macro_export] macro_rules! verify_seal { ($key:expr => [$key_size:expr], $nonce:expr => [$nonce_const:expr], $plain:expr => [$buf:expr, $plain_limit:expr]) => {{ #[allow(unused_imports)] use $crate::verify::{SliceExt, USizeExt}; let error = if $key.constrain_value() != $key_size { Err("Invalid key length") } else if $nonce.constrain_value() != $nonce_const { Err("Invalid nonce length") } else if $plain.constrain_value() > $plain_limit { Err("Too much data") } else if $buf.constrain_value() < $plain.constrain_value() + CHACHAPOLY_TAG { Err("Buffer is too small") } else { Ok(()) }; error.map_err(|e| $crate::Error::CryptoError(e.into()))?; }}; } #[macro_export] macro_rules! 
verify_open { ($key:expr => [$key_size:expr], $nonce:expr => [$nonce_size:expr], $cipher:expr => [$buf:expr, $tag_size:expr, $cipher_limit:expr]) => {{ #[allow(unused_imports)] use $crate::verify::{SliceExt, USizeExt}; let error = if $key.constrain_value() != $key_size { Err("Invalid key length") } else if $nonce.constrain_value() != $nonce_size { Err("Invalid nonce length") } else if $cipher.constrain_value() > $cipher_limit { Err("Too much data") } else if $cipher.constrain_value() < $tag_size { return Err($crate::Error::InvalidData.into()); } else if $buf.constrain_value() + $tag_size < $cipher.constrain_value() { Err("Buffer is too small") } else { Ok(()) }; error.map_err(|e| $crate::Error::CryptoError(e.into()))?; }}; }
pub trait USizeExt { fn constrain_value(&self) -> usize; } pub trait SliceExt { fn constrain_value(&self) -> usize; } impl USizeExt for usize { fn constrain_value(&self) -> usize { *self } } impl<T: AsRef<[u8]>> SliceExt for T { fn constrain_value(&self) -> usize { self.as_ref().len() } } #[macro_export] macro_rules! verify_keygen { ($size:expr => $buf:expr) => {{ #[allow(unused_imports)] use $crate::v
("Buffer is too small") } else { Ok(()) }; error.map_err(|e| $crate::Error::CryptoError(e.into()))?; }}; } #[macro_export] macro_rules! verify_encrypt { ($key:expr => [$key_size:expr], $nonce:expr => [$nonce_size:expr], $plain:expr => [$buf:expr, $plain_limit:expr]) => {{ #[allow(unused_imports)] use $crate::verify::{SliceExt, USizeExt}; let error = if $key.constrain_value() != $key_size { Err("Invalid key length") } else if $nonce.constrain_value() != $nonce_size { Err("Invalid nonce length") } else if $plain.constrain_value() > $plain_limit { Err("Too much data") } else if $plain.constrain_value() > $buf.constrain_value() { Err("Buffer is too small") } else { Ok(()) }; error.map_err(|e| $crate::Error::CryptoError(e.into()))?; }}; } #[macro_export] macro_rules! verify_decrypt { ($key:expr => [$key_size:expr], $nonce:expr => [$nonce_size:expr], $cipher:expr => [$buf:expr, $cipher_limit:expr]) => {{ #[allow(unused_imports)] use $crate::verify::{SliceExt, USizeExt}; let error = if $key.constrain_value() != $key_size { Err("Invalid key length") } else if $nonce.constrain_value() != $nonce_size { Err("Invalid nonce length") } else if $cipher.constrain_value() > $cipher_limit { Err("Too much data") } else if $cipher.constrain_value() > $buf.constrain_value() { Err("Buffer is too small") } else { Ok(()) }; error.map_err(|e| $crate::Error::CryptoError(e.into()))?; }}; } #[macro_export] macro_rules! 
verify_seal { ($key:expr => [$key_size:expr], $nonce:expr => [$nonce_const:expr], $plain:expr => [$buf:expr, $plain_limit:expr]) => {{ #[allow(unused_imports)] use $crate::verify::{SliceExt, USizeExt}; let error = if $key.constrain_value() != $key_size { Err("Invalid key length") } else if $nonce.constrain_value() != $nonce_const { Err("Invalid nonce length") } else if $plain.constrain_value() > $plain_limit { Err("Too much data") } else if $buf.constrain_value() < $plain.constrain_value() + CHACHAPOLY_TAG { Err("Buffer is too small") } else { Ok(()) }; error.map_err(|e| $crate::Error::CryptoError(e.into()))?; }}; } #[macro_export] macro_rules! verify_open { ($key:expr => [$key_size:expr], $nonce:expr => [$nonce_size:expr], $cipher:expr => [$buf:expr, $tag_size:expr, $cipher_limit:expr]) => {{ #[allow(unused_imports)] use $crate::verify::{SliceExt, USizeExt}; let error = if $key.constrain_value() != $key_size { Err("Invalid key length") } else if $nonce.constrain_value() != $nonce_size { Err("Invalid nonce length") } else if $cipher.constrain_value() > $cipher_limit { Err("Too much data") } else if $cipher.constrain_value() < $tag_size { return Err($crate::Error::InvalidData.into()); } else if $buf.constrain_value() + $tag_size < $cipher.constrain_value() { Err("Buffer is too small") } else { Ok(()) }; error.map_err(|e| $crate::Error::CryptoError(e.into()))?; }}; }
erify::{SliceExt, USizeExt}; let error = if $buf.constrain_value() != $size { Err("Invalid buffer size") } else { Ok(()) }; error.map_err(|e| $crate::Error::CryptoError(e.into()))?; }}; } #[macro_export] macro_rules! verify_auth { ($key:expr => [$key_size:expr], => [$buf:expr, $tag_size:expr]) => {{ #[allow(unused_imports)] use $crate::verify::{SliceExt, USizeExt}; let error = if $key.constrain_value() != $key_size { Err("Invalid key length") } else if $buf.constrain_value() < $tag_size { Err
random
[ { "content": "/// A Hash interface\n\npub trait Hash {\n\n /// Get the information block that describes the hash\n\n fn info(&self) -> HashInfo;\n\n /// hashes data and returns the hash length. `buf` contains the outgoing hashed data. \n\n fn hash(&self, buf: &mut [u8], data: &[u8]) -> Result<usize, Box<dyn Error + 'static>>;\n\n}\n\n\n", "file_path": "primitives/src/hash.rs", "rank": 2, "score": 113696.80324992278 }, { "content": "/// A PBKDF\n\npub trait Pbkdf {\n\n /// returns the info of the PBKDF\n\n fn info(&self) -> PbkdfInfo;\n\n /// fills the buffer with bytes derived from the password parameterized by the CPU cost.\n\n fn derive(\n\n &self,\n\n buf: &mut [u8],\n\n password: &[u8],\n\n salt: &[u8],\n\n cpu_cost: u64,\n\n ) -> Result<(), Box<dyn Error + 'static>>;\n\n}\n\n\n", "file_path": "primitives/src/pbkdf.rs", "rank": 3, "score": 113696.80324992278 }, { "content": "/// a trait to make types base64 encodable\n\npub trait Base64Encodable {\n\n fn base64(&self) -> String;\n\n}\n\n\n", "file_path": "vault/src/base64.rs", "rank": 4, "score": 111052.60077635106 }, { "content": "/// a Random Number Generator\n\npub trait SecureRng {\n\n /// fills the buffer with secure random data. `buf` is the output buffer.\n\n fn random(&mut self, buf: &mut [u8]) -> Result<(), Box<dyn Error + 'static>>;\n\n}\n\n\n", "file_path": "primitives/src/rng.rs", "rank": 5, "score": 111048.23881307687 }, { "content": "/// A secret key generation algorithm\n\npub trait SecretKeyGen {\n\n /// generate a new private key in the buffer. 
`buf` is the output buffer.\n\n fn new_secret_key(&self, buf: &mut [u8], rng: &mut dyn SecureRng) -> Result<usize, Box<dyn Error + 'static>>;\n\n}\n\n\n", "file_path": "primitives/src/rng.rs", "rank": 6, "score": 108605.61609902351 }, { "content": "// extension for JsonValue\n\npub trait JsonValueExt {\n\n // decode string\n\n fn check_string(&self) -> String;\n\n // hex-decode string into byte vector\n\n fn check_bytes(&self) -> Vec<u8>;\n\n // check if null\n\n fn check_array_iter(&self) -> Members;\n\n // get usize if not null\n\n fn option_usize(&self, def: usize) -> usize;\n\n // get string if not null\n\n fn option_string(&self, def: impl ToString) -> String;\n\n}\n\n\n\nimpl JsonValueExt for JsonValue {\n\n fn check_string(&self) -> String {\n\n self.as_str().unwrap().to_string()\n\n }\n\n\n\n fn check_bytes(&self) -> Vec<u8> {\n\n let encode = self.as_str().unwrap();\n", "file_path": "crypto/tests/common.rs", "rank": 7, "score": 108605.61609902351 }, { "content": "/// A public key generation algorithm\n\npub trait PublicKeyGen {\n\n /// generate a new public key in the buffer. 
`buf` is the output buffer.\n\n fn get_pub_key(&self, buf: &mut [u8], secret_key: &[u8]) -> Result<usize, Box<dyn Error + 'static>>;\n\n}\n", "file_path": "primitives/src/rng.rs", "rank": 8, "score": 108605.61609902351 }, { "content": "/// a typed transaction\n\npub trait TypedTransaction {\n\n fn type_id() -> Val;\n\n}\n\n\n\n/// a revocation transaction\n\n#[repr(packed)]\n\n#[derive(Debug)]\n\npub struct RevocationTransaction {\n\n /// transaction type\n\n #[allow(unused)]\n\n pub type_id: Val,\n\n /// owner id\n\n #[allow(unused)]\n\n pub owner: Id,\n\n /// counter\n\n #[allow(unused)]\n\n pub ctr: Val,\n\n /// unique id for transaction\n\n pub id: Id,\n\n}\n", "file_path": "vault/src/types/transactions.rs", "rank": 9, "score": 108605.61609902351 }, { "content": "/// open data and decrypt it in place.\n\npub fn chachapoly_open(\n\n data: &mut [u8],\n\n tag: &[u8],\n\n ad: &[u8],\n\n key: &[u8],\n\n nonce: &[u8],\n\n) -> Result<(), Box<dyn Error + 'static>> {\n\n // build footer\n\n let mut foot = Vec::with_capacity(16);\n\n foot.extend_from_slice(&(ad.len() as u64).to_le_bytes());\n\n foot.extend_from_slice(&(data.len() as u64).to_le_bytes());\n\n\n\n // compute poly key and auth tag\n\n let (mut pkey, mut vfy_tag) = (vec![0; 32], vec![0; 16]);\n\n ChaCha20Ietf::xor(key, nonce, 0, &mut pkey);\n\n Poly1305::chachapoly_auth(&mut vfy_tag, ad, data, &foot, &pkey);\n\n\n\n // validate tags\n\n if eq_const_time!(&tag, &vfy_tag) {\n\n ChaCha20Ietf::xor(key, nonce, 1, data);\n", "file_path": "crypto/src/chachapoly_ietf.rs", "rank": 10, "score": 105453.44542917758 }, { "content": "/// a trait to make types base64 decodable\n\npub trait Base64Decodable: Sized {\n\n fn from_base64(base: impl AsRef<[u8]>) -> crate::Result<Self>;\n\n}\n\n\n\nimpl<T: AsRef<[u8]>> Base64Encodable for T {\n\n fn base64(&self) -> String {\n\n Base64::encode_data(self.as_ref())\n\n }\n\n}\n\n\n\nimpl Base64Decodable for Vec<u8> {\n\n fn from_base64(base: impl AsRef<[u8]>) -> crate::Result<Self> 
{\n\n Base64::decode_data(base.as_ref())\n\n }\n\n}\n", "file_path": "vault/src/base64.rs", "rank": 11, "score": 104264.28491825165 }, { "content": "/// A memory hardened PBKDF\n\npub trait StatelessPbkdf: Pbkdf {\n\n /// fills the buffer with bytes derived from the password parameterized by the CPU cost.\n\n fn derive_stateless(\n\n &self,\n\n buf: &mut [u8],\n\n password: &[u8],\n\n salt: &[u8],\n\n cpu_cost: u64,\n\n memory_cost: u64,\n\n parallelism: u64,\n\n ) -> Result<(), Box<dyn Error + 'static>>;\n\n}\n", "file_path": "primitives/src/pbkdf.rs", "rank": 12, "score": 104259.92295497749 }, { "content": "pub trait ReadSecret<S>\n\nwhere\n\n S: Zeroize,\n\n{\n\n fn read_secret(&self) -> &S;\n\n}\n\n\n", "file_path": "client/src/secret.rs", "rank": 13, "score": 104259.92295497749 }, { "content": "/// an AEAD Extension for the Cipher\n\npub trait AeadCipher: Cipher {\n\n /// Seals the Plaintext bytes in place with AEAD and returns the Cipher length. `buf` contains the incoming\n\n /// plaintext buffer\n\n fn seal(\n\n &self,\n\n buf: &mut [u8],\n\n plain_len: usize,\n\n ad: &[u8],\n\n key: &[u8],\n\n nonce: &[u8],\n\n ) -> Result<usize, Box<dyn Error + 'static>>;\n\n\n\n /// Seals the plaintext and returns the Cipher's length using AEAD. 
`buf` contains the incoming plaintext\n\n /// buffer\n\n fn seal_with(\n\n &self,\n\n buf: &mut [u8],\n\n plain: &[u8],\n\n ad: &[u8],\n\n key: &[u8],\n", "file_path": "primitives/src/cipher.rs", "rank": 14, "score": 104259.92295497749 }, { "content": "pub trait SerializeSecret: Serialize {}\n\n\n\npub struct Secret<S>\n\nwhere\n\n S: Zeroize,\n\n{\n\n value: S,\n\n}\n\n\n\nimpl<S> Secret<S>\n\nwhere\n\n S: Zeroize,\n\n{\n\n pub fn new(value: S) -> Self {\n\n Self { value }\n\n }\n\n}\n\n\n\nimpl<S> ReadSecret<S> for Secret<S>\n\nwhere\n", "file_path": "client/src/secret.rs", "rank": 15, "score": 104259.92295497749 }, { "content": "/// A key derivation function interface\n\npub trait KeyDervFunc {\n\n /// returns the information block about the key derivation function\n\n fn info(&self) -> KeyDervFuncInfo;\n\n /// derive bytes from the base key with salt and info. Outputs to the buffer `buf`.\n\n fn derive(&self, buf: &mut [u8], base_key: &[u8], salt: &[u8], info: &[u8])\n\n -> Result<(), Box<dyn Error + 'static>>;\n\n}\n", "file_path": "primitives/src/key_derv_func.rs", "rank": 16, "score": 104249.04838439709 }, { "content": "/// A one shot stateless cipher. Implements the `SecretKeyGen` trait.\n\npub trait Cipher: SecretKeyGen {\n\n /// returns cipher info block\n\n fn info(&self) -> CipherInfo;\n\n /// predicts the max encrypted cipher length given a `plaintext_len` (in bytes)\n\n fn predict_encrypted_max(&self, plain_len: usize) -> usize;\n\n /// encrypts the plaintext in-place and returns the cipher's length. `buf` contains the incoming plaintext buffer.\n\n fn encrypt(\n\n &self,\n\n buf: &mut [u8],\n\n plain_len: usize,\n\n key: &[u8],\n\n nonce: &[u8],\n\n ) -> Result<usize, Box<dyn Error + 'static>>;\n\n\n\n /// encrypts the plaintext and returns the plaintext's length. 
`buf` contains the incoming plaintext buffer.\n\n fn encrypt_to(\n\n &self,\n\n buf: &mut [u8],\n\n plain: &[u8],\n\n key: &[u8],\n", "file_path": "primitives/src/cipher.rs", "rank": 17, "score": 102004.27126629477 }, { "content": "/// a variable length hash\n\npub trait VarLenHash: Hash {\n\n /// hashes the data and returns the hash length. `buf` contains the outgoing hashed data.\n\n fn var_len_hash(&self, buf: &mut [u8], data: &[u8]) -> Result<usize, Box<dyn Error + 'static>>;\n\n}\n", "file_path": "primitives/src/hash.rs", "rank": 18, "score": 102000.12105742817 }, { "content": "/// A provider interface between the vault and a crypto box. See libsodium's [secretbox](https://libsodium.gitbook.io/doc/secret-key_cryptography/secretbox) for an example.\n\npub trait BoxProvider: Sized {\n\n /// function for the key length of the crypto box\n\n fn box_key_len() -> usize;\n\n /// gets the crypto box's overhead\n\n fn box_overhead() -> usize;\n\n\n\n /// seals some data into the crypto box using the `key` and the `ad`\n\n fn box_seal(key: &Key<Self>, ad: &[u8], data: &[u8]) -> crate::Result<Vec<u8>>;\n\n\n\n /// opens a crypto box to get data using the `key` and the `ad`.\n\n fn box_open(key: &Key<Self>, ad: &[u8], data: &[u8]) -> crate::Result<Vec<u8>>;\n\n\n\n /// fills a buffer `buf` with secure random bytes.\n\n fn random_buf(buf: &mut [u8]) -> crate::Result<()>;\n\n\n\n /// creates a vector with secure random bytes based off of an inputted length `len`.\n\n fn random_vec(len: usize) -> crate::Result<Vec<u8>> {\n\n let mut buf = vec![0; len];\n\n Self::random_buf(&mut buf)?;\n\n Ok(buf)\n", "file_path": "vault/src/crypto_box.rs", "rank": 19, "score": 102000.12105742817 }, { "content": "/// A deterministic Random Number Generator Extension\n\npub trait DeterministicRng: SecureRng {\n\n /// reseeds the random number generator with a seed.\n\n fn reseed(&mut self, seed: &[u8]) -> Result<(), Box<dyn Error + 'static>>;\n\n}\n\n\n", "file_path": "primitives/src/rng.rs", 
"rank": 20, "score": 102000.12105742817 }, { "content": "// result extension\n\npub trait ResultExt<T, E> {\n\n // unwraps error and panics\n\n fn error_or(self, msg: impl ToString) -> E;\n\n}\n\n\n\nimpl<T, E> ResultExt<T, E> for Result<T, E> {\n\n fn error_or(self, msg: impl ToString) -> E {\n\n match self {\n\n Err(e) => e,\n\n _ => panic!(msg.to_string()),\n\n }\n\n }\n\n}\n", "file_path": "crypto/tests/common.rs", "rank": 21, "score": 98372.9547086505 }, { "content": "pub trait CloneSecret: Clone + Zeroize {}\n\n\n", "file_path": "client/src/secret.rs", "rank": 22, "score": 98372.9547086505 }, { "content": "/// a Message authentication interface (MAC) that is stateless and can be a one shot.\n\npub trait MessageAuthCode: SecretKeyGen {\n\n /// get the info about the MAC\n\n fn info(&self) -> MessageAuthCodeInfo;\n\n /// authenticate the `data` using the `key` through the `buf` buffer. Returns the MAC length in a `Result`\n\n fn auth(&self, buf: &mut [u8], data: &[u8], key: &[u8]) -> Result<usize, Box<dyn Error + 'static>>;\n\n}\n\n\n", "file_path": "primitives/src/auth.rs", "rank": 23, "score": 97952.59615860619 }, { "content": "/// an extension for a Variable length Message Authentication Code (MAC).\n\npub trait VarLenMessageAuthCode: MessageAuthCode {\n\n /// Authenticates the `data` using a `key` through the `buf` buffer. Returns the MAC's length in a `Result`.\n\n fn varlen_auth(&self, buf: &mut [u8], data: &[u8], key: &[u8]) -> Result<usize, Box<dyn Error + 'static>>;\n\n}\n", "file_path": "primitives/src/auth.rs", "rank": 24, "score": 94432.06709448718 }, { "content": "pub trait Sign: SecretKeyGen + PublicKeyGen {\n\n /// returns info about the signer\n\n fn info(&self) -> SignInfo;\n\n /// signs data in the buffer using the secret key and returns the signature length.\n\n fn sign(&self, buf: &mut [u8], data: &[u8], secret_key: &[u8]) -> Result<usize, Box<dyn Error + 'static>>;\n\n /// verify the signature for the data with the public key. 
Returns an error if the signature was invalid.\n\n fn verify(&self, data: &[u8], sig: &[u8], public_key: &[u8]) -> Result<(), Box<dyn Error + 'static>>;\n\n}\n", "file_path": "primitives/src/signing.rs", "rank": 25, "score": 92505.93997411593 }, { "content": "/// a view over raw data.\n\npub trait AsView<T: Sized>: AsRef<[u8]> {\n\n /// creates a view over `self`.\n\n fn view(&self) -> &T {\n\n // get the bytes\n\n let bytes = self.as_ref();\n\n // validate the bytes\n\n assert!(mem::size_of::<T>() <= bytes.len(), \"Can't create view over this memory\");\n\n // get the pointer\n\n let bytes = bytes.as_ptr();\n\n // validate alignment\n\n assert_eq!(\n\n bytes.align_offset(mem::align_of::<T>()),\n\n 0,\n\n \"View's offset is incorrect\"\n\n );\n\n // cast the pointer\n\n unsafe { bytes.cast::<T>().as_ref() }.unwrap()\n\n }\n\n}\n\n\n", "file_path": "vault/src/types.rs", "rank": 26, "score": 90565.75608998825 }, { "content": "/// get the snapshot dir of the user's device\n\npub fn snapshot_dir() -> crate::Result<PathBuf> {\n\n let home_dir = home_dir()?;\n\n let snapshot_dir = home_dir.join(\"snapshots\");\n\n\n\n verify_or_create(&snapshot_dir)?;\n\n\n\n Ok(snapshot_dir)\n\n}\n\n\n", "file_path": "snapshot/src/files.rs", "rank": 27, "score": 88764.29280683992 }, { "content": "/// get the home directory of the user's device\n\npub fn home_dir() -> crate::Result<PathBuf> {\n\n let home = match std::env::var(\"STRONGHOLD\") {\n\n Ok(h) => h.into(),\n\n Err(_) => dirs::home_dir().unwrap(),\n\n };\n\n let home_dir = home.join(format!(\".{}\", \"engine\"));\n\n\n\n verify_or_create(&home_dir)?;\n\n\n\n Ok(home_dir)\n\n}\n\n\n", "file_path": "snapshot/src/files.rs", "rank": 28, "score": 88764.29280683992 }, { "content": "/// a mutable view over raw data.\n\npub trait AsViewMut<T: Sized>: AsMut<[u8]> {\n\n /// creates a mutable view over `self`.\n\n fn view_mut(&mut self) -> &mut T {\n\n // get bytes\n\n let bytes = self.as_mut();\n\n // validate bytes\n\n 
assert!(mem::size_of::<T>() <= bytes.len(), \"Can't create view over this memory\");\n\n // get mute pointer\n\n let bytes = bytes.as_mut_ptr();\n\n // validate alignment\n\n assert_eq!(\n\n bytes.align_offset(mem::align_of::<T>()),\n\n 0,\n\n \"View's offset is incorrect\"\n\n );\n\n\n\n // cast mutable pointer\n\n unsafe { bytes.cast::<T>().as_mut() }.unwrap()\n\n }\n\n}\n", "file_path": "vault/src/types.rs", "rank": 29, "score": 88614.99700824339 }, { "content": "// Loop until there is a Result.\n\npub fn send_until_success(req: CRequest) -> CResult {\n\n loop {\n\n match send(req.clone()) {\n\n Some(result) => {\n\n break result;\n\n }\n\n None => thread::sleep(Duration::from_millis(50)),\n\n }\n\n }\n\n}\n", "file_path": "examples/commandline/src/connection.rs", "rank": 30, "score": 85270.8104738264 }, { "content": "// send a request until there is a response - emulates network\n\npub fn send_until_success(req: TransactionRequest) -> TransactionResult {\n\n loop {\n\n match send(req.clone()) {\n\n Some(result) => break result,\n\n None => thread::sleep(Duration::from_millis(Env::retry_delay())),\n\n }\n\n }\n\n}\n", "file_path": "vault/fuzz/src/connection.rs", "rank": 31, "score": 85270.8104738264 }, { "content": "// resolve the requests into responses.\n\npub fn send(req: CRequest) -> Option<CResult> {\n\n let result = match req {\n\n // if the request is a list, get the keys from the map and put them into a ListResult.\n\n CRequest::List => {\n\n let entries = State::storage_map()\n\n .read()\n\n .expect(line_error!())\n\n .keys()\n\n .cloned()\n\n .collect();\n\n\n\n CResult::List(ListResult::new(entries))\n\n }\n\n // on write, write data to the map and send back a Write result.\n\n CRequest::Write(write) => {\n\n State::storage_map()\n\n .write()\n\n .expect(line_error!())\n\n .insert(write.id().to_vec(), write.data().to_vec());\n\n\n", "file_path": "examples/commandline/src/connection.rs", "rank": 32, "score": 82780.63629950256 }, { "content": "/// trait 
for encryptable data\n\npub trait Encrypt<T: From<Vec<u8>>>: AsRef<[u8]> {\n\n /// encrypts a raw data and creates a type T from the ciphertext\n\n fn encrypt<B: BoxProvider>(&self, key: &Key<B>, ad: &[u8]) -> crate::Result<T> {\n\n let sealed = B::box_seal(key, ad, self.as_ref())?;\n\n Ok(T::from(sealed))\n\n }\n\n}\n\n\n", "file_path": "vault/src/crypto_box.rs", "rank": 33, "score": 82508.63359189167 }, { "content": "pub trait Bucket<P: BoxProvider + Send + Sync + Clone + 'static> {\n\n fn create_record(&mut self, uid: Id, key: Key<P>, payload: Vec<u8>) -> Option<Id>;\n\n fn add_vault(&mut self, key: &Key<P>, uid: Id);\n\n fn read_record(&mut self, uid: Id, key: Key<P>);\n\n fn garbage_collect(&mut self, uid: Id, key: Key<P>);\n\n fn revoke_record(&mut self, uid: Id, tx_id: Id, key: Key<P>);\n\n fn list_all_valid_by_key(&mut self, key: Key<P>);\n\n fn offload_data(self) -> (Vec<Key<P>>, HashMap<Vec<u8>, Vec<u8>>);\n\n}\n\n\n\nimpl<P: BoxProvider + Clone + Send + Sync + 'static> Blob<P> {\n\n pub fn new() -> Self {\n\n let cache = Cache::new();\n\n let vaults = DashMap::new();\n\n\n\n Self { cache, vaults }\n\n }\n\n\n\n pub fn new_from_snapshot(snapshot: Snapshot<P>) -> Self {\n\n let cache = Cache::new();\n", "file_path": "client/src/data.rs", "rank": 34, "score": 80803.11820767868 }, { "content": "pub fn deserialize_from_snapshot(snapshot: &PathBuf, pass: &str) -> Client<Provider> {\n\n let mut buffer = Vec::new();\n\n\n\n let mut file = OpenOptions::new()\n\n .read(true)\n\n .open(snapshot)\n\n .expect(\"Unable to access the snapshot. 
Make sure it exists.\");\n\n\n\n decrypt_snapshot(&mut file, &mut buffer, pass.as_bytes());\n\n\n\n let snapshot: Snapshot<Provider> = bincode::deserialize(&buffer[..]).expect(\"Unable to deserialize data\");\n\n\n\n Client::<Provider>::new_from_snapshot(snapshot)\n\n}\n\n\n", "file_path": "client/src/snap.rs", "rank": 35, "score": 77146.9804319684 }, { "content": "// deseralize a hashmap\n\npub fn deserialize_buffer(bytes: &[u8]) -> HashMap<Vec<u8>, Vec<u8>> {\n\n let mut map = HashMap::new();\n\n\n\n let mut left = &bytes[..];\n\n while !left.is_empty() {\n\n let k = read_buffer(&mut left);\n\n let v = read_buffer(&mut left);\n\n map.insert(k, v);\n\n }\n\n\n\n map\n\n}\n\n\n", "file_path": "snapshot/src/serialize.rs", "rank": 36, "score": 73991.57527089701 }, { "content": "/// HChaCha20 implementation\n\npub fn h_chacha20_hash(key: &[u8], nonce: &[u8], buf: &mut [u8]) {\n\n // initialize state\n\n let mut state = vec![0u32; 16];\n\n (0..4).for_each(|i| state[i] = BASIS[i]);\n\n (4..12).for_each(|i| state[i] = read32_little_endian!(&key[(i - 4) * 4..]));\n\n (12..16).for_each(|i| state[i] = read32_little_endian!(&nonce[(i - 12) * 4..]));\n\n\n\n // run the rounds\n\n chacha20_rounds(&mut state);\n\n\n\n // write to the output\n\n let (buf_a, buf_b) = buf.split_at_mut(16);\n\n (0..4).for_each(|i| write32_little_endian!(state[i] => &mut buf_a[i* 4..]));\n\n (12..16).for_each(|i| write32_little_endian!(state[i] => &mut buf_b[(i - 12) * 4..]));\n\n}\n\n\n", "file_path": "crypto/src/internal/chacha.rs", "rank": 37, "score": 73991.57527089701 }, { "content": "/// finishes authentication\n\npub fn poly1305_finish(tag: &mut [u8], a: &mut [u32], s: &[u32]) {\n\n // modular reduction\n\n let mut c;\n\n c = shift_right!(a[1], 26);\n\n a[1] = and!(a[1], 0x3ffffff);\n\n a[2] = add!(a[2], c);\n\n c = shift_right!(a[2], 26);\n\n a[2] = and!(a[2], 0x3ffffff);\n\n a[3] = add!(a[3], c);\n\n c = shift_right!(a[3], 26);\n\n a[3] = and!(a[3], 0x3ffffff);\n\n a[4] = add!(a[4], c);\n\n 
c = shift_right!(a[4], 26);\n\n a[4] = and!(a[4], 0x3ffffff);\n\n a[0] = add!(a[0], mult!(c, 5));\n\n c = shift_right!(a[0], 26);\n\n a[0] = and!(a[0], 0x3ffffff);\n\n a[1] = add!(a[1], c);\n\n\n\n // reduce if values is in the range (2^130-5, 2^130]\n", "file_path": "crypto/src/internal/poly.rs", "rank": 38, "score": 73990.55680977872 }, { "content": "/// Trait for decryptable data\n\npub trait Decrypt<E, T: TryFrom<Vec<u8>, Error = E>>: AsRef<[u8]> {\n\n /// decrypts raw data and creates a new type T from the plaintext\n\n fn decrypt<B: BoxProvider>(&self, key: &Key<B>, ad: &[u8]) -> crate::Result<T> {\n\n let opened = B::box_open(key, ad, self.as_ref())?;\n\n Ok(T::try_from(opened).map_err(|_| crate::Error::DatabaseError(String::from(\"Invalid Entry\")))?)\n\n }\n\n}\n", "file_path": "vault/src/crypto_box.rs", "rank": 39, "score": 71558.20464634217 }, { "content": "/// serialize a hashmap\n\npub fn serialize_map(map: &HashMap<Vec<u8>, Vec<u8>>) -> Vec<u8> {\n\n map.iter().fold(Vec::new(), |mut acc, (k, v)| {\n\n acc.extend(&k.len().to_le_bytes());\n\n acc.extend(k.as_slice());\n\n acc.extend(&v.len().to_le_bytes());\n\n acc.extend(v.as_slice());\n\n acc\n\n })\n\n}\n\n\n", "file_path": "snapshot/src/serialize.rs", "rank": 40, "score": 71108.86541028762 }, { "content": "pub fn serialize_to_snapshot(snapshot: &PathBuf, pass: &str, mut client: Client<Provider>) {\n\n let mut file = OpenOptions::new()\n\n .write(true)\n\n .create(true)\n\n .open(snapshot)\n\n .expect(\"Unable to access snapshot. 
Make sure that it exists.\");\n\n\n\n file.set_len(0).expect(\"unable to clear the contents of the file file\");\n\n\n\n let snap: Snapshot<Provider> = Snapshot::new(&mut client);\n\n\n\n let data: Vec<u8> = bincode::serialize(&snap).expect(\"Couldn't serialize the client data\");\n\n encrypt_snapshot(data, &mut file, pass.as_bytes()).expect(\"Couldn't write to the snapshot\");\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::data::Blob;\n\n use crate::line_error;\n", "file_path": "client/src/snap.rs", "rank": 41, "score": 71108.86541028762 }, { "content": "/// calculates the nth ChaCha20 block into a buffer\n\npub fn chacha20_block(key: &[u8], nonce: &[u8], n: u64, buf: &mut [u8]) {\n\n // create buffer\n\n let mut state = vec![0u32; 32];\n\n let (init, mixed) = state.split_at_mut(16);\n\n\n\n // initialize buffer\n\n (0..4).for_each(|i| init[i] = BASIS[i]);\n\n (4..12).for_each(|i| init[i] = read32_little_endian!(&key[(i - 4) * 4..]));\n\n split64_little_endian!(n => &mut init[12..]);\n\n (14..16).for_each(|i| init[i] = read32_little_endian!(&nonce[(i - 14) * 4..]));\n\n\n\n // mix the buffer\n\n mixed.copy_from_slice(init);\n\n chacha20_rounds(mixed);\n\n\n\n // write the mixed state into the buffer\n\n (0..16).for_each(|i| mixed[i] = add!(mixed[i], init[i]));\n\n (0..16).for_each(|i| write32_little_endian!(mixed[i] => &mut buf[i * 4..]));\n\n}\n", "file_path": "crypto/src/internal/chacha.rs", "rank": 42, "score": 69778.59063827587 }, { "content": "/// encrypt an input with a password in using secretstream.\n\npub fn encrypt_snapshot(input: Vec<u8>, out: &mut File, password: &[u8]) -> crate::Result<()> {\n\n // convert vector to slice\n\n let mut slice = input.as_slice();\n\n // setup buffer\n\n let mut buf = [0; CHUNK_SIZE];\n\n // get input length\n\n let mut input_len = slice.len();\n\n\n\n // write the signature to the file first.\n\n out.write_all(&SIGN)?;\n\n out.write_all(&VERSION)?;\n\n\n\n // get the salt and write it to the 
file.\n\n let salt = generate_salt()?;\n\n out.write_all(&salt.0)?;\n\n\n\n // derive a key from the password and salt.\n\n let key = derive_key_from_password(password, &salt)?;\n\n // create the stream and header from the key.\n\n let (mut stream, header) = create_stream(&key)?;\n", "file_path": "snapshot/src/logic.rs", "rank": 43, "score": 68584.8635029445 }, { "content": "/// calculates the nth ChaCha20-IETF block into a buffer\n\npub fn chacha20_ietf_block(key: &[u8], nonce: &[u8], n: u32, buf: &mut [u8]) {\n\n // create buffer\n\n let mut state = vec![0u32; 32];\n\n let (init, mixed) = state.split_at_mut(16);\n\n\n\n // initialize buffer\n\n (0..4).for_each(|i| init[i] = BASIS[i]);\n\n (4..12).for_each(|i| init[i] = read32_little_endian!(&key[(i - 4) * 4..]));\n\n init[12] = n;\n\n (13..16).for_each(|i| init[i] = read32_little_endian!(&nonce[(i - 13) * 4..]));\n\n\n\n // mix the buffer\n\n mixed.copy_from_slice(init);\n\n chacha20_rounds(mixed);\n\n\n\n // write the mixed state into the buffer\n\n (0..16).for_each(|i| mixed[i] = add!(mixed[i], init[i]));\n\n (0..16).for_each(|i| write32_little_endian!(mixed[i] => &mut buf[i * 4..]));\n\n}\n\n\n", "file_path": "crypto/src/internal/chacha.rs", "rank": 44, "score": 68462.23661642088 }, { "content": "/// update a snapshot with the new version by re-encrypting the data.\n\npub fn update_snapshot(input: &mut File, output: &mut File, password: &[u8]) -> crate::Result<()> {\n\n // setup the buffer to read from the old snapshot.\n\n let mut buffer: Vec<u8> = Vec::new();\n\n\n\n // check the file len\n\n check_file_len(input)?;\n\n\n\n // get the salt and ignore the version.\n\n let salt = get_salt(input, false)?;\n\n\n\n // decrypt the file into the buffer\n\n decrypt_file(input, &mut buffer, password, salt)?;\n\n\n\n // re-encrypt the file into a new snapshot.\n\n encrypt_snapshot(buffer, output, password)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "snapshot/src/logic.rs", "rank": 45, "score": 67264.3530499435 }, { 
"content": "/// Loads a key into r and s and computes the key multipliers\n\npub fn poly1305_init(r: &mut [u32], s: &mut [u32], mu: &mut [u32], key: &[u8]) {\n\n // load key\n\n r[0] = and!(shift_right!(read32_little_endian!(&key[0..]), 0), 0x03FFFFFF);\n\n r[1] = and!(shift_right!(read32_little_endian!(&key[3..]), 2), 0x03FFFF03);\n\n r[2] = and!(shift_right!(read32_little_endian!(&key[6..]), 4), 0x03FFC0FF);\n\n r[3] = and!(shift_right!(read32_little_endian!(&key[9..]), 6), 0x03F03FFF);\n\n r[4] = and!(shift_right!(read32_little_endian!(&key[12..]), 8), 0x000FFFFF);\n\n\n\n s[0] = read32_little_endian!(&key[16..]);\n\n s[1] = read32_little_endian!(&key[20..]);\n\n s[2] = read32_little_endian!(&key[24..]);\n\n s[3] = read32_little_endian!(&key[28..]);\n\n\n\n // compute multipliers\n\n mu[0] = 0;\n\n mu[1] = mult!(r[1], 5);\n\n mu[2] = mult!(r[2], 5);\n\n mu[3] = mult!(r[3], 5);\n\n mu[4] = mult!(r[4], 5);\n\n}\n\n\n", "file_path": "crypto/src/internal/poly.rs", "rank": 46, "score": 66180.03383278563 }, { "content": "/// decrypt file into a vector with a password.\n\npub fn decrypt_snapshot(input: &mut File, output: &mut Vec<u8>, password: &[u8]) -> crate::Result<()> {\n\n // check to see if the file is long enough\n\n check_file_len(input)?;\n\n\n\n // setup signature, salt and version buffers.\n\n let salt = get_salt(input, true)?;\n\n\n\n decrypt_file(input, output, password, salt)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "snapshot/src/logic.rs", "rank": 47, "score": 64937.380760077445 }, { "content": "/// updates the value a with any data using the key and the multipliers\n\n/// pads any incomplete block with 0 bytes.\n\npub fn poly1305_update(a: &mut [u32], r: &[u32], mu: &[u32], mut data: &[u8], is_last: bool) {\n\n let mut buf = vec![0; 16];\n\n let mut w = vec![0; 5];\n\n\n\n // process data\n\n while !data.is_empty() {\n\n // put data into buffer and append 0x01 byte as padding as needed\n\n let buf_len = min(data.len(), buf.len());\n\n if buf_len < 16 
{\n\n buf.copy_from_slice(&[0; 16]);\n\n if is_last {\n\n buf[buf_len] = 0x01\n\n }\n\n }\n\n buf[..buf_len].copy_from_slice(&data[..buf_len]);\n\n\n\n // decode next block into an accumulator. Apply high bit if needed.\n\n a[0] = add!(\n\n a[0],\n\n and!(shift_right!(read32_little_endian!(&buf[0..]), 0), 0x03FFFFFF)\n", "file_path": "crypto/src/internal/poly.rs", "rank": 48, "score": 63955.11444478555 }, { "content": "/// encrypts data in place\n\npub fn chachapoly_seal(data: &mut [u8], tag: &mut [u8], ad: &[u8], key: &[u8], nonce: &[u8]) {\n\n // encrypt data\n\n ChaCha20Ietf::xor(key, nonce, 1, data);\n\n\n\n // create footer\n\n let mut foot = Vec::with_capacity(16);\n\n foot.extend_from_slice(&(ad.len() as u64).to_le_bytes());\n\n foot.extend_from_slice(&(data.len() as u64).to_le_bytes());\n\n\n\n // compute poly key and auth tag\n\n let mut pkey = vec![0; 32];\n\n ChaCha20Ietf::xor(key, nonce, 0, &mut pkey);\n\n Poly1305::chachapoly_auth(tag, ad, data, &foot, &pkey);\n\n}\n\n\n", "file_path": "crypto/src/chachapoly_ietf.rs", "rank": 49, "score": 61663.55809163408 }, { "content": "fn main() {\n\n // determine which secure random number generator should be used.\n\n #[allow(unused_assignments)]\n\n let mut secure_random = None;\n\n\n\n #[cfg(any(target_os = \"macos\", target_os = \"ios\"))]\n\n {\n\n secure_random = macos_secrandom()\n\n }\n\n #[cfg(any(target_os = \"freebsd\", target_os = \"openbsd\", target_os = \"netbsd\"))]\n\n {\n\n secure_random = Some(\"USE_ARC4RANDOM\")\n\n }\n\n #[cfg(target_os = \"windows\")]\n\n {\n\n println!(\"cargo:rustc-link-lib=bcrypt\");\n\n secure_random = Some(\"USE_CRYPTGENRANDOM\")\n\n }\n\n #[cfg(target_os = \"linux\")]\n\n {\n", "file_path": "random/build.rs", "rank": 50, "score": 59814.57076490857 }, { "content": "fn main() {\n\n println!(\"Hello, world!\");\n\n}\n", "file_path": "client/src/main.rs", "rank": 51, "score": 58259.01063942617 }, { "content": "#[test]\n\nfn test_crypto() {\n\n for vec in TestVector::load() 
{\n\n vec.test_mac();\n\n }\n\n}\n\n\n\n// API test vector\n\n#[derive(Default, Clone, Debug)]\n\npub struct ApiTestVector {\n\n id: String,\n\n key_len: usize,\n\n data_len: usize,\n\n buf_len: usize,\n\n error: String,\n\n}\n\n\n\nimpl ApiTestVector {\n\n // load json\n\n pub fn load() -> Vec<Self> {\n\n let json = json::parse(VECTORS).unwrap();\n", "file_path": "crypto/tests/poly.rs", "rank": 52, "score": 56834.60246397934 }, { "content": "fn main() {\n\n let yaml = load_yaml!(\"cli.yml\");\n\n let matches = App::from(yaml).get_matches();\n\n\n\n encrypt_command(&matches);\n\n snapshot_command(&matches);\n\n read_command(&matches);\n\n list_command(&matches);\n\n revoke_command(&matches);\n\n garbage_collect_vault_command(&matches);\n\n take_ownership_command(&matches);\n\n purge_command(&matches);\n\n}\n", "file_path": "examples/commandline/src/main.rs", "rank": 53, "score": 56834.60246397934 }, { "content": "fn main() {\n\n // prepare key and ids\n\n let key = Key::<Provider>::random().expect(\"failed to generate random key\");\n\n let ids: Vec<Id> = (0..Env::client_count())\n\n .map(|_| Id::random::<Provider>().expect(\"Failed to generate random ID\"))\n\n .collect();\n\n\n\n // print info.\n\n eprintln! 
{\n\n \"Spraying fuzz [{}: {}, {}: {}, {}: {}, {}: {}]...\",\n\n \"Number of Clients\", Env::client_count(),\n\n \"Error rate\", Env::error_rate(),\n\n \"Verification rate\", Env::verify_number(),\n\n \"Retry delay\", Env::retry_delay(),\n\n };\n\n\n\n // start fuzzing\n\n ids.iter().for_each(|id| Client::<Provider>::create_chain(&key, *id));\n\n\n\n loop {\n", "file_path": "vault/fuzz/src/main.rs", "rank": 54, "score": 56834.60246397934 }, { "content": "#[test]\n\nfn test_api() {\n\n for vec in ApiTestVector::load() {\n\n vec.test_encryption().test_decryption();\n\n }\n\n}\n", "file_path": "crypto/tests/xchachapoly.rs", "rank": 55, "score": 56834.60246397934 }, { "content": "#[test]\n\nfn test_crypto() {\n\n for vec in TestVector::load() {\n\n vec.test_encryption().test_decryption();\n\n }\n\n}\n\n\n\n// Mac error Vector\n", "file_path": "crypto/tests/xchachapoly.rs", "rank": 56, "score": 56834.60246397934 }, { "content": "fn main() {\n\n // get the threads from the THREADS enviroment var.\n\n let threads_str = env::var(\"NUM_THREADS\").unwrap_or(num_cpus::get().to_string());\n\n let threads = usize::from_str(&threads_str).expect(\"Invalid value of THREADS\");\n\n\n\n // load the enviroment limit from the VECTOR_LIMIT env var.\n\n let limit_str = env::var(\"VECTOR_LIMIT\").unwrap_or(264.to_string());\n\n let limit = usize::from_str(&limit_str).expect(\"Invalid value of TEST_VECTOR_LIMIT\");\n\n\n\n // fuzz the threads.\n\n for _ in 0..threads {\n\n let mut rng = SecureRng::new();\n\n thread::spawn(move || loop {\n\n ChaChaPolyVector::random(limit, &mut rng).test();\n\n XChaChaPolyVector::random(limit, &mut rng).test()\n\n });\n\n }\n\n\n\n // Show the progress of fuzzing.\n\n println!(\n\n \"Spraying Fuzz [Num Of Threads = {}, Vector Limit = {} bytes]...\",\n\n threads, limit\n\n );\n\n loop {\n\n thread::sleep(Duration::from_secs(5));\n\n println!(\"Performed {} tests...\", COUNTER.load(Relaxed));\n\n }\n\n}\n", "file_path": "crypto/fuzz/src/main.rs", "rank": 57, 
"score": 56834.60246397934 }, { "content": "#[test]\n\nfn test_error() {\n\n for vec in ErrorTestVector::load() {\n\n vec.test_decryption();\n\n }\n\n}\n\n\n\n// API test vector\n\n#[derive(Default, Clone, Debug)]\n\npub struct ApiTestVector {\n\n id: String,\n\n key_len: usize,\n\n nonce_len: usize,\n\n ad_len: usize,\n\n enc_input_len: usize,\n\n enc_buf_len: usize,\n\n dec_input_len: usize,\n\n dec_buf_len: usize,\n\n error: String,\n\n}\n\nimpl ApiTestVector {\n", "file_path": "crypto/tests/xchachapoly.rs", "rank": 58, "score": 56834.60246397934 }, { "content": "#[test]\n\nfn base64_fail() {\n\n let vectors = [\n\n b\"Rg\".as_ref(),\n\n b\"Rk8\".as_ref(),\n\n b\"Rk9PQk+S\".as_ref(),\n\n b\"Zm9vY/Fy\".as_ref(),\n\n ];\n\n for vec in vectors.iter() {\n\n assert!(Vec::from_base64(vec).is_err());\n\n }\n\n}\n", "file_path": "vault/tests/base64.rs", "rank": 59, "score": 56834.60246397934 }, { "content": "/// decrypts data in-place after validation\n\nfn xchachapoly_open(\n\n data: &mut [u8],\n\n tag: &[u8],\n\n ad: &[u8],\n\n key: &[u8],\n\n nonce: &[u8],\n\n) -> Result<(), Box<dyn Error + 'static>> {\n\n // build footer\n\n let mut foot = Vec::with_capacity(16);\n\n foot.extend_from_slice(&(ad.len() as u64).to_le_bytes());\n\n foot.extend_from_slice(&(data.len() as u64).to_le_bytes());\n\n\n\n // get poly1305 key and auth tag\n\n let (mut pkey, mut verify_tag) = (vec![0; 32], vec![0; 16]);\n\n XChaCha20::xor(key, nonce, 0, &mut pkey);\n\n Poly1305::chachapoly_auth(&mut verify_tag, ad, data, &foot, &pkey);\n\n\n\n // validate the tags.\n\n if !eq_const_time!(&tag, &verify_tag) {\n\n return Err(crate::Error::InvalidData.into());\n", "file_path": "crypto/src/xchachapoly.rs", "rank": 60, "score": 56834.60246397934 }, { "content": "#[test]\n\nfn test_base64() {\n\n let vectors = [\n\n [[0x14, 0xfb, 0x9c, 0x03, 0xd9, 0x7e].as_ref(), b\"FPucA9l-\".as_ref()],\n\n\t\t[b\"\".as_ref(), b\"\".as_ref()],\n\n\t\t[b\"f\".as_ref(), b\"Zg==\".as_ref()], [b\"F\".as_ref(), 
b\"Rg==\".as_ref()],\n\n\t\t[b\"fo\".as_ref(), b\"Zm8=\".as_ref()], [b\"FO\".as_ref(), b\"Rk8=\".as_ref()],\n\n\t\t[b\"foo\".as_ref(), b\"Zm9v\".as_ref()], [b\"FOO\".as_ref(), b\"Rk9P\".as_ref()],\n\n\t\t[b\"foob\".as_ref(), b\"Zm9vYg==\".as_ref()], [b\"FOOB\".as_ref(), b\"Rk9PQg==\".as_ref()],\n\n\t\t[b\"fooba\".as_ref(), b\"Zm9vYmE=\".as_ref()], [b\"FOOBA\".as_ref(), b\"Rk9PQkE=\".as_ref()],\n\n\t\t[b\"foobar\".as_ref(), b\"Zm9vYmFy\".as_ref()], [b\"FOOBAR\".as_ref(), b\"Rk9PQkFS\".as_ref()],\n\n\t\t[\n\n\t\t\t[0xCA, 0xDD, 0x73, 0xBD, 0x92, 0x1E, 0xB8, 0x3F, 0xF2, 0x80, 0x96, 0x63, 0x17, 0x13, 0xB6, 0xC8, 0x54, 0x22, 0xA5, 0xE5, 0x40, 0xA7, 0x32, 0x5A, 0x6E, 0x41, 0x3F, 0xD5, 0x0B, 0x23, 0xDC, 0xE3, 0x22, 0xB3, 0xB7, 0x59, 0x68, 0xD1, 0xDE, 0x44, 0x31, 0xA3, 0xDF, 0x24, 0x1B, 0x08, 0x8E, 0x17, 0x44, 0xD2, 0xEA, 0x6E, 0x21, 0x72, 0xFB, 0x00, 0x2F, 0x94, 0xC9, 0x59, 0x77, 0x98, 0x78, 0xDD, 0xCB, 0x1F, 0xB9, 0x91, 0x32, 0xD6, 0x38, 0x16, 0x7E, 0xB5, 0xC6, 0x45, 0x9E, 0x50, 0xB8, 0x41, 0x4E, 0xD1, 0x9D, 0xE8, 0x9B, 0xAB, 0x87, 0x9E, 0x43, 0x23, 0xA4, 0x0A, 0x7A, 0x57, 0xEE, 0x35, 0x21, 0xA0, 0xCC, 0xA6, 0xC4, 0xEB, 0x61, 0xC6, 0x31, 0x4B, 0x27, 0x9D, 0xBC, 0x9A, 0x1F, 0x20, 0x15, 0xC8, 0xE1, 0x78, 0xD4, 0xE7, 0x89, 0x3C, 0x17, 0x96, 0x5B, 0x11, 0xFD, 0xA4, 0x41, 0x20, 0x4D, 0x26, 0x27, 0xD5, 0xDD, 0x54, 0x3A, 0x9E, 0x12, 0x17, 0x01, 0x3F, 0xC3, 0x6C, 0x69, 0xB9, 0xDC, 0xEF, 0x89, 0x48, 0xD1, 0x05, 0x4F, 0x56, 0x32, 0x83, 0x05, 0x05, 0x0F, 0x84, 0x62, 0xED, 0x30, 0x6B, 0x5C, 0x77, 0x8B, 0x8A, 0x93, 0xD0, 0x7D, 0xF9, 0x16, 0x96, 0x37, 0x15, 0x13, 0xC2, 0x7D, 0x51, 0x19, 0x0D, 0x7F, 0x55, 0x07, 0x85, 0x7E, 0x9D, 0x09, 0xD0, 0xBF, 0x49, 0x74, 0x7E, 0xA8, 0x01, 0xE4, 0x49, 0x7C, 0x4F, 0x39, 0x9A, 0xF9, 0xF8, 0xC0, 0xCA, 0xB4, 0xB8, 0x3B, 0x91, 0x58, 0xA6, 0x79, 0x90, 0xE3, 0x92, 0xD8, 0x4B, 0x68, 0x57, 0x54, 0xC8, 0x66, 0xA7, 0xD6, 0x3F, 0x4F, 0x0F, 0x0E, 0x42, 0xD3, 0x93, 0x2E, 0x94, 0x31, 0x1E, 0x23, 0xE0, 0x7F, 0x49, 0xBD, 0x46, 0x46, 0x54, 0xE2, 0x7C, 0x8D, 0xE2, 
0x54, 0x0C, 0x03, 0x78, 0x2C, 0xBA, 0x5E, 0x73, 0x35, 0x4F, 0x0A, 0x11, 0x21, 0x36, 0x74, 0x0B, 0xD8, 0x81, 0x1F, 0x56, 0x12, 0x0A, 0x80, 0xD4, 0x7D, 0x37, 0xC7, 0x69, 0xE1, 0x6D, 0x64, 0x1C, 0xD9, 0xF5, 0xA3, 0x5C, 0x35, 0x6C, 0x7A, 0xC6, 0x63, 0x3F, 0xDD, 0x8B, 0x46, 0x76, 0xC7, 0x57, 0x9D, 0xE7, 0x26, 0x92, 0xFE, 0x88, 0xB3, 0xB0, 0x77, 0xA9, 0xF5, 0x40, 0xE8, 0x2C, 0x9C, 0xFD, 0x51, 0xDF, 0x5D, 0xE0, 0xC8, 0x3F, 0x18, 0x27, 0xBB, 0xA5, 0x4E, 0xD2, 0xBD, 0xC1, 0xB5, 0xD8, 0x92, 0xE0, 0x7F, 0xB2, 0x3C, 0xE1, 0x41, 0x01, 0x71, 0xEE, 0xEC, 0x9B, 0x38, 0x28, 0x41, 0x10, 0xDA, 0x50, 0xDC, 0x4B, 0x4C, 0xAF, 0x00, 0xFF, 0x3A, 0x01, 0x75, 0xA6, 0x1C, 0xFD, 0x76, 0xA7, 0x0E, 0x85, 0xF4, 0x4B, 0x2D, 0x1B, 0x07, 0xEC, 0x9D, 0xE6, 0x4D, 0x46, 0x22, 0x52, 0xCB, 0xD5, 0xA6, 0x4F, 0x6E, 0x5F, 0xBA, 0x81, 0xA8, 0x9F, 0x64, 0x42, 0xB7, 0x09, 0xCA, 0x0F, 0x73, 0x71, 0x46, 0x4C, 0x63, 0xED, 0x60, 0xD3, 0xAA, 0x1F, 0xAC, 0xAC, 0x88, 0x30, 0xD3, 0x81, 0x3F, 0xD9, 0x9A, 0xFC, 0xA8, 0x09, 0x9B, 0x91, 0x91, 0x81, 0x53, 0xED, 0x11, 0x0D, 0xC0, 0xE4, 0x80, 0xF1, 0x8C, 0x34, 0x07, 0xC5, 0xF1, 0x7A, 0x39, 0x75, 0x68, 0xF7, 0x70, 0xD9, 0x93, 0x92, 0x4C, 0x3E, 0xF8, 0xDE, 0x91, 0x30, 0x67, 0xF0, 0xEB, 0xCF, 0x8C, 0xEC, 0xA8, 0x56, 0x98, 0xB5, 0x05, 0xE7, 0x09, 0x38, 0x77, 0xAE, 0x55, 0x46, 0x1C, 0x6B, 0x89, 0xED, 0xE8, 0x49, 0x77, 0xD5, 0x6D, 0x29, 0xB3, 0x57, 0xED, 0x12, 0x56, 0x73, 0x4E, 0x92, 0xF4, 0x64, 0x0E, 0x44, 0x48, 0x45, 0x8C, 0x2A, 0x14, 0x71, 0xBB, 0xE4, 0x8E, 0x54, 0xFC, 0xE5, 0xD6, 0xA9, 0xD2, 0xE0, 0xC3, 0x58, 0x52, 0xDD, 0xF9, 0x20, 0x80, 0x48, 0x0F, 0xE4, 0x43, 0x62, 0x9F, 
0xF1].as_ref(),\n\n\t\t\tb\"yt1zvZIeuD_ygJZjFxO2yFQipeVApzJabkE_1Qsj3OMis7dZaNHeRDGj3yQbCI4XRNLqbiFy-wAvlMlZd5h43csfuZEy1jgWfrXGRZ5QuEFO0Z3om6uHnkMjpAp6V-41IaDMpsTrYcYxSyedvJofIBXI4XjU54k8F5ZbEf2kQSBNJifV3VQ6nhIXAT_DbGm53O-JSNEFT1YygwUFD4Ri7TBrXHeLipPQffkWljcVE8J9URkNf1UHhX6dCdC_SXR-qAHkSXxPOZr5-MDKtLg7kVimeZDjkthLaFdUyGan1j9PDw5C05MulDEeI-B_Sb1GRlTifI3iVAwDeCy6XnM1TwoRITZ0C9iBH1YSCoDUfTfHaeFtZBzZ9aNcNWx6xmM_3YtGdsdXnecmkv6Is7B3qfVA6Cyc_VHfXeDIPxgnu6VO0r3BtdiS4H-yPOFBAXHu7Js4KEEQ2lDcS0yvAP86AXWmHP12pw6F9EstGwfsneZNRiJSy9WmT25fuoGon2RCtwnKD3NxRkxj7WDTqh-srIgw04E_2Zr8qAmbkZGBU-0RDcDkgPGMNAfF8Xo5dWj3cNmTkkw--N6RMGfw68-M7KhWmLUF5wk4d65VRhxrie3oSXfVbSmzV-0SVnNOkvRkDkRIRYwqFHG75I5U_OXWqdLgw1hS3fkggEgP5ENin_E=\".as_ref()\n\n\t\t]\n\n ];\n\n\n\n for vec in vectors.iter() {\n\n assert_eq!(vec[0].base64().as_bytes(), vec[1]);\n\n assert_eq!(Vec::from_base64(vec[1]).unwrap(), vec[0]);\n\n }\n\n}\n\n\n", "file_path": "vault/tests/base64.rs", "rank": 61, "score": 56834.60246397934 }, { "content": "#[test]\n\nfn test_api() {\n\n for vec in ApiTestVector::load() {\n\n vec.test_mac();\n\n }\n\n}\n", "file_path": "crypto/tests/poly.rs", "rank": 62, "score": 56834.60246397934 }, { "content": "#[test]\n\nfn test_api() {\n\n for vec in ApiTestVector::load() {\n\n vec.test_encryption().test_decryption();\n\n }\n\n}\n", "file_path": "crypto/tests/xchacha.rs", "rank": 63, "score": 56834.60246397934 }, { "content": "#[test]\n\nfn test_crypto() {\n\n for vec in TestVector::load() {\n\n vec.test_keystream_encryption().test_keystream_decryption();\n\n }\n\n}\n\n\n\n// API test vector\n\n#[derive(Default, Clone, Debug)]\n\npub struct ApiTestVector {\n\n id: String,\n\n key_len: usize,\n\n nonce_len: usize,\n\n enc_input_len: usize,\n\n enc_buf_len: usize,\n\n dec_input_len: usize,\n\n dec_buf_len: usize,\n\n error: String,\n\n}\n\nimpl ApiTestVector {\n\n // load json vectors\n", "file_path": "crypto/tests/xchacha.rs", "rank": 64, "score": 56834.60246397934 }, { "content": "#[test]\n\nfn 
testset_full() {\n\n testset(\"full\");\n\n}\n\n\n", "file_path": "vault/tests/preload.rs", "rank": 65, "score": 56834.60246397934 }, { "content": "#[test]\n\nfn testset_partial() {\n\n testset(\"partial\")\n\n}\n", "file_path": "vault/tests/preload.rs", "rank": 66, "score": 56834.60246397934 }, { "content": "#[test]\n\nfn test_crypto() {\n\n for vec in TestVector::load() {\n\n vec.test_encryption().test_decryption();\n\n }\n\n}\n\n\n\n// API test vector\n\n#[derive(Default, Clone, Debug)]\n\npub struct ApiTestVector {\n\n id: String,\n\n key_len: usize,\n\n nonce_len: usize,\n\n enc_input_len: usize,\n\n enc_buf_len: usize,\n\n dec_input_len: usize,\n\n dec_buf_len: usize,\n\n error: String,\n\n}\n\nimpl ApiTestVector {\n\n // load json vectors\n", "file_path": "crypto/tests/chacha_ietf.rs", "rank": 67, "score": 55525.43070193347 }, { "content": "#[test]\n\nfn test_api() {\n\n for vec in ApiTestVector::load() {\n\n vec.test_encryption().test_decryption();\n\n }\n\n}\n", "file_path": "crypto/tests/chacha_ietf.rs", "rank": 68, "score": 55525.43070193347 }, { "content": "#[test]\n\nfn test_crypto() {\n\n for vec in TestVector::load() {\n\n vec.test_encryption().test_decryption();\n\n }\n\n}\n\n\n\n// MAC error vector\n", "file_path": "crypto/tests/chachapoly_ietf.rs", "rank": 69, "score": 55525.43070193347 }, { "content": "#[test]\n\nfn test_error() {\n\n for vec in ErrorVector::load() {\n\n vec.test_decryption();\n\n }\n\n}\n\n\n\n// api vector struct\n\n#[derive(Default, Clone, Debug)]\n\npub struct ApiTestVector {\n\n id: String,\n\n key_len: usize,\n\n nonce_len: usize,\n\n ad_len: usize,\n\n enc_input_len: usize,\n\n enc_buf_len: usize,\n\n dec_input_len: usize,\n\n dec_buf_len: usize,\n\n error: String,\n\n}\n\nimpl ApiTestVector {\n", "file_path": "crypto/tests/chachapoly_ietf.rs", "rank": 70, "score": 55525.43070193347 }, { "content": "#[test]\n\nfn test_api() {\n\n for vec in ApiTestVector::load() {\n\n vec.test_encryption().test_decryption();\n\n }\n\n}\n", 
"file_path": "crypto/tests/chachapoly_ietf.rs", "rank": 71, "score": 55525.43070193347 }, { "content": "#[test]\n\nfn test_actor_system() {\n\n let key = Key::<Provider>::random().expect(\"Couldn't create key\");\n\n let sys = ActorSystem::new().unwrap();\n\n\n\n let client = sys\n\n .actor_of_args::<Client, _>(\"client\", Id::random::<Provider>().unwrap())\n\n .unwrap();\n\n\n\n sys.actor_of::<Blob<Provider>>(\"blob\").unwrap();\n\n\n\n client.tell(CMsg::AddVault(key.clone()), None);\n\n client.tell(CMsg::CreateRecord((key.clone(), b\"Some data\".to_vec())), None);\n\n\n\n std::thread::sleep(std::time::Duration::from_millis(500));\n\n client.tell(CMsg::ListRecords(key), None);\n\n\n\n std::thread::sleep(std::time::Duration::from_millis(500));\n\n}\n", "file_path": "client/src/actor_test_client.rs", "rank": 72, "score": 53201.046432094416 }, { "content": "fn testset(set: &str) {\n\n let vault = TestVault::from_json(DATA, set);\n\n let view = vault::DBView::load(vault.key().clone(), vault.list()).unwrap();\n\n let records: Vec<_> = view.records().collect();\n\n\n\n let reader = view.reader();\n\n let existing: HashMap<_, _> = records\n\n .into_iter()\n\n .map(|(id, hint)| (reader.prepare_read(id).unwrap(), hint))\n\n .map(|(req, hint)| (vault.read(req).unwrap(), hint))\n\n .map(|(res, hint)| (hint, reader.read(res).unwrap()))\n\n .collect();\n\n\n\n let plain = PlainVault::from_json(DATA, set);\n\n\n\n assert_eq!(existing, plain.records);\n\n}\n\n\n", "file_path": "vault/tests/preload.rs", "rank": 73, "score": 51114.89436867937 }, { "content": "fn get_snapshot_path() -> PathBuf {\n\n let path = snapshot_dir().expect(\"Unable to get the snapshot path\");\n\n\n\n path.join(\"backup.snapshot\")\n\n}\n\n\n", "file_path": "client/src/snap.rs", "rank": 74, "score": 50777.35097042369 }, { "content": "#[cfg(any(target_os = \"macos\", target_os = \"ios\"))]\n\nfn macos_secrandom() -> Option<&'static str> {\n\n println!(\"cargo:rustc-link-lib=framework=Security\");\n\n 
Some(\"USE_SECRANDOM\")\n\n}\n\n\n\n// checks if the current version of glibc supports the getrandom function\n", "file_path": "random/build.rs", "rank": 75, "score": 48249.18645987897 }, { "content": "// handle the read command.\n\nfn read_command(matches: &ArgMatches) {\n\n if let Some(matches) = matches.subcommand_matches(\"read\") {\n\n if let Some(ref pass) = matches.value_of(\"password\") {\n\n if let Some(ref id) = matches.value_of(\"id\") {\n\n let snapshot = get_snapshot_path();\n\n let client: Client<Provider> = deserialize_from_snapshot(&snapshot, pass);\n\n\n\n let id = Vec::from_base64(id.as_bytes()).expect(\"couldn't convert the id to from base64\");\n\n let id = Id::load(&id).expect(\"Couldn't build a new Id\");\n\n\n\n client.read_record_by_id(id);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/commandline/src/main.rs", "rank": 76, "score": 47754.089552995734 }, { "content": "// create a record with a revoke transaction. Data isn't actually deleted until it is garbage collected.\n\nfn revoke_command(matches: &ArgMatches) {\n\n if let Some(matches) = matches.subcommand_matches(\"revoke\") {\n\n if let Some(ref pass) = matches.value_of(\"password\") {\n\n if let Some(ref id) = matches.value_of(\"id\") {\n\n let snapshot = get_snapshot_path();\n\n let client: Client<Provider> = deserialize_from_snapshot(&snapshot, pass);\n\n\n\n let id = Vec::from_base64(id.as_bytes()).expect(\"couldn't convert the id to from base64\");\n\n let id = Id::load(&id).expect(\"Couldn't build a new Id\");\n\n\n\n client.revoke_record_by_id(id);\n\n\n\n let snapshot = get_snapshot_path();\n\n serialize_to_snapshot(&snapshot, pass, client);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/commandline/src/main.rs", "rank": 77, "score": 47754.089552995734 }, { "content": "// handle the snapshot command.\n\nfn snapshot_command(matches: &ArgMatches) {\n\n if let Some(matches) = matches.subcommand_matches(\"snapshot\") {\n\n if let Some(ref pass) = 
matches.value_of(\"password\") {\n\n if let Some(ref path) = matches.value_of(\"path\") {\n\n let path = Path::new(path);\n\n\n\n let client: Client<Provider> = deserialize_from_snapshot(&path.to_path_buf(), pass);\n\n\n\n let new_path = path.parent().unwrap().join(\"recomputed.snapshot\");\n\n serialize_to_snapshot(&new_path, pass, client);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/commandline/src/main.rs", "rank": 78, "score": 47754.089552995734 }, { "content": "// Purge a record from the chain: revoke and then garbage collect.\n\nfn purge_command(matches: &ArgMatches) {\n\n if let Some(matches) = matches.subcommand_matches(\"purge\") {\n\n if let Some(ref pass) = matches.value_of(\"password\") {\n\n if let Some(ref id) = matches.value_of(\"id\") {\n\n let snapshot = get_snapshot_path();\n\n let client: Client<Provider> = deserialize_from_snapshot(&snapshot, pass);\n\n\n\n let id = Vec::from_base64(id.as_bytes()).expect(\"couldn't convert the id to from base64\");\n\n let id = Id::load(&id).expect(\"Couldn't build a new Id\");\n\n\n\n client.revoke_record_by_id(id);\n\n client.perform_gc();\n\n\n\n assert!(client.db.take(|db| db.all().find(|i| i == &id).is_none()));\n\n\n\n let snapshot = get_snapshot_path();\n\n serialize_to_snapshot(&snapshot, pass, client);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/commandline/src/main.rs", "rank": 79, "score": 47754.089552995734 }, { "content": "// handle the encryption command.\n\nfn encrypt_command(matches: &ArgMatches) {\n\n let snapshot = get_snapshot_path();\n\n\n\n if let Some(matches) = matches.subcommand_matches(\"encrypt\") {\n\n if let Some(ref pass) = matches.value_of(\"password\") {\n\n if let Some(plain) = matches.value_of(\"plain\") {\n\n let client = if snapshot.exists() {\n\n deserialize_from_snapshot(&get_snapshot_path(), pass)\n\n } else {\n\n let key = Key::<Provider>::random().expect(\"Unable to generate a new key\");\n\n let id = Id::random::<Provider>().expect(\"Unable to generate 
a new id\");\n\n Client::create_chain(key, id)\n\n };\n\n let id = client.create_record(plain.as_bytes().to_vec());\n\n serialize_to_snapshot(&get_snapshot_path(), pass, client);\n\n println!(\"{:?}\", id);\n\n };\n\n };\n\n }\n\n}\n\n\n", "file_path": "examples/commandline/src/main.rs", "rank": 80, "score": 47754.089552995734 }, { "content": "// handle the list command.\n\nfn list_command(matches: &ArgMatches) {\n\n if let Some(matches) = matches.subcommand_matches(\"list\") {\n\n if let Some(ref pass) = matches.value_of(\"password\") {\n\n let snapshot = get_snapshot_path();\n\n let client: Client<Provider> = deserialize_from_snapshot(&snapshot, pass);\n\n\n\n if matches.is_present(\"all\") {\n\n client.list_all_ids();\n\n } else {\n\n client.list_ids();\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/commandline/src/main.rs", "rank": 81, "score": 47754.089552995734 }, { "content": "#[cfg(target_os = \"linux\")]\n\nfn linux_check_getrandom() -> Option<&'static str> {\n\n use std::{ffi::CStr, os::raw::c_char, str::FromStr};\n\n extern \"C\" {\n\n fn gnu_get_libc_version() -> *const c_char;\n\n }\n\n\n\n let v: Vec<u8> = unsafe { CStr::from_ptr(gnu_get_libc_version()) }\n\n .to_str()\n\n .unwrap()\n\n .split('.')\n\n .map(|s| u8::from_str(s).unwrap())\n\n .collect();\n\n\n\n match (v[0], v[1]) {\n\n (2..=255, 25..=255) => Some(\"USE_GETRANDOM\"),\n\n _ => Some(\"USE_DEV_RANDOM\"),\n\n }\n\n}\n\n\n", "file_path": "random/build.rs", "rank": 82, "score": 47132.17801903001 }, { "content": "// Take ownership of an existing chain. 
Requires that the new chain owner knows the old key to unlock the data.\n\nfn take_ownership_command(matches: &ArgMatches) {\n\n if let Some(matches) = matches.subcommand_matches(\"take_ownership\") {\n\n if let Some(ref pass) = matches.value_of(\"password\") {\n\n let new_id = Id::random::<Provider>().expect(\"Unable to generate a new id\");\n\n\n\n let snapshot = get_snapshot_path();\n\n let client: Client<Provider> = deserialize_from_snapshot(&snapshot, pass);\n\n let new_client: Client<Provider> = Client::create_chain(client.db.key, new_id);\n\n\n\n new_client.take_ownership(client.id);\n\n\n\n println!(\"Old owner id: {:?}\\nNew owner id: {:?}\", client.id, new_client.id);\n\n\n\n let snapshot = get_snapshot_path();\n\n serialize_to_snapshot(&snapshot, pass, new_client);\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/commandline/src/main.rs", "rank": 83, "score": 46789.839384547406 }, { "content": "/// Does ChaCha20 Rounds over the state\n\nfn chacha20_rounds(state: &mut [u32]) {\n\n for _ in 0..10 {\n\n // macro for a quater round\n\n macro_rules! quarter_round {\n\n ($a:expr, $b:expr, $c:expr, $d:expr) => {{\n\n state[$a] = add!(state[$a], state[$b]);\n\n state[$d] = xor!(state[$d], state[$a]);\n\n state[$d] = or!(shift_left!(state[$d], 16), shift_right!(state[$d], 16));\n\n state[$c] = add!(state[$c], state[$d]);\n\n state[$b] = xor!(state[$b], state[$c]);\n\n state[$b] = or!(shift_left!(state[$b], 12), shift_right!(state[$b], 20));\n\n state[$a] = add!(state[$a], state[$b]);\n\n state[$d] = xor!(state[$d], state[$a]);\n\n state[$d] = or!(shift_left!(state[$d], 8), shift_right!(state[$d], 24));\n\n state[$c] = add!(state[$c], state[$d]);\n\n state[$b] = xor!(state[$b], state[$c]);\n\n state[$b] = or!(shift_left!(state[$b], 7), shift_right!(state[$b], 25));\n\n }};\n\n }\n\n\n", "file_path": "crypto/src/internal/chacha.rs", "rank": 84, "score": 46095.757473185426 }, { "content": "// garbage collect the chain. 
Remove any revoked data from the chain.\n\nfn garbage_collect_vault_command(matches: &ArgMatches) {\n\n if let Some(matches) = matches.subcommand_matches(\"garbage_collect\") {\n\n if let Some(ref pass) = matches.value_of(\"password\") {\n\n let snapshot = get_snapshot_path();\n\n let client: Client<Provider> = deserialize_from_snapshot(&snapshot, pass);\n\n\n\n client.perform_gc();\n\n client.list_ids();\n\n\n\n let snapshot = get_snapshot_path();\n\n serialize_to_snapshot(&snapshot, pass, client);\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/commandline/src/main.rs", "rank": 85, "score": 45890.474928426745 }, { "content": "/// verify that the folder exists or create it.\n\nfn verify_or_create(dir: &Path) -> crate::Result<()> {\n\n if dir.is_dir() {\n\n return Ok(());\n\n }\n\n Ok(fs::create_dir_all(dir)?)\n\n}\n", "file_path": "snapshot/src/files.rs", "rank": 86, "score": 44690.680874959515 }, { "content": "/// generate the salt for the encryption algorithm.\n\nfn generate_salt() -> crate::Result<pwhash::Salt> {\n\n // generate salt\n\n let salt = pwhash::gen_salt();\n\n // hash salt with sha256\n\n let hash = hash::sha256::hash(&salt.0);\n\n // repack salt\n\n let salt = pwhash::Salt::from_slice(hash.as_ref()).expect(\"Unable to rewrap salt\");\n\n\n\n Ok(salt)\n\n}\n\n\n", "file_path": "snapshot/src/logic.rs", "rank": 87, "score": 44690.680874959515 }, { "content": "/// check the version on the snapshot.\n\nfn check_version(version: &[u8]) -> crate::Result<()> {\n\n // probably shouldn't do this, but if version is incorrect reject snapshot with error\n\n if version != VERSION {\n\n Err(crate::Error::SnapshotError(\"Snapshot version is incorrect\".into()))\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n use sodiumoxide::crypto::secretstream::Tag;\n\n use std::fs::OpenOptions;\n\n\n\n #[test]\n\n fn test_key_derivation() {\n\n let salt = generate_salt().unwrap();\n\n let key_one = derive_key_from_password(b\"some long 
password\", &salt).unwrap();\n\n let key_two = derive_key_from_password(b\"some long password\", &salt).unwrap();\n", "file_path": "snapshot/src/logic.rs", "rank": 88, "score": 44690.680874959515 }, { "content": "// send a message\n\nfn send(req: TransactionRequest) -> Option<TransactionResult> {\n\n // should request fail or not\n\n if CRng::bool(Env::error_rate()) {\n\n None?\n\n }\n\n\n\n let res = match req {\n\n TransactionRequest::List => {\n\n let records = Env::storage()\n\n .read()\n\n .expect(line_error!())\n\n .keys()\n\n .cloned()\n\n .collect();\n\n\n\n TransactionResult::List(ListResult::new(records))\n\n }\n\n TransactionRequest::Write(write) => {\n\n Env::storage()\n\n .write()\n", "file_path": "vault/fuzz/src/connection.rs", "rank": 89, "score": 42827.06625039052 }, { "content": "/// read the buffer.\n\nfn read_buffer(input: &mut &[u8]) -> Vec<u8> {\n\n let (len, rest) = input.split_at(std::mem::size_of::<usize>());\n\n let len = usize::from_le_bytes(len.try_into().unwrap());\n\n let (v, rest) = rest.split_at(len);\n\n *input = rest;\n\n v.to_vec()\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_serialize_deserialize() {\n\n let mut map = HashMap::new();\n\n map.insert(vec![32, 1, 53], vec![39, 43, 5]);\n\n map.insert(vec![52, 13, 53, 53], vec![31, 1]);\n\n map.insert(vec![142], vec![1, 0, 125, 82, 13, 54, 69]);\n\n\n\n let buf = serialize_map(&map);\n\n let recovered = deserialize_buffer(&buf);\n\n\n\n println!(\"{:?}, {:?}\", buf, recovered);\n\n\n\n assert_eq!(map, recovered);\n\n }\n\n}\n", "file_path": "snapshot/src/serialize.rs", "rank": 90, "score": 42520.710612806775 }, { "content": "/// check to see if the file is long enough.\n\nfn check_file_len(input: &mut File) -> crate::Result<()> {\n\n if input.metadata()?.len() <= (pwhash::SALTBYTES + secretstream::HEADERBYTES + SIGN.len()) as u64 {\n\n return Err(crate::Error::SnapshotError(\"Snapshot is not valid or encrypted\".into()));\n\n }\n\n\n\n 
Ok(())\n\n}\n\n\n", "file_path": "snapshot/src/logic.rs", "rank": 91, "score": 41621.346156686115 }, { "content": "/// derive key from salt and password.\n\nfn derive_key_from_password(password: &[u8], salt: &pwhash::Salt) -> crate::Result<Key> {\n\n // empty key\n\n let mut key = [0; secretstream::KEYBYTES];\n\n\n\n // derive key from password and salt.\n\n match pwhash::derive_key(\n\n &mut key,\n\n password,\n\n &salt,\n\n pwhash::OPSLIMIT_INTERACTIVE,\n\n pwhash::MEMLIMIT_INTERACTIVE,\n\n ) {\n\n Ok(_) => Ok(Key(key)),\n\n Err(_) => Err(crate::Error::SnapshotError(\"Could not derive key from password\".into())),\n\n }\n\n}\n\n\n", "file_path": "snapshot/src/logic.rs", "rank": 92, "score": 36482.2291042345 }, { "content": "/// create an encryption push stream and a header.\n\nfn create_stream(&Key(ref key): &Key) -> crate::Result<(Stream<Push>, Header)> {\n\n let stream_key = secretstream::Key(key.to_owned());\n\n\n\n Stream::init_push(&stream_key).map_err(|_| crate::Error::SnapshotError(\"Unable to create stream\".into()))\n\n}\n\n\n", "file_path": "snapshot/src/logic.rs", "rank": 93, "score": 35762.77067258219 }, { "content": "/// check the version if the book is set and check the signature then extract the salt.\n\nfn get_salt(input: &mut File, chk_version: bool) -> crate::Result<pwhash::Salt> {\n\n // setup the buffers\n\n let mut sign = [0u8; 5];\n\n let mut version = [0u8; 2];\n\n let mut salt = [0u8; pwhash::SALTBYTES];\n\n\n\n // get signature and version\n\n input.read_exact(&mut sign)?;\n\n input.read_exact(&mut version)?;\n\n\n\n // if bool is set, check the version\n\n if chk_version {\n\n check_version(&version)?;\n\n }\n\n\n\n // if sign is the same expected read in all of the salt.\n\n if sign == SIGN {\n\n input.read_exact(&mut salt)?;\n\n } else {\n\n // otherwise take the bytes from the sign and read the rest as the salt.\n\n salt[..7].copy_from_slice(&sign);\n\n input.read_exact(&mut salt[7..])?;\n\n }\n\n\n\n // create a new salt.\n\n let 
salt = pwhash::Salt(salt);\n\n\n\n Ok(salt)\n\n}\n\n\n", "file_path": "snapshot/src/logic.rs", "rank": 94, "score": 35066.9882511853 }, { "content": "/// create a decryption pull stream.\n\nfn pull_stream(header: &[u8], &Key(ref key): &Key) -> crate::Result<Stream<Pull>> {\n\n let stream_key = secretstream::Key(key.to_owned());\n\n let header = Header::from_slice(header).expect(\"Invalid Header size\");\n\n\n\n Stream::init_pull(&header, &stream_key).map_err(|_| crate::Error::SnapshotError(\"Unable to open stream\".into()))\n\n}\n\n\n", "file_path": "snapshot/src/logic.rs", "rank": 95, "score": 34422.88108820015 }, { "content": "/// encrypts data in-place and authenticates it\n\nfn xchachapoly_seal(data: &mut [u8], tag: &mut [u8], ad: &[u8], key: &[u8], nonce: &[u8]) {\n\n // xor and encrypt the data.\n\n XChaCha20::xor(key, nonce, 1, data);\n\n\n\n // build a footer\n\n let mut foot = Vec::with_capacity(16);\n\n foot.extend_from_slice(&(ad.len() as u64).to_le_bytes());\n\n foot.extend_from_slice(&(data.len() as u64).to_le_bytes());\n\n\n\n // compute Poly1305 key and auth tag\n\n let mut pkey = vec![0; 32];\n\n XChaCha20::xor(key, nonce, 0, &mut pkey);\n\n Poly1305::chachapoly_auth(tag, ad, data, &foot, &pkey);\n\n}\n\n\n", "file_path": "crypto/src/xchachapoly.rs", "rank": 96, "score": 32043.16189928032 }, { "content": "// logic to decrypt the file.\n\nfn decrypt_file(input: &mut File, output: &mut Vec<u8>, password: &[u8], salt: pwhash::Salt) -> crate::Result<()> {\n\n // setup header buffer and extract it from the file\n\n let mut header = [0u8; secretstream::HEADERBYTES];\n\n input.read_exact(&mut header)?;\n\n\n\n // generate a key from the salt and password.\n\n let key = derive_key_from_password(&password, &salt)?;\n\n\n\n // create buffer.\n\n let mut buf = [0u8; CHUNK_SIZE + secretstream::ABYTES];\n\n // get the pull stream.\n\n let mut stream = pull_stream(&header, &key)?;\n\n\n\n // iterate through the stream until its finalized.\n\n while 
stream.is_not_finalized() {\n\n // read the input into the buffer\n\n match input.read(&mut buf) {\n\n Ok(bytes_read) if bytes_read > 0 => {\n\n // pull each chunk from the stream and decrypt.\n\n let (decrypt, _tag) = stream.pull(&buf[..bytes_read], None).map_err(|_| {\n", "file_path": "snapshot/src/logic.rs", "rank": 97, "score": 29989.989990514045 }, { "content": "#[derive(Deserialize, Serialize, Debug)]\n\npub struct Provider;\n\n// add Nonce and Tag len to the provider.\n\nimpl Provider {\n\n const NONCE_LEN: usize = 24;\n\n const TAG_LEN: usize = 16;\n\n}\n\n\n\n// implement the BoxProvider trait.\n\nimpl BoxProvider for Provider {\n\n // setup the box key length.\n\n fn box_key_len() -> usize {\n\n 32\n\n }\n\n\n\n // setup the box overhead length.\n\n fn box_overhead() -> usize {\n\n Self::NONCE_LEN + Self::TAG_LEN\n\n }\n\n\n", "file_path": "examples/commandline/src/provider.rs", "rank": 98, "score": 17.498246471981663 }, { "content": " }\n\n\n\n /// get the key's bytes\n\n pub fn bytes(&self) -> &[u8] {\n\n &self.key\n\n }\n\n}\n\n\n\nimpl<T: BoxProvider> Clone for Key<T> {\n\n fn clone(&self) -> Self {\n\n Self {\n\n key: self.key.clone(),\n\n drop_fn: self.drop_fn,\n\n _box_provider: PhantomData,\n\n }\n\n }\n\n}\n\n\n\nimpl<T: BoxProvider> PartialEq for Key<T> {\n\n fn eq(&self, other: &Self) -> bool {\n", "file_path": "vault/src/crypto_box.rs", "rank": 99, "score": 15.253414537722204 } ]
Rust
src/vm/mod.rs
bpandreotti/monkey-rust
970b808530495d94d1bb82a3787c198bcef3b7c7
#[cfg(test)] mod tests; use crate::builtins::{self, BuiltinFn}; use crate::compiler::code::*; use crate::error::{MonkeyError, MonkeyResult, RuntimeError::*}; use crate::lexer::token::Token; use crate::object::*; use std::collections::HashMap; const STACK_SIZE: usize = 2048; pub const GLOBALS_SIZE: usize = 65536; struct Frame { instructions: Instructions, free_vars: Vec<Object>, pc: usize, base_pointer: usize, } struct FrameStack(Vec<Frame>); impl FrameStack { fn top(&self) -> &Frame { self.0.last().expect("No frames in frame stack") } fn top_mut(&mut self) -> &mut Frame { self.0.last_mut().expect("No frames in frame stack") } fn push(&mut self, frame: Frame) { self.0.push(frame); } fn pop(&mut self) { self.0.pop(); } fn read_u16_from_top(&mut self) -> u16 { let value = read_u16(&self.top().instructions.0[self.top().pc + 1..]); self.top_mut().pc += 2; value } fn read_u8_from_top(&mut self) -> u8 { let value = self.top().instructions.0[self.top().pc + 1]; self.top_mut().pc += 1; value } } pub struct VM { stack: Vec<Object>, sp: usize, pub globals: Box<[Object]>, } impl VM { pub fn new() -> VM { let mut globals = Vec::with_capacity(GLOBALS_SIZE); globals.resize(GLOBALS_SIZE, Object::Nil); let globals = globals.into_boxed_slice(); VM { stack: Vec::with_capacity(STACK_SIZE), sp: 0, globals, } } pub fn run(&mut self, bytecode: Bytecode) -> MonkeyResult<()> { let mut frame_stack = FrameStack({ let root_frame = Frame { instructions: bytecode.instructions, free_vars: Vec::new(), pc: 0, base_pointer: 0, }; vec![root_frame] }); let constants = bytecode.constants; loop { if frame_stack.top().pc >= frame_stack.top().instructions.0.len() { if frame_stack.0.len() == 1 { break; } else { panic!("Reached end of instructions in non-root frame") } } use OpCode::*; let op = OpCode::from_byte(frame_stack.top().instructions.0[frame_stack.top().pc]); match op { OpConstant => { let constant_index = frame_stack.read_u16_from_top() as usize; self.push(constants[constant_index].clone())?; } 
OpPop => { self.pop()?; } OpAdd | OpSub | OpMul | OpDiv | OpExponent | OpModulo | OpEquals | OpNotEquals | OpGreaterThan | OpGreaterEq => self.execute_binary_operation(op)?, OpTrue => self.push(Object::Boolean(true))?, OpFalse => self.push(Object::Boolean(false))?, OpPrefixMinus | OpPrefixNot => self.execute_prefix_operation(op)?, OpJumpNotTruthy => { let pos = frame_stack.read_u16_from_top() as usize; if !Object::is_truthy(&self.pop()?) { frame_stack.top_mut().pc = pos - 1; } } OpJump => { let pos = frame_stack.read_u16_from_top() as usize; frame_stack.top_mut().pc = pos - 1; } OpNil => self.push(Object::Nil)?, OpSetGlobal => { let index = frame_stack.read_u16_from_top() as usize; self.globals[index] = self.pop()?.clone(); } OpGetGlobal => { let index = frame_stack.read_u16_from_top() as usize; self.push(self.globals[index].clone())?; } OpSetLocal => { let index = frame_stack.read_u8_from_top() as usize; self.stack[frame_stack.top().base_pointer + index] = self.pop()?; } OpGetLocal => { let index = frame_stack.read_u8_from_top() as usize; self.push(self.stack[frame_stack.top().base_pointer + index].clone())? 
} OpArray => { let num_elements = frame_stack.read_u16_from_top() as usize; let arr = self.take(num_elements); self.push(Object::Array(Box::new(arr)))?; } OpHash => { let num_elements = frame_stack.read_u16_from_top() as usize; let entries = self.take(2 * num_elements); let mut map = HashMap::new(); for i in 0..num_elements { let key = &entries[i * 2]; let value = &entries[i * 2 + 1]; let hashable = HashableObject::from_object(key.clone()) .ok_or_else(|| MonkeyError::Vm(HashKeyTypeError(key.type_str())))?; map.insert(hashable, value.clone()); } self.push(Object::Hash(Box::new(map)))?; } OpIndex => { let index = self.pop()?; let obj = self.pop()?; self.execute_index_operation(obj, index)?; } OpCall => { let num_args = frame_stack.read_u8_from_top() as usize; let func = self.stack.remove(self.sp - 1 - num_args); self.sp -= 1; match func { Object::Closure(c) => { self.execute_closure_call(&mut frame_stack, *c, num_args)?; continue; } Object::Builtin(f) => self.execute_builtin_call(f, num_args)?, _ => return Err(MonkeyError::Vm(NotCallable(func.type_str()))), } } OpReturn => { let returned_value = self.pop()?; self.sp = frame_stack.top().base_pointer; self.stack.truncate(self.sp); frame_stack.pop(); self.push(returned_value)?; continue; } OpGetBuiltin => { let index = frame_stack.read_u8_from_top() as usize; let builtin = builtins::ALL_BUILTINS[index].1.clone(); self.push(Object::Builtin(builtin))?; } OpClosure => { let constant_index = frame_stack.read_u16_from_top() as usize; let num_free_vars = frame_stack.read_u8_from_top() as usize; let func = constants[constant_index].clone(); let free_vars = self.take(num_free_vars); if let Object::CompiledFunc(func) = func { let closure = Closure { func: *func, free_vars, }; self.push(Object::Closure(Box::new(closure)))?; } else { panic!("Trying to build closure with non-function object"); } }, OpGetFree => { let index = frame_stack.read_u8_from_top() as usize; self.push(frame_stack.top().free_vars[index].clone())?; }, } 
frame_stack.top_mut().pc += 1; } Ok(()) } fn push(&mut self, obj: Object) -> MonkeyResult<()> { if self.sp >= STACK_SIZE { Err(MonkeyError::Vm(StackOverflow)) } else { self.stack.push(obj); self.sp += 1; Ok(()) } } pub fn pop(&mut self) -> MonkeyResult<Object> { if self.sp == 0 { Err(MonkeyError::Vm(StackUnderflow)) } else { self.sp -= 1; Ok(self.stack.pop().unwrap()) } } fn take(&mut self, num_items: usize) -> Vec<Object> { let v = self.stack.split_off(self.sp - num_items); self.sp -= num_items; v } fn execute_binary_operation(&mut self, operation: OpCode) -> MonkeyResult<()> { use Object::*; let right = self.pop()?; let left = self.pop()?; match (left, operation, right) { (Integer(l), op, Integer(r)) => self.execute_integer_operation(op, l, r), (Boolean(l), op, Boolean(r)) => self.execute_bool_operation(op, l, r), (Str(l), OpCode::OpAdd, Str(r)) => self.execute_str_concat(&l, &r), (l, op, r) => Err(MonkeyError::Vm(InfixTypeError( l.type_str(), op.equivalent_token().unwrap(), r.type_str(), ))), } } fn execute_integer_operation(&mut self, op: OpCode, left: i64, right: i64) -> MonkeyResult<()> { let result = match op { OpCode::OpAdd => Object::Integer(left + right), OpCode::OpSub => Object::Integer(left - right), OpCode::OpMul => Object::Integer(left * right), OpCode::OpDiv if right == 0 => return Err(MonkeyError::Vm(DivOrModByZero)), OpCode::OpDiv => Object::Integer(left / right), OpCode::OpExponent if right < 0 => return Err(MonkeyError::Vm(NegativeExponent)), OpCode::OpExponent => Object::Integer(left.pow(right as u32)), OpCode::OpModulo if right == 0 => return Err(MonkeyError::Vm(DivOrModByZero)), OpCode::OpModulo => Object::Integer(left % right), OpCode::OpEquals => Object::Boolean(left == right), OpCode::OpNotEquals => Object::Boolean(left != right), OpCode::OpGreaterThan => Object::Boolean(left > right), OpCode::OpGreaterEq => Object::Boolean(left >= right), _ => unreachable!(), }; self.push(result)?; Ok(()) } fn execute_bool_operation(&mut self, op: OpCode, 
left: bool, right: bool) -> MonkeyResult<()> { let result = match op { OpCode::OpEquals => Object::Boolean(left == right), OpCode::OpNotEquals => Object::Boolean(left != right), _ => { return Err(MonkeyError::Vm(InfixTypeError( "bool", op.equivalent_token().unwrap(), "bool", ))) } }; self.push(result)?; Ok(()) } fn execute_str_concat(&mut self, left: &str, right: &str) -> MonkeyResult<()> { self.push(Object::Str(Box::new(left.to_string() + right))) } fn execute_prefix_operation(&mut self, op: OpCode) -> MonkeyResult<()> { let right = self.pop()?; match op { OpCode::OpPrefixMinus => { if let Object::Integer(i) = right { self.push(Object::Integer(-i))?; } else { return Err(MonkeyError::Vm(PrefixTypeError( Token::Minus, right.type_str(), ))); } } OpCode::OpPrefixNot => { let value = !right.is_truthy(); self.push(Object::Boolean(value))?; } _ => unreachable!(), } Ok(()) } fn execute_index_operation(&mut self, obj: Object, index: Object) -> MonkeyResult<()> { let result = match (obj, index) { (Object::Array(vector), Object::Integer(i)) => { if i < 0 || i >= vector.len() as i64 { Err(IndexOutOfBounds(i)) } else { Ok(vector.into_iter().nth(i as usize).unwrap()) } } (Object::Array(_), other) => Err(IndexTypeError(other.type_str())), (Object::Hash(map), key) => { let key_type = key.type_str(); let key = HashableObject::from_object(key.clone()) .ok_or(MonkeyError::Vm(HashKeyTypeError(key_type)))?; let value = map.get(&key).ok_or(MonkeyError::Vm(KeyError(key)))?; Ok(value.clone()) } (Object::Str(s), Object::Integer(i)) => { let chars = s.chars().collect::<Vec<_>>(); if i < 0 || i >= chars.len() as i64 { Err(IndexOutOfBounds(i)) } else { Ok(Object::Str(Box::new(chars[i as usize].to_string()))) } } (Object::Str(_), other) => Err(IndexTypeError(other.type_str())), (other, _) => Err(IndexingWrongType(other.type_str())), }; let result = result.map_err(MonkeyError::Vm)?; self.push(result) } fn execute_closure_call( &mut self, frame_stack: &mut FrameStack, closure: Closure, 
num_args: usize, ) -> MonkeyResult<()> { if closure.func.num_params as usize != num_args { return Err(MonkeyError::Vm(WrongNumberOfArgs( closure.func.num_params as usize, num_args, ))); } frame_stack.top_mut().pc += 1; let new_frame = Frame { instructions: closure.func.instructions, free_vars: closure.free_vars, pc: 0, base_pointer: self.sp - num_args, }; frame_stack.push(new_frame); self.sp += closure.func.num_locals as usize; self.stack.resize(self.sp, Object::Nil); Ok(()) } fn execute_builtin_call(&mut self, func: BuiltinFn, num_args: usize) -> MonkeyResult<()> { let args = self.take(num_args); let result = func.0(args).map_err(MonkeyError::Vm)?; self.push(result) } }
#[cfg(test)] mod tests; use crate::builtins::{self, BuiltinFn}; use crate::compiler::code::*; use crate::error::{MonkeyError, MonkeyResult, RuntimeError::*}; use crate::lexer::token::Token; use crate::object::*; use std::collections::HashMap; const STACK_SIZE: usize = 2048; pub const GLOBALS_SIZE: usize = 65536; struct Frame { instructions: Instructions, free_vars: Vec<Object>, pc: usize, base_pointer: usize, } struct FrameStack(Vec<Frame>); impl FrameStack { fn top(&self) -> &Frame { self.0.last().expect("No frames in frame stack") } fn top_mut(&mut self) -> &mut Frame { self.0.last_mut().expect("No frames in frame stack") } fn push(&mut self, frame: Frame) { self.0.push(frame); } fn pop(&mut self) { self.0.pop(); } fn read_u16_from_top(&mut self) -> u16 { let value = read_u16(&self.top().instructions.0[self.top().pc + 1..]); self.top_mut().pc += 2; value } fn read_u8_from_top(&mut self) -> u8 { let value = self.top().instructions.0[self.top().pc + 1]; self.top_mut().pc += 1; value } } pub struct VM { stack: Vec<Object>, sp: usize, pub globals: Box<[Object]>, } impl VM { pub fn new() -> VM { let mut globals = Vec::with_capacity(GLOBALS_SIZE); globals.resize(GLOBALS_SIZE, Object::Nil); let globals = globals.into_boxed_slice(); VM { stack: Vec::with_capacity(STACK_SIZE), sp: 0, globals, } } pub fn run(&mut self, bytecode: Bytecode) -> MonkeyResult<()> { let mut frame_stack = FrameStack({ let root_frame = Frame { instructions: bytecode.instructions, free_vars: Vec::new(), pc: 0, base_pointer: 0, }; vec![root_frame] }); let constants = bytecode.constants; loop { if frame_stack.top().pc >= frame_stack.top().instructions.0.len() { if frame_stack.0.len() == 1 { break; } else { panic!("Reached end of instructions in non-root frame") } } use OpCode::*; let op = OpCode::from_byte(frame_stack.top().instructions.0[frame_stack.top().pc]); match op { OpConstant => { let constant_index = frame_stack.read_u16_from_top() as usize; self.push(constants[constant_index].clone())?; } 
OpPop => { self.pop()?; } OpAdd | OpSub | OpMul | OpDiv | OpExponent | OpModulo | OpEquals | OpNotEquals | OpGreaterThan | OpGreaterEq => self.execute_binary_operation(op)?, OpTrue => self.push(Object::Boolean(true))?, OpFalse => self.push(Object::Boolean(false))?, OpPrefixMinus | OpPrefixNot => self.execute_prefix_operation(op)?, OpJumpNotTruthy => { let pos = frame_stack.read_u16_from_top() as usize; if !Object::is_truthy(&self.pop()?) { frame_stack.top_mut().pc = pos - 1; } } OpJump => { let pos = frame_stack.read_u16_from_top() as usize; frame_stack.top_mut().pc = pos - 1; } OpNil => self.push(Object::Nil)?, OpSetGlobal => { let index = frame_stack.read_u16_from_top() as usize; self.globals[index] = self.pop()?.clone(); } OpGetGlobal => { let index = frame_stack.read_u16_from_top() as usize; self.push(self.globals[index].clone())?; } OpSetLocal => { let index = frame_stack.read_u8_from_top() as usize; self.stack[frame_stack.top().base_pointer + index] = self.pop()?; } OpGetLocal => { let index = frame_stack.read_u8_from_top() as usize; self.push(self.stack[frame_stack.top().base_pointer + index].clone())? 
} OpArray => { let num_elements = frame_stack.read_u16_from_top() as usize; let arr = self.take(num_elements); self.push(Object::Array(Box::new(arr)))?; } OpHash => { let num_elements = frame_stack.read_u16_from_top() as usize; let entries = self.take(2 * num_elements); let mut map = HashMap::new(); for i in 0..num_elements { let key = &entries[i * 2]; let value = &entries[i * 2 + 1]; let hashable = HashableObject::from_object(key.clone()) .ok_or_else(|| MonkeyError::Vm(HashKeyTypeError(key.type_str())))?; map.insert(hashable, value.clone()); } self.push(Object::Hash(Box::new(map)))?; } OpIndex => { let index = self.pop()?; let obj = self.pop()?; self.execute_index_operation(obj, index)?; } OpCall => { let num_args = frame_stack.read_u8_from_top() as usize; let func = self.stack.remove(self.sp - 1 - num_args); self.sp -= 1; match func { Object::Closure(c) => { self.execute_closure_call(&mut frame_stack, *c, num_args)?; continue; } Object::Builtin(f) => self.execute_builtin_call(f, num_args)?, _ => return Err(MonkeyError::Vm(NotCallable(func.type_str()))), } } OpReturn => { let returned_value = self.pop()?; self.sp = frame_stack.top().base_pointer; self.stack.truncate(self.sp); frame_stack.pop(); self.push(returned_value)?; continue; } OpGetBuiltin => { let index = frame_stack.read_u8_from_top() as usize; let builtin = builtins::ALL_BUILTINS[index].1.clone(); self.push(Object::Builtin(builtin))?; } OpClosure => { let constant_index = frame_stack.read_u16_from_top() as usize; let num_free_vars = frame_stack.read_u8_from_top() as usize; let func = constants[constant_index].clone(); let free_vars = self.take(num_free_vars); if let Object::CompiledFunc(func) = func { let closure = Closure { func: *func, free_vars, }; self.push(Object::Closure(Box::new(closure)))?; } else { panic!("Trying to build closure with non-function object"); } }, OpGetFree => { let index = frame_stack.read_u8_from_top() as usize; self.push(frame_stack.top().free_vars[index].clone())?; }, } 
frame_stack.top_mut().pc += 1; } Ok(()) } fn push(&mut self, obj: Object) -> MonkeyResult<()> { if self.sp >= STACK_SIZE { Err(MonkeyError::Vm(StackOverflow)) } else { self.stack.push(obj); self.sp += 1; Ok(()) } } pub fn pop(&mut self) -> MonkeyResult<Object> { if self.sp == 0 { Err(MonkeyError::Vm(StackUnderflow)) } else { self.sp -= 1; Ok(self.stack.pop().unwrap()) } } fn take(&mut self, num_items: usize) -> Vec<Object> { let v = self.stack.split_off(self.sp - num_items); self.sp -= num_items; v } fn execute_binary_operation(&mut self, operation: OpCode) -> MonkeyResult<()> { use Object::*; let right = self.pop()?; let left = self.pop()?; match (left, operation, right) { (Integer(l), op, Integer(r)) => self.execute_integer_operation(op, l, r), (Boolean(l), op, Boolean(r)) => self.execute_bool_operation(op, l, r), (Str(l), OpCode::OpAdd, Str(r)) => self.execute_str_concat(&l, &r), (l, op, r) => Err(MonkeyError::Vm(InfixTypeError( l.type_str(), op.equivalent_token().unwrap(), r.type_str(), ))), } } fn execute_integer_operation(&mut self, op: OpCode, left: i64, right: i64) -> MonkeyResult<()> { let result = match op { OpCode::OpAdd => Object::Integer(left + right), OpCode::OpSub => Object::Integer(left - right), OpCode::OpMul => Object::Integer(left * right), OpCode::OpDiv if right == 0 => return Err(MonkeyError::Vm(DivOrModByZero)), OpCode::OpDiv => Object::Integer(left / right), OpCode::OpExponent if right < 0 => return Err(MonkeyError::Vm(NegativeExponent)), OpCode::OpExponent => Object::Integer(left.pow(right as u32)), OpCode::OpModulo if right == 0 => return Err(MonkeyError::Vm(DivOrModByZero)), OpCode::OpModulo => Object::Integer(left % right), OpCode::OpEquals => Object::Boolean(left == right), OpCode::OpNotEquals => Object::Boolean(left != right), OpCode::OpGreaterThan => Object::Boolean(left > right), OpCode::OpGreaterEq => Object::Boolean(left >= right), _ => unreachable!(), }; self.push(result)?; Ok(()) } fn execute_bool_operation(&mut self, op: OpCode, 
left: bool, right: bool) -> MonkeyResult<()> { let result = match op { OpCode::OpEquals => Object::Boolean(left == right), OpCode::OpNotEquals => Object::Boolean(left != right), _ => { return Err(MonkeyError::Vm(InfixTypeError( "bool", op.equivalent_token().unwrap(), "bool", ))) } }; self.push(result)?; Ok(()) } fn execute_str_concat(&mut self, left: &str, right: &str) -> MonkeyResult<()> { self.push(Object::Str(Box::new(left.to_string() + right))) } fn execute_prefix_operation(&mut self, op: OpCode) -> MonkeyResult<()> { let right = self.pop()?; match op { OpCode::OpPrefixMinus => { if let Object::Integer(i) = right { self.push(Object::Integer(-i))?; } else { return Err(MonkeyError::Vm(PrefixTypeError( Token::Minus, right.type_str(), ))); } } OpCode::OpPrefixNot => { let value = !right.is_truthy(); self.push(Object::Boolean(value))?; } _ => unreachable!(), } Ok(()) } fn execute_index_operation(&mut self, obj: Object, index: Object) -> MonkeyResult<()> { let result = match (obj, index) { (Object::Array(vector), Object::Integer(i)) => { if i < 0 || i >= vector.len() as i64 { Err(IndexOutOfBounds(i)) } else { Ok(vector.into_iter().nth(i as usize).unwrap()) } } (Object::Array(_), other) => Err(IndexTypeError(other.type_str())), (Object::Hash(map), key) => { let key_type = key.type_str(); let key = HashableObject::from_object(key.clone()) .ok_or(MonkeyError::Vm(HashKeyTypeError(key_type)))?; let value = map.get(&key).ok_or(MonkeyError::Vm(KeyError(key)))?; Ok(value.clone()) } (Object::Str(s), Object::Integer(i)) => { let chars = s.chars().collect::<Vec<_>>(); if i < 0 || i >= chars.len() as i64 { Err(IndexOutOfBounds(i)) } else { Ok(Object::Str(Box::new(chars[i as usize].to_string()))) } } (Object::Str(_), other) => Err(IndexTypeError(other.type_str())), (other, _) => Err(IndexingWrongType(other.type_str())), }; let result = result.map_err(MonkeyError::Vm)?; self.push(result) } fn execute_closure_call( &mut self, frame_stack: &mut FrameStack, closure: Closure, 
num_args: usize, ) -> MonkeyResult<()> { if closure.func.num_params as usize != num_args { return Err(MonkeyError::Vm(WrongNumberOfArgs( closure.func.num_params as usize, num_args, ))); } frame_stack.top_mut().
self.stack.resize(self.sp, Object::Nil); Ok(()) } fn execute_builtin_call(&mut self, func: BuiltinFn, num_args: usize) -> MonkeyResult<()> { let args = self.take(num_args); let result = func.0(args).map_err(MonkeyError::Vm)?; self.push(result) } }
pc += 1; let new_frame = Frame { instructions: closure.func.instructions, free_vars: closure.free_vars, pc: 0, base_pointer: self.sp - num_args, }; frame_stack.push(new_frame); self.sp += closure.func.num_locals as usize;
function_block-random_span
[ { "content": "pub fn eval_index_expression(object: &Object, index: &Object) -> Result<Object, RuntimeError> {\n\n // This function is pub because the \"get\" built-in needs to call it\n\n match (object, index) {\n\n (Object::Array(vector), Object::Integer(i)) => {\n\n if *i < 0 || *i >= vector.len() as i64 {\n\n Err(IndexOutOfBounds(*i))\n\n } else {\n\n Ok(vector[*i as usize].clone())\n\n }\n\n }\n\n (Object::Array(_), other) => Err(IndexTypeError(other.type_str())),\n\n (Object::Hash(map), key) => {\n\n let key_type = key.type_str();\n\n let key = HashableObject::from_object(key.clone())\n\n .ok_or_else(|| HashKeyTypeError(key_type))?;\n\n let value = map.get(&key).ok_or_else(|| KeyError(key))?;\n\n Ok(value.clone())\n\n }\n\n (Object::Str(s), Object::Integer(i)) => {\n\n let chars = s.chars().collect::<Vec<_>>();\n", "file_path": "src/interpreter/mod.rs", "rank": 0, "score": 235623.8932976615 }, { "content": "fn assert_object_type_integer(obj: &Object) -> Result<&i64, RuntimeError> {\n\n if let Object::Integer(i) = obj {\n\n Ok(i)\n\n } else {\n\n Err(RuntimeError::TypeError(\n\n Object::Integer(0).type_str(),\n\n obj.type_str(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "src/builtins.rs", "rank": 1, "score": 233077.96739002428 }, { "content": "fn eval_prefix_expression(operator: &Token, right: &Object) -> Result<Object, RuntimeError> {\n\n match (operator, right) {\n\n (Token::Minus, Object::Integer(i)) => Ok(Object::Integer(-i)),\n\n (Token::Bang, obj) => Ok(Object::Boolean(!obj.is_truthy())),\n\n _ => Err(PrefixTypeError(operator.clone(), right.type_str())),\n\n }\n\n}\n\n\n", "file_path": "src/interpreter/mod.rs", "rank": 3, "score": 202278.48891999727 }, { "content": "fn assert_object_type_array(obj: &Object) -> Result<&Vec<Object>, RuntimeError> {\n\n if let Object::Array(a) = obj {\n\n Ok(a)\n\n } else {\n\n Err(RuntimeError::TypeError(\n\n Object::Array(Box::new(vec![])).type_str(),\n\n obj.type_str(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": 
"src/builtins.rs", "rank": 4, "score": 197689.38212128158 }, { "content": "pub fn read_u16(instructions: &[u8]) -> u16 {\n\n u16::from_be_bytes(instructions[..2].try_into().unwrap())\n\n}\n", "file_path": "src/compiler/code.rs", "rank": 5, "score": 195157.10027640813 }, { "content": "pub fn get_builtin(name: &str) -> Option<Object> {\n\n ALL_BUILTINS\n\n .iter()\n\n .find(|(s, _)| s == &name)\n\n .map(|(_, f)| Object::Builtin(f.clone()))\n\n}\n\n\n", "file_path": "src/builtins.rs", "rank": 6, "score": 188602.50857545034 }, { "content": "pub fn read_operands(op: OpCode, instructions: &[u8]) -> (Vec<usize>, usize) {\n\n // @PERFORMANCE: Maybe taking a &mut &[u8] would be faster?\n\n let mut operands = Vec::with_capacity(op.operand_widths().len());\n\n let mut offset = 0;\n\n for width in op.operand_widths() {\n\n match width {\n\n 1 => operands.push(instructions[offset] as usize),\n\n 2 => {\n\n let operand = read_u16(&instructions[offset..]) as usize;\n\n operands.push(operand);\n\n }\n\n _ => panic!(\"unsupported operand width\"),\n\n }\n\n offset += width;\n\n }\n\n (operands, offset)\n\n}\n\n\n", "file_path": "src/compiler/code.rs", "rank": 7, "score": 187812.88487900415 }, { "content": "pub fn parse_and_compile(program: &str) -> Result<code::Bytecode, MonkeyError> {\n\n let parsed = parser::parse(program.into())?;\n\n let mut comp = compiler::Compiler::new();\n\n comp.compile_block(parsed)?;\n\n Ok(comp.bytecode())\n\n}\n", "file_path": "src/test_utils.rs", "rank": 9, "score": 173288.4898414513 }, { "content": "fn builtin_tail(args: Vec<Object>) -> Result<Object, RuntimeError> {\n\n assert_num_arguments(&args, 1)?;\n\n let array = assert_object_type_array(&args[0])?;\n\n match array.get(1..) 
{\n\n Some(tail) => Ok(Object::Array(Box::new(tail.to_vec()))),\n\n None => Ok(Object::Nil),\n\n }\n\n}\n\n\n", "file_path": "src/builtins.rs", "rank": 10, "score": 170413.28561283622 }, { "content": "fn builtin_assert(args: Vec<Object>) -> Result<Object, RuntimeError> {\n\n assert_num_arguments(&args, 1)?;\n\n if args[0].is_truthy() {\n\n Ok(Object::Nil)\n\n } else {\n\n Err(RuntimeError::Custom(format!(\n\n \"Assertion failed on value {}\",\n\n args[0]\n\n )))\n\n }\n\n}\n", "file_path": "src/builtins.rs", "rank": 11, "score": 170413.28561283622 }, { "content": "fn builtin_head(args: Vec<Object>) -> Result<Object, RuntimeError> {\n\n assert_num_arguments(&args, 1)?;\n\n let array = assert_object_type_array(&args[0])?;\n\n if let Some(obj) = array.get(0) {\n\n Ok(obj.clone())\n\n } else {\n\n Ok(Object::Nil)\n\n }\n\n}\n\n\n", "file_path": "src/builtins.rs", "rank": 12, "score": 170413.28561283622 }, { "content": "fn builtin_range(args: Vec<Object>) -> Result<Object, RuntimeError> {\n\n if args.is_empty() {\n\n return Err(RuntimeError::WrongNumberOfArgs(1, 0));\n\n } else if args.len() > 3 {\n\n return Err(RuntimeError::WrongNumberOfArgs(3, args.len()));\n\n }\n\n\n\n let mut end = *assert_object_type_integer(&args[0])?;\n\n\n\n let mut start = 0;\n\n if args.len() >= 2 {\n\n start = end;\n\n end = *assert_object_type_integer(&args[1])?;\n\n }\n\n\n\n let step = if args.len() >= 3 {\n\n *assert_object_type_integer(&args[2])?\n\n } else {\n\n 1\n\n };\n", "file_path": "src/builtins.rs", "rank": 13, "score": 170413.28561283622 }, { "content": "fn builtin_len(args: Vec<Object>) -> Result<Object, RuntimeError> {\n\n assert_num_arguments(&args, 1)?;\n\n\n\n let length = match &args[0] {\n\n Object::Str(s) => s.chars().count(),\n\n Object::Array(a) => a.len(),\n\n o => {\n\n return Err(RuntimeError::Custom(format!(\n\n \"'{}' object has no length\",\n\n o.type_str()\n\n )))\n\n }\n\n };\n\n\n\n Ok(Object::Integer(length as i64))\n\n}\n\n\n", "file_path": 
"src/builtins.rs", "rank": 14, "score": 170413.28561283622 }, { "content": "fn builtin_cons(args: Vec<Object>) -> Result<Object, RuntimeError> {\n\n assert_num_arguments(&args, 2)?;\n\n let tail = assert_object_type_array(&args[1])?;\n\n let mut new = vec![args[0].clone()];\n\n new.extend_from_slice(tail);\n\n Ok(Object::Array(Box::new(new)))\n\n}\n\n\n", "file_path": "src/builtins.rs", "rank": 15, "score": 170413.28561283622 }, { "content": "fn builtin_type(args: Vec<Object>) -> Result<Object, RuntimeError> {\n\n assert_num_arguments(&args, 1)?;\n\n Ok(Object::from(args[0].type_str()))\n\n}\n\n\n", "file_path": "src/builtins.rs", "rank": 16, "score": 170413.28561283622 }, { "content": "fn builtin_puts(args: Vec<Object>) -> Result<Object, RuntimeError> {\n\n if args.is_empty() {\n\n return Err(RuntimeError::WrongNumberOfArgs(1, 0));\n\n }\n\n\n\n for arg in &args[..args.len() - 1] {\n\n print!(\"{} \", arg);\n\n }\n\n println!(\"{}\", args[args.len() - 1]);\n\n\n\n Ok(Object::Nil)\n\n}\n\n\n", "file_path": "src/builtins.rs", "rank": 17, "score": 170413.28561283622 }, { "content": "fn builtin_push(args: Vec<Object>) -> Result<Object, RuntimeError> {\n\n assert_num_arguments(&args, 2)?;\n\n let mut array = assert_object_type_array(&args[0])?.clone();\n\n array.push(args[1].clone());\n\n Ok(Object::Array(Box::new(array)))\n\n}\n\n\n", "file_path": "src/builtins.rs", "rank": 18, "score": 170413.28561283622 }, { "content": "pub fn make(op: OpCode, operands: &[usize]) -> Box<[u8]> {\n\n let instruction_len = 1 + op.operand_widths().iter().sum::<usize>();\n\n assert_eq!(operands.len(), op.operand_widths().len());\n\n let mut instruction = Vec::with_capacity(instruction_len);\n\n instruction.push(op as u8);\n\n for (&operand, width) in operands.iter().zip(op.operand_widths()) {\n\n match width {\n\n 1 => instruction.push(operand as u8),\n\n 2 => instruction.extend_from_slice(&(operand as u16).to_be_bytes()),\n\n _ => panic!(\"unsupported operand width\"),\n\n }\n\n }\n\n 
instruction.into_boxed_slice()\n\n}\n\n\n", "file_path": "src/compiler/code.rs", "rank": 19, "score": 169216.84538924388 }, { "content": "fn assert_vm_runs(input: &[&str], expected: &[Object]) {\n\n for (program, exp) in input.iter().zip(expected) {\n\n let bytecode =\n\n test_utils::parse_and_compile(program).expect(\"Parser or compiler error during test\");\n\n let mut vm = VM::new();\n\n vm.run(bytecode).unwrap();\n\n assert_eq!(exp, &vm.pop().unwrap());\n\n }\n\n}\n\n\n", "file_path": "src/vm/tests.rs", "rank": 20, "score": 166864.6017967115 }, { "content": "pub fn start(compiled: bool) -> MonkeyResult<()> {\n\n eprintln!(\"Now with an even fancier REPL!\");\n\n eprintln!(\n\n \"(running using {})\",\n\n if compiled {\n\n \"compiler and VM\"\n\n } else {\n\n \"interpreter\"\n\n }\n\n );\n\n let mut rl = rustyline::Editor::<ReplHelper>::new();\n\n rl.set_helper(Some(ReplHelper {}));\n\n\n\n // Unbind Tab\n\n rl.unbind_sequence(rustyline::KeyPress::Tab);\n\n // Bind Tab to insert 4 spaces\n\n rl.bind_sequence(\n\n rustyline::KeyPress::Tab,\n\n rustyline::Cmd::Insert(1, \" \".into()),\n\n );\n\n let res = if compiled {\n\n start_compiled(rl)\n\n } else {\n\n start_interpreted(rl)\n\n };\n\n eprintln!(\"Goodbye!\");\n\n res.map_err(|e| e.into())\n\n}\n\n\n", "file_path": "src/repl.rs", "rank": 21, "score": 165031.08589843672 }, { "content": "fn assert_num_arguments(args: &[Object], expected: usize) -> Result<(), RuntimeError> {\n\n if args.len() != expected {\n\n Err(RuntimeError::WrongNumberOfArgs(expected, args.len()))\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/builtins.rs", "rank": 22, "score": 161158.33350796747 }, { "content": "fn assert_eval(input: &str, expected: &[object::Object]) {\n\n let parsed = parser::parse(input.into()).expect(\"Parser error during test\");\n\n assert_eq!(parsed.len(), expected.len());\n\n let env = Rc::new(RefCell::new(environment::Environment::empty()));\n\n\n\n // Eval program statements and compare with 
expected\n\n for (statement, exp) in parsed.into_iter().zip(expected) {\n\n let got = eval_statement(&statement, &env).expect(\"Runtime error during test\");\n\n assert_eq!(exp, &got);\n\n }\n\n}\n\n\n", "file_path": "src/interpreter/tests.rs", "rank": 23, "score": 149189.7281545267 }, { "content": "pub fn eval_expression(expression: &NodeExpression, env: &EnvHandle) -> MonkeyResult<Object> {\n\n match &expression.expression {\n\n Expression::Identifier(s) => {\n\n // Note: This clones the object\n\n match env.borrow().get(&s) {\n\n Some(value) => Ok(value),\n\n None => Err(MonkeyError::Interpreter(\n\n expression.position,\n\n IdenNotFound(s.clone()),\n\n )),\n\n }\n\n }\n\n Expression::IntLiteral(i) => Ok(Object::Integer(*i)),\n\n Expression::Boolean(b) => Ok(Object::Boolean(*b)),\n\n Expression::StringLiteral(s) => Ok(Object::Str(Box::new(s.clone()))),\n\n Expression::ArrayLiteral(v) => {\n\n let mut elements = Vec::with_capacity(v.len());\n\n for exp in v {\n\n elements.push(eval_expression(exp, env)?);\n\n }\n", "file_path": "src/interpreter/mod.rs", "rank": 24, "score": 146392.35300830557 }, { "content": "pub fn eval_statement(statement: &NodeStatement, env: &EnvHandle) -> MonkeyResult<Object> {\n\n match &statement.statement {\n\n Statement::ExpressionStatement(exp) => eval_expression(exp, env),\n\n Statement::Return(exp) => {\n\n let value = eval_expression(exp, env)?;\n\n Err(MonkeyError::Interpreter(\n\n statement.position,\n\n RuntimeError::ReturnValue(Box::new(value)),\n\n ))\n\n }\n\n Statement::Let(let_statement) => {\n\n let (name, exp) = &**let_statement;\n\n let value = eval_expression(&exp, env)?;\n\n env.borrow_mut().insert(name.clone(), value);\n\n Ok(Object::Nil)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/interpreter/mod.rs", "rank": 25, "score": 146392.35300830557 }, { "content": "type PrefixParseFn = fn(&mut Parser) -> MonkeyResult<NodeExpression>;\n", "file_path": "src/parser/mod.rs", "rank": 26, "score": 132188.95837227133 }, { "content": 
"#[test]\n\nfn test_closures() {\n\n let input = [\n\n \"fn(a) { fn(b) { fn(c) { a + b + c } } }(1)(3)(5)\",\n\n\n\n \"let make_closure = fn(a) {\n\n fn() { a }\n\n };\n\n let closure = make_closure(4);\n\n closure()\",\n\n\n\n \"let new_adder = fn(a, b) {\n\n fn(c) { a + b + c }\n\n };\n\n new_adder(1, 2)(8)\",\n\n \"let new_adder = fn(a, b) {\n\n let c = a + b;\n\n fn(d) { c + d }\n\n };\n\n new_adder(1, 2)(8)\",\n\n\n", "file_path": "src/vm/tests.rs", "rank": 27, "score": 130809.41973729747 }, { "content": "pub fn run_program(program: Vec<NodeStatement>) -> MonkeyResult<()> {\n\n let env = Rc::new(RefCell::new(Environment::empty()));\n\n for statement in program {\n\n eval_statement(&statement, &env)?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/interpreter/mod.rs", "rank": 28, "score": 125790.29823642226 }, { "content": "#[test]\n\nfn test_global_assignment() {\n\n let input = [\n\n \"let one = 1; one\",\n\n \"let one = 1; let two = 2; one + two\",\n\n \"let one = 1; let two = one + one; one + two\",\n\n ];\n\n let expected = [Object::Integer(1), Object::Integer(3), Object::Integer(3)];\n\n assert_vm_runs(&input, &expected);\n\n}\n\n\n", "file_path": "src/vm/tests.rs", "rank": 29, "score": 125712.09152839362 }, { "content": "#[test]\n\nfn test_index_expressions() {\n\n let input = [\n\n \"[1, 2, 3][1]\",\n\n \"[1, 2, 3][0 + 2]\",\n\n \"[[1, 1, 1]][0][0]\",\n\n \"#{ 1: 1, 2: 2 }[1]\",\n\n \"#{ 1: 1, 2: 2 }[2]\",\n\n ];\n\n let expected = [\n\n Object::Integer(2),\n\n Object::Integer(3),\n\n Object::Integer(1),\n\n Object::Integer(1),\n\n Object::Integer(2),\n\n ];\n\n assert_vm_runs(&input, &expected);\n\n}\n\n\n", "file_path": "src/vm/tests.rs", "rank": 30, "score": 125679.89946742887 }, { "content": "#[test]\n\nfn test_builtin_functions() {\n\n let input = [\n\n \"len(\\\"\\\")\",\n\n \"len(\\\"four\\\")\",\n\n \"len(\\\"hello world\\\")\",\n\n \"len([1, 2, 3])\",\n\n \"len([])\",\n\n \"puts(\\\"hi\\\")\",\n\n \"head([1, 2, 3, 4])\",\n\n \"head([])\",\n\n 
\"tail([1, 2, 3, 4])\",\n\n \"tail([])\",\n\n \"tail([0])\",\n\n \"push([], 0)\",\n\n \"push([1, 2], 0)\",\n\n ];\n\n let expected = [\n\n Object::Integer(0),\n\n Object::Integer(4),\n\n Object::Integer(11),\n", "file_path": "src/vm/tests.rs", "rank": 31, "score": 125456.93916968997 }, { "content": "pub fn parse(input: String) -> MonkeyResult<Vec<NodeStatement>> {\n\n let lex = Lexer::from_string(input)?;\n\n let mut pars = Parser::new(lex)?;\n\n pars.parse_program()\n\n}\n\n\n", "file_path": "src/parser/mod.rs", "rank": 32, "score": 122830.32260565742 }, { "content": "#[test]\n\nfn test_stack_cleaning_after_call() {\n\n let input = \"\n\n let foo = fn() {\n\n 5 + (if true { return 2; })\n\n };\n\n foo();\n\n \";\n\n let bytecode =\n\n test_utils::parse_and_compile(input).expect(\"Parser or compiler error during test\");\n\n let mut vm = VM::new();\n\n vm.run(bytecode).unwrap();\n\n assert_eq!(vm.stack.len(), 1);\n\n}\n\n\n", "file_path": "src/vm/tests.rs", "rank": 33, "score": 121034.41408788216 }, { "content": "type InfixParseFn = fn(&mut Parser, Box<NodeExpression>) -> MonkeyResult<NodeExpression>;\n\n\n", "file_path": "src/parser/mod.rs", "rank": 34, "score": 118795.38987853978 }, { "content": "fn eval_block(block: &[NodeStatement], env: &EnvHandle) -> MonkeyResult<Object> {\n\n let mut last = Object::Nil;\n\n let new_env = Rc::new(RefCell::new(Environment::extend(env)));\n\n for s in block {\n\n last = eval_statement(s, &new_env)?;\n\n }\n\n Ok(last)\n\n}\n\n\n", "file_path": "src/interpreter/mod.rs", "rank": 35, "score": 116720.71790185517 }, { "content": "fn assert_parse(input: &str, expected: &[&str]) {\n\n let output = parse(input.into()).expect(\"Parser error during test\");\n\n assert_eq!(output.len(), expected.len());\n\n for i in 0..output.len() {\n\n assert_eq!(format!(\"{:?}\", output[i]), expected[i]);\n\n }\n\n}\n\n\n", "file_path": "src/parser/tests.rs", "rank": 36, "score": 114032.50709801711 }, { "content": "pub fn eval_call_expression(\n\n obj: 
Object,\n\n args: Vec<Object>,\n\n call_position: (usize, usize), // We need the caller position to properly report errors\n\n) -> MonkeyResult<Object> {\n\n match obj {\n\n Object::InterpreterFunc(fo) => call_function_object(*fo, args, call_position),\n\n Object::Builtin(b) => {\n\n b.0(args).map_err(|e| MonkeyError::Interpreter(call_position, e))\n\n }\n\n other => Err(MonkeyError::Interpreter(\n\n call_position,\n\n NotCallable(other.type_str()),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/interpreter/mod.rs", "rank": 37, "score": 111008.4151123075 }, { "content": "fn assert_runtime_error(input: &str, expected_errors: &[&str]) {\n\n let parsed = parser::parse(input.into()).expect(\"Parser error during test\");\n\n let env = Rc::new(RefCell::new(environment::Environment::empty()));\n\n for (statement, &error) in parsed.iter().zip(expected_errors) {\n\n let got = eval_statement(statement, &env).expect_err(\"No runtime error encountered\");\n\n match got {\n\n MonkeyError::Interpreter(_, e) => assert_eq!(format!(\"{}\", e), error),\n\n _ => panic!(\"Wrong error type\"),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/interpreter/tests.rs", "rank": 38, "score": 108771.49892135922 }, { "content": "fn assert_parse_fails(input: &str) {\n\n assert!(parse(input.into()).is_err());\n\n}\n\n\n", "file_path": "src/parser/tests.rs", "rank": 39, "score": 104311.35325926467 }, { "content": "fn run_program_file(compiled: bool, path: String) -> Result<(), MonkeyError> {\n\n let reader = BufReader::new(File::open(path)?);\n\n let lexer = lexer::Lexer::new(Box::new(reader))?;\n\n let parsed_program = parser::Parser::new(lexer)?.parse_program()?;\n\n if compiled {\n\n let mut comp = compiler::Compiler::new();\n\n comp.compile_block(parsed_program)?;\n\n let code = comp.bytecode();\n\n let mut vm = vm::VM::new();\n\n vm.run(code)?;\n\n } else {\n\n interpreter::run_program(parsed_program)?;\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/main.rs", "rank": 40, "score": 103348.81156591067 }, { 
"content": "#[test]\n\nfn test_closures() {\n\n let input = \"\n\n let make_adder = fn(x) {\n\n let adder = fn(y) { x + y };\n\n return adder;\n\n };\n\n let add_3 = make_adder(3);\n\n add_3(5);\n\n\n\n let foo = fn() {\n\n let outer = 1;\n\n {\n\n let inner = 2;\n\n return fn() { outer + inner };\n\n }\n\n };\n\n foo()();\n\n \";\n\n let expected = [Nil, Nil, Integer(8), Nil, Integer(3)];\n\n assert_eval(input, &expected);\n\n}\n\n\n", "file_path": "src/interpreter/tests.rs", "rank": 41, "score": 103007.08679174152 }, { "content": "#[test]\n\nfn test_operators() {\n\n let input = \"= ! + - * / ^ % < > == != <= >=\";\n\n let expected = [\n\n Token::Assign,\n\n Token::Bang,\n\n Token::Plus,\n\n Token::Minus,\n\n Token::Asterisk,\n\n Token::Slash,\n\n Token::Exponent,\n\n Token::Modulo,\n\n Token::LessThan,\n\n Token::GreaterThan,\n\n Token::Equals,\n\n Token::NotEquals,\n\n Token::LessEq,\n\n Token::GreaterEq,\n\n Token::EOF,\n\n ];\n\n assert_lex(input, &expected);\n\n}\n\n\n", "file_path": "src/lexer/tests.rs", "rank": 42, "score": 103007.08679174152 }, { "content": "#[test]\n\nfn test_closures() {\n\n let outer_func = Object::CompiledFunc(Box::new(CompiledFunction {\n\n instructions: instructions! {\n\n (OpCode::OpGetLocal, 0),\n\n (OpCode::OpClosure, 0, 1),\n\n (OpCode::OpReturn),\n\n },\n\n num_locals: 1,\n\n num_params: 1,\n\n }));\n\n let inner_func = Object::CompiledFunc(Box::new(CompiledFunction {\n\n instructions: instructions! {\n\n (OpCode::OpGetFree, 0),\n\n (OpCode::OpGetLocal, 0),\n\n (OpCode::OpAdd),\n\n (OpCode::OpReturn),\n\n },\n\n num_locals: 1,\n\n num_params: 1,\n\n }));\n", "file_path": "src/compiler/tests.rs", "rank": 43, "score": 103007.08679174152 }, { "content": "#[test]\n\nfn test_builtins() {\n\n assert_compile(\n\n \"len([]); push([], 1);\",\n\n vec![Object::Integer(1)],\n\n instructions! 
{\n\n (OpCode::OpGetBuiltin, 2),\n\n (OpCode::OpArray, 0),\n\n (OpCode::OpCall, 1),\n\n (OpCode::OpPop),\n\n (OpCode::OpGetBuiltin, 3),\n\n (OpCode::OpArray, 0),\n\n (OpCode::OpConstant, 0),\n\n (OpCode::OpCall, 2),\n\n },\n\n );\n\n let expected_func = Object::CompiledFunc(Box::new(CompiledFunction {\n\n instructions: instructions! {\n\n (OpCode::OpGetBuiltin, 2),\n\n (OpCode::OpArray, 0),\n\n (OpCode::OpCall, 1),\n", "file_path": "src/compiler/tests.rs", "rank": 44, "score": 102756.13313311334 }, { "content": "#[test]\n\nfn test_arrays() {\n\n let input = [\"[]\", \"[1, 2, 3]\", \"[1 + 2, 3 - 4, 5 * 6]\"];\n\n let expected = [\n\n monkey_array![],\n\n monkey_array![Object::Integer(1), Object::Integer(2), Object::Integer(3)],\n\n monkey_array![Object::Integer(3), Object::Integer(-1), Object::Integer(30)],\n\n ];\n\n assert_vm_runs(&input, &expected);\n\n}\n\n\n", "file_path": "src/vm/tests.rs", "rank": 45, "score": 102329.40136219026 }, { "content": "#[test]\n\nfn test_hashes() {\n\n let input = [\"#{}\", \"#{ 1: 2, 2: 3 }\", \"#{ 1 + 1: 2 * 2, 3 + 3: 4 * 4 }\"];\n\n let expected = [\n\n monkey_hash! {},\n\n monkey_hash! {\n\n HashableObject::Integer(1) => Object::Integer(2),\n\n HashableObject::Integer(2) => Object::Integer(3)\n\n },\n\n monkey_hash! 
{\n\n HashableObject::Integer(2) => Object::Integer(4),\n\n HashableObject::Integer(6) => Object::Integer(16)\n\n },\n\n ];\n\n assert_vm_runs(&input, &expected)\n\n}\n\n\n", "file_path": "src/vm/tests.rs", "rank": 46, "score": 102329.40136219024 }, { "content": "#[test]\n\nfn test_strings() {\n\n let input = [\n\n r#\"\"monkey\"\"#,\n\n r#\"\"mon\" + \"key\"\"#,\n\n r#\"\"mon\" + \"key\" + \"banana\"\"#,\n\n ];\n\n let expected = [\n\n Object::from(\"monkey\"),\n\n Object::from(\"monkey\"),\n\n Object::from(\"monkeybanana\"),\n\n ];\n\n assert_vm_runs(&input, &expected);\n\n}\n\n\n", "file_path": "src/vm/tests.rs", "rank": 47, "score": 102329.40136219024 }, { "content": "#[test]\n\nfn test_let_statements() {\n\n let input = \"\n\n { let a = 5; a }\n\n { let a = 5 * 5; a }\n\n { let a = 5; let b = a; b }\n\n { let a = 5; let b = a; let c = a + b + 5; c }\n\n { let a = 5; { let a = 0; } a }\n\n \";\n\n let expected = [Integer(5), Integer(25), Integer(5), Integer(15), Integer(5)];\n\n assert_eval(input, &expected);\n\n}\n\n\n", "file_path": "src/interpreter/tests.rs", "rank": 48, "score": 99246.47390944448 }, { "content": "#[test]\n\nfn test_let_statements() {\n\n assert_parse(\"let a = 1;\", &[\"Let((\\\"a\\\", IntLiteral(1)))\"]);\n\n assert_parse_fails(\"let 2 = 3;\");\n\n assert_parse_fails(\"let foo whatever 3;\");\n\n assert_parse_fails(\"let bar = ;\");\n\n assert_parse_fails(\"let baz;\");\n\n}\n\n\n", "file_path": "src/parser/tests.rs", "rank": 49, "score": 99246.47390944448 }, { "content": "#[test]\n\nfn test_global_assignment() {\n\n assert_compile(\n\n \"let one = 1; let two = 2\",\n\n vec![Object::Integer(1), Object::Integer(2)],\n\n instructions! {\n\n (OpCode::OpConstant, 0),\n\n (OpCode::OpSetGlobal, 0),\n\n (OpCode::OpConstant, 1),\n\n (OpCode::OpSetGlobal, 1),\n\n (OpCode::OpNil),\n\n },\n\n );\n\n assert_compile(\n\n \"let one = 1; one\",\n\n vec![Object::Integer(1)],\n\n instructions! 
{\n\n (OpCode::OpConstant, 0),\n\n (OpCode::OpSetGlobal, 0),\n\n (OpCode::OpGetGlobal, 0),\n\n },\n", "file_path": "src/compiler/tests.rs", "rank": 50, "score": 99246.47390944448 }, { "content": "#[test]\n\nfn test_bool_expressions() {\n\n let input = r#\"\n\n false;\n\n !true;\n\n !!true;\n\n 1 < 2;\n\n 2 <= 0;\n\n 1 > 2;\n\n 2 >= 0;\n\n 0 == 0;\n\n 1 != 0;\n\n true == true;\n\n false == false;\n\n false != false;\n\n true != false;\n\n !(-9);\n\n !0;\n\n !\"string\";\n\n !nil;\n\n \"#;\n", "file_path": "src/interpreter/tests.rs", "rank": 51, "score": 99230.20954558412 }, { "content": "#[test]\n\nfn test_return_statements() {\n\n // Not much to test here, to be honest\n\n assert_parse(\"return 0;\", &[\"Return(IntLiteral(0))\"]);\n\n assert_parse(\"return;\", &[\"Return(Nil)\"]);\n\n}\n\n\n", "file_path": "src/parser/tests.rs", "rank": 52, "score": 99230.20954558412 }, { "content": "#[test]\n\nfn test_string_operations() {\n\n let input = r#\"\n\n \"abc\" + \"\";\n\n \"\" + \"abc\";\n\n \"abc\" + \"def\";\n\n \"foo\"[0];\n\n \"foobar\"[5];\n\n \"#;\n\n let expected = [\n\n Object::from(\"abc\"),\n\n Object::from(\"abc\"),\n\n Object::from(\"abcdef\"),\n\n Object::from(\"f\"),\n\n Object::from(\"r\"),\n\n ];\n\n assert_eval(input, &expected);\n\n}\n\n\n", "file_path": "src/interpreter/tests.rs", "rank": 53, "score": 99230.20954558412 }, { "content": "#[test]\n\nfn test_return_in_expressions() {\n\n // One thing that makes return statements very problematic is the fact that they can\n\n // appear inside block expressions, meaning they can appear in any expression context.\n\n // The following tests make sure everything works properly in these contexts.\n\n let input = r#\"\n\n // Inside prefix and infix expressions\n\n fn() {\n\n !{ return 0; }\n\n }();\n\n fn() {\n\n 0 + { return 1; }\n\n }();\n\n\n\n // Inside let statements\n\n fn() {\n\n let a = { return 2; false; };\n\n }();\n\n\n\n // Inside if conditions\n\n fn() {\n", "file_path": "src/interpreter/tests.rs", 
"rank": 54, "score": 99230.20954558412 }, { "content": "#[test]\n\nfn test_instruction_printing() {\n\n let input = instructions! {\n\n (OpCode::OpAdd),\n\n (OpCode::OpConstant, 2),\n\n (OpCode::OpConstant, 65535),\n\n (OpCode::OpClosure, 65534, 42),\n\n };\n\n let expected = \"\\\n\n 0000 OpAdd\\n\\\n\n 0001 OpConstant 2\\n\\\n\n 0004 OpConstant 65535\\n\\\n\n 0007 OpClosure 65534 42\\n\\\n\n \";\n\n assert_eq!(expected, format!(\"{}\", input));\n\n}\n\n\n", "file_path": "src/compiler/tests.rs", "rank": 55, "score": 99230.20954558412 }, { "content": "#[test]\n\nfn test_return_statements() {\n\n let input = r#\"\n\n fn() { 1; return 0; }();\n\n fn() { 4; return 1; 9; }();\n\n fn() { 16; return 2; return 25; 36; }();\n\n fn() {\n\n if true {\n\n if true {\n\n return 3;\n\n }\n\n return 49;\n\n }\n\n }();\n\n fn() {\n\n if false {\n\n return 64;\n\n } else {\n\n return 4;\n\n }\n\n }();\n", "file_path": "src/interpreter/tests.rs", "rank": 56, "score": 99230.20954558412 }, { "content": "#[test]\n\nfn test_index_expression() {\n\n let input = \"\n\n a[0];\n\n [nil][0];\n\n \";\n\n let expected = [\n\n \"ExpressionStatement(IndexExpression(Identifier(\\\"a\\\"), IntLiteral(0)))\",\n\n \"ExpressionStatement(IndexExpression(ArrayLiteral([Nil]), IntLiteral(0)))\",\n\n ];\n\n assert_parse(input, &expected);\n\n\n\n assert_parse_fails(\"array[]\");\n\n assert_parse_fails(\"array[i\");\n\n assert_parse_fails(\"array[only, one, index, man]\");\n\n}\n\n\n", "file_path": "src/parser/tests.rs", "rank": 57, "score": 99214.28184847973 }, { "content": "#[test]\n\nfn test_index_expressions() {\n\n assert_compile(\n\n \"[1, 2, 3][1 + 1]\",\n\n vec![\n\n Object::Integer(1),\n\n Object::Integer(2),\n\n Object::Integer(3),\n\n Object::Integer(1),\n\n Object::Integer(1),\n\n ],\n\n instructions! 
{\n\n (OpCode::OpConstant, 0),\n\n (OpCode::OpConstant, 1),\n\n (OpCode::OpConstant, 2),\n\n (OpCode::OpArray, 3),\n\n (OpCode::OpConstant, 3),\n\n (OpCode::OpConstant, 4),\n\n (OpCode::OpAdd),\n\n (OpCode::OpIndex),\n\n },\n", "file_path": "src/compiler/tests.rs", "rank": 58, "score": 99214.28184847973 }, { "content": "#[test]\n\nfn test_recursive_fibonacci() {\n\n let input = [\"\n\n let fibonacci = fn(n) {\n\n if n < 2 {\n\n n\n\n } else {\n\n fibonacci(n - 1) + fibonacci(n - 2)\n\n }\n\n };\n\n fibonacci(30)\n\n \"];\n\n let expected = [Object::Integer(610)];\n\n assert_vm_runs(&input, &expected);\n\n}\n", "file_path": "src/vm/tests.rs", "rank": 59, "score": 98585.10672347347 }, { "content": "#[test]\n\nfn test_function_arguments() {\n\n let input = [\n\n \"let id = fn(x) { x }; id(4)\",\n\n \"let sum = fn(a, b) { a + b }; sum(1, 2)\",\n\n \"let global_num = 10;\n\n let sum = fn(a, b) {\n\n let c = a + b;\n\n c + global_num;\n\n };\n\n let outer = fn() {\n\n sum(1, 2) + sum(3, 4) + global_num;\n\n };\n\n outer() + global_num;\",\n\n ];\n\n let expected = [Object::Integer(4), Object::Integer(3), Object::Integer(50)];\n\n assert_vm_runs(&input, &expected);\n\n}\n\n\n", "file_path": "src/vm/tests.rs", "rank": 60, "score": 98585.10672347348 }, { "content": "#[test]\n\nfn test_integer_arithmetic() {\n\n let input = [\"2 + 3\", \"-3\"];\n\n let expected = [Object::Integer(5), Object::Integer(-3)];\n\n assert_vm_runs(&input, &expected);\n\n}\n\n\n", "file_path": "src/vm/tests.rs", "rank": 61, "score": 98585.10672347347 }, { "content": "#[test]\n\nfn test_conditional_expressions() {\n\n let input = [\n\n \"if true { 10 }\",\n\n \"if true { 10 } else { 20 }\",\n\n \"if false { 10 } else { 20 }\",\n\n \"if 1 > 2 { 10 } else { 20 }\",\n\n \"if 1 > 2 { 10 }\",\n\n ];\n\n let expected = [\n\n Object::Integer(10),\n\n Object::Integer(10),\n\n Object::Integer(20),\n\n Object::Integer(20),\n\n Object::Nil,\n\n ];\n\n assert_vm_runs(&input, &expected);\n\n}\n\n\n", "file_path": 
"src/vm/tests.rs", "rank": 62, "score": 98585.10672347347 }, { "content": "#[test]\n\nfn test_local_bindings() {\n\n let input = [\n\n \"let one = fn() { let one = 1; one }; one()\",\n\n \"let one_and_two = fn() { let one = 1; let two = 2; one + two; }; one_and_two();\",\n\n \"let one_and_two = fn() { let one = 1; let two = 2; one + two; };\n\n let three_and_four = fn() { let three = 3; let four = 4; three + four; };\n\n one_and_two() + three_and_four();\",\n\n \"let first_foobar = fn() { let foobar = 50; foobar; };\n\n let second_foobar = fn() { let foobar = 100; foobar; };\n\n first_foobar() + second_foobar();\",\n\n \"let global_seed = 50;\n\n let minus_one = fn() {\n\n let num = 1;\n\n global_seed - num;\n\n };\n\n let minus_two = fn() {\n\n let num = 2;\n\n global_seed - num;\n\n };\n\n minus_one() + minus_two();\",\n", "file_path": "src/vm/tests.rs", "rank": 63, "score": 98585.10672347347 }, { "content": "#[test]\n\nfn test_boolean_expressions() {\n\n let input = [\n\n \"true\",\n\n \"false\",\n\n \"2 >= 3 == true\",\n\n \"false != 1 < 2\",\n\n \"!false\",\n\n \"!(if false { 3 })\",\n\n ];\n\n let expected = [\n\n Object::Boolean(true),\n\n Object::Boolean(false),\n\n Object::Boolean(false),\n\n Object::Boolean(true),\n\n Object::Boolean(true),\n\n Object::Boolean(true),\n\n ];\n\n assert_vm_runs(&input, &expected);\n\n}\n\n\n", "file_path": "src/vm/tests.rs", "rank": 64, "score": 98585.10672347348 }, { "content": "#[test]\n\nfn test_function_calls() {\n\n let input = [\n\n \"let foo = fn() { 5 + 10; }; foo()\",\n\n \"let foo = fn() { return 5 + 10; }; foo()\",\n\n \"\n\n let one = fn() { 1 };\n\n let two = fn() { 1 + one() };\n\n let three = fn() { two() + one() };\n\n (one() + three()) * two();\n\n \",\n\n \"let nothing = fn() {}; nothing()\",\n\n ];\n\n let expected = [\n\n Object::Integer(15),\n\n Object::Integer(15),\n\n Object::Integer(8),\n\n Object::Nil,\n\n ];\n\n assert_vm_runs(&input, &expected);\n\n}\n\n\n", "file_path": "src/vm/tests.rs", "rank": 
65, "score": 98585.1067234735 }, { "content": "fn assert_lex(input: &str, expected: &[Token]) {\n\n let mut lex = Lexer::from_string(input.into()).expect(\"Lexer error during test\");\n\n for ex in expected {\n\n let got = lex.next_token().expect(\"Lexer error during test\");\n\n assert_eq!(ex, &got);\n\n }\n\n}\n\n\n", "file_path": "src/lexer/tests.rs", "rank": 66, "score": 96207.43103561433 }, { "content": "fn start_compiled(mut rl: rustyline::Editor<ReplHelper>) -> Result<(), std::io::Error> {\n\n let mut comp = compiler::Compiler::new();\n\n let mut vm = vm::VM::new();\n\n\n\n let mut run_line = |line: String| -> MonkeyResult<Vec<object::Object>> {\n\n let parsed = parser::parse(line)?;\n\n comp.compile_block(parsed)?;\n\n let new_bytecode = comp.reset_instructions();\n\n vm.run(new_bytecode)?;\n\n Ok(vec![vm.pop()?])\n\n };\n\n\n\n loop {\n\n match read_line(&mut rl) {\n\n Some(line) => print_results(run_line(line)),\n\n None => return Ok(()),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/repl.rs", "rank": 67, "score": 92059.84924803254 }, { "content": "fn start_interpreted(mut rl: rustyline::Editor<ReplHelper>) -> Result<(), std::io::Error> {\n\n let env = Rc::new(RefCell::new(environment::Environment::empty()));\n\n let run_line = |line: String| -> MonkeyResult<Vec<object::Object>> {\n\n parser::parse(line)?\n\n .into_iter()\n\n .map(|s| interpreter::eval_statement(&s, &env))\n\n .collect()\n\n };\n\n\n\n loop {\n\n match read_line(&mut rl) {\n\n Some(line) => print_results(run_line(line)),\n\n None => return Ok(()),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/repl.rs", "rank": 68, "score": 92059.84924803254 }, { "content": "fn call_function_object(\n\n fo: InterpreterFunctionObject,\n\n args: Vec<Object>,\n\n call_pos: (usize, usize),\n\n) -> MonkeyResult<Object> {\n\n if fo.parameters.len() != args.len() {\n\n return Err(MonkeyError::Interpreter(\n\n call_pos,\n\n WrongNumberOfArgs(fo.parameters.len(), args.len()),\n\n ));\n\n }\n\n let mut call_env = 
fo.environment.borrow().clone();\n\n for (name, value) in fo.parameters.into_iter().zip(args) {\n\n call_env.insert(name, value);\n\n }\n\n let result = eval_block(&fo.body, &Rc::new(RefCell::new(call_env)));\n\n result.or_else(|e| {\n\n if let MonkeyError::Interpreter(_, ReturnValue(obj)) = e {\n\n Ok(*obj)\n\n } else {\n\n Err(e)\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/interpreter/mod.rs", "rank": 69, "score": 90259.71142172755 }, { "content": "fn assert_lexer_error(input: &str, expected_error: LexerError) {\n\n let mut lex = Lexer::from_string(input.into()).unwrap();\n\n loop {\n\n match lex.next_token() {\n\n Ok(Token::EOF) => panic!(\"No lexer errors encountered\"),\n\n Err(e) => {\n\n match e {\n\n MonkeyError::Lexer(_, got) => assert_eq!(expected_error, got),\n\n _ => panic!(\"Wrong error type\"),\n\n }\n\n return;\n\n }\n\n _ => continue,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/lexer/tests.rs", "rank": 70, "score": 88673.74762357365 }, { "content": "type LexerLine = Peekable<std::vec::IntoIter<(usize, char)>>;\n", "file_path": "src/lexer/mod.rs", "rank": 71, "score": 81393.43870027502 }, { "content": "fn print_results<T: std::fmt::Display>(results: MonkeyResult<Vec<T>>) {\n\n match results {\n\n Ok(values) => {\n\n for v in values {\n\n println!(\"{}\", v);\n\n }\n\n }\n\n Err(e) => eprintln!(\"{}\", e),\n\n }\n\n}\n", "file_path": "src/repl.rs", "rank": 72, "score": 76084.79644478804 }, { "content": "#[test]\n\nfn test_arrays() {\n\n assert_compile(\n\n \"[]\",\n\n vec![],\n\n Instructions([make!(OpCode::OpArray, 0)].concat()),\n\n );\n\n assert_compile(\n\n \"[1, 2, 3]\",\n\n vec![Object::Integer(1), Object::Integer(2), Object::Integer(3)],\n\n instructions! 
{\n\n (OpCode::OpConstant, 0),\n\n (OpCode::OpConstant, 1),\n\n (OpCode::OpConstant, 2),\n\n (OpCode::OpArray, 3),\n\n },\n\n );\n\n assert_compile(\n\n \"[1 + 2, 3 - 4, 5 * 6]\",\n\n vec![\n\n Object::Integer(1),\n", "file_path": "src/compiler/tests.rs", "rank": 73, "score": 74527.0684166343 }, { "content": "#[test]\n\nfn test_hashes() {\n\n let input = r#\"\n\n #{};\n\n #{\"a\": true, \"b\": [], \"c\": 3};\n\n #{\"nested\": #{}};\n\n let h = #{\n\n \"something\": nil,\n\n 1 + 2: 5 - 1,\n\n !true: \"indeed\"\n\n };\n\n h;\n\n h[\"something\"];\n\n h[3];\n\n h[false];\n\n \"#;\n\n let expected = [\n\n monkey_hash! {},\n\n monkey_hash! {\n\n HashableObject::from(\"a\") => Object::Boolean(true),\n\n HashableObject::from(\"b\") => monkey_array![],\n", "file_path": "src/interpreter/tests.rs", "rank": 74, "score": 74527.0684166343 }, { "content": "#[test]\n\nfn test_arrays() {\n\n let input = \"\n\n [];\n\n [0, nil, false];\n\n [0, [1]];\n\n let arr = [0, 1, 1, 2, 3, 5, 8, 13];\n\n arr[5];\n\n let arr = [[0], [0, 1], [0, 1, 2]];\n\n arr[2][2];\n\n \";\n\n let expected = [\n\n monkey_array![],\n\n monkey_array![Integer(0), Nil, Boolean(false)],\n\n monkey_array![\n\n Integer(0),\n\n monkey_array![Integer(1)],\n\n ],\n\n Nil,\n\n Integer(5),\n\n Nil,\n\n Integer(2),\n\n ];\n\n assert_eval(input, &expected);\n\n}\n\n\n", "file_path": "src/interpreter/tests.rs", "rank": 75, "score": 74527.0684166343 }, { "content": "#[test]\n\nfn test_strings() {\n\n let input = r#\"\n\n \"string\"\n\n \"escape sequences: \\\\ \\n \\t \\r \\\" \"\n\n \"whitespace\n\n inside strings\"\n\n \"#;\n\n let expected = [\n\n Token::Str(\"string\".into()),\n\n Token::Str(\"escape sequences: \\\\ \\n \\t \\r \\\" \".into()),\n\n Token::Str(\"whitespace\\n inside strings\".into()),\n\n Token::EOF,\n\n ];\n\n assert_lex(input, &expected);\n\n}\n\n\n", "file_path": "src/lexer/tests.rs", "rank": 76, "score": 74527.0684166343 }, { "content": "#[test]\n\nfn test_hashes() {\n\n assert_compile(\"#{}\", 
vec![], instructions! { (OpCode::OpHash, 0) });\n\n assert_compile(\n\n \"#{ 1: 2, 3: 4 }\",\n\n vec![\n\n Object::Integer(1),\n\n Object::Integer(2),\n\n Object::Integer(3),\n\n Object::Integer(4),\n\n ],\n\n instructions! {\n\n (OpCode::OpConstant, 0),\n\n (OpCode::OpConstant, 1),\n\n (OpCode::OpConstant, 2),\n\n (OpCode::OpConstant, 3),\n\n (OpCode::OpHash, 2),\n\n },\n\n );\n\n assert_compile(\n\n \"#{ 1: 2 + 3, 4: 5 * 6 }\",\n", "file_path": "src/compiler/tests.rs", "rank": 77, "score": 74527.0684166343 }, { "content": "#[test]\n\nfn test_if_expressions() {\n\n let input = \"\n\n if true { 10 }\n\n if false { 10 }\n\n if 1 { 10 }\n\n if 0 { 10 }\n\n if 2 < 5 { 10 }\n\n if true { 10 } else { 20 }\n\n if false { 10 } else { 20 }\n\n \";\n\n let expected = [\n\n Integer(10),\n\n Nil,\n\n Integer(10),\n\n Nil,\n\n Integer(10),\n\n Integer(10),\n\n Integer(20),\n\n ];\n\n assert_eval(input, &expected);\n\n}\n\n\n", "file_path": "src/interpreter/tests.rs", "rank": 78, "score": 74527.0684166343 }, { "content": "#[test]\n\nfn test_conditionals() {\n\n assert_compile(\n\n \"if true { 10 }; 3333\",\n\n vec![Object::Integer(10), Object::Integer(3333)],\n\n instructions! 
{\n\n (OpCode::OpTrue),\n\n (OpCode::OpJumpNotTruthy, 10),\n\n (OpCode::OpConstant, 0),\n\n (OpCode::OpJump, 11),\n\n (OpCode::OpNil),\n\n (OpCode::OpPop),\n\n (OpCode::OpConstant, 1),\n\n },\n\n );\n\n assert_compile(\n\n \"if true { 10 } else { 20 }; 3333\",\n\n vec![\n\n Object::Integer(10),\n\n Object::Integer(20),\n\n Object::Integer(3333),\n", "file_path": "src/compiler/tests.rs", "rank": 79, "score": 74527.0684166343 }, { "content": "#[test]\n\nfn test_literals() {\n\n let input = r#\"\n\n 0;\n\n 17;\n\n true;\n\n false;\n\n nil;\n\n \"brown is dark orange\";\n\n \"hello world\";\n\n [];\n\n [0, false, nil];\n\n #{\n\n first : \"entry\",\n\n second : 1,\n\n nil : []\n\n };\n\n fn(x, y, z) {\n\n return x;\n\n };\n\n \"#;\n", "file_path": "src/parser/tests.rs", "rank": 80, "score": 74527.0684166343 }, { "content": "#[test]\n\nfn test_functions() {\n\n let input = \"\n\n let id = fn(x) { x };\n\n id(5);\n\n\n\n let neg = fn(x) { -x };\n\n neg(10);\n\n\n\n let sqr = fn(x) { x * x };\n\n sqr(17);\n\n\n\n let and = fn(p, q) {\n\n if p {\n\n q\n\n } else {\n\n false\n\n }\n\n };\n\n and(false, true);\n\n and(true, false);\n", "file_path": "src/interpreter/tests.rs", "rank": 81, "score": 74527.0684166343 }, { "content": "#[test]\n\nfn test_recursion() {\n\n let input = \"\n\n let accumulate = fn(n) {\n\n if n <= 0 {\n\n 0\n\n } else {\n\n 1 + accumulate(n - 1)\n\n }\n\n };\n\n accumulate(50);\n\n\n\n let fib = fn(n) {\n\n if n <= 1 {\n\n n\n\n } else {\n\n fib(n - 1) + fib(n - 2)\n\n }\n\n };\n\n fib(13);\n\n\n", "file_path": "src/interpreter/tests.rs", "rank": 82, "score": 74527.0684166343 }, { "content": "#[test]\n\nfn test_strings() {\n\n assert_compile(\n\n \"\\\"monkey\\\"\",\n\n vec![],\n\n Instructions([make!(OpCode::OpConstant, 0)].concat()),\n\n );\n\n assert_compile(\n\n \"\\\"mon\\\" + \\\"key\\\"\",\n\n vec![],\n\n instructions! 
{\n\n (OpCode::OpConstant, 0),\n\n (OpCode::OpConstant, 1),\n\n (OpCode::OpAdd),\n\n },\n\n );\n\n}\n\n\n", "file_path": "src/compiler/tests.rs", "rank": 83, "score": 74527.0684166343 }, { "content": "#[test]\n\nfn test_delimiters() {\n\n let input = \", ; : () {} [] #{}\";\n\n let expected = [\n\n Token::Comma,\n\n Token::Semicolon,\n\n Token::Colon,\n\n Token::OpenParen,\n\n Token::CloseParen,\n\n Token::OpenCurlyBrace,\n\n Token::CloseCurlyBrace,\n\n Token::OpenSquareBracket,\n\n Token::CloseSquareBracket,\n\n Token::OpenHash,\n\n Token::CloseCurlyBrace,\n\n Token::EOF,\n\n ];\n\n assert_lex(input, &expected);\n\n}\n\n\n", "file_path": "src/lexer/tests.rs", "rank": 84, "score": 74527.0684166343 }, { "content": "#[test]\n\nfn test_blocks() {\n\n let input = \"\n\n { 5 }\n\n { 2; false }\n\n {\n\n { true; 3; }\n\n }\n\n let a = {\n\n let b = 9;\n\n b * (b - 1) * (b - 2);\n\n };\n\n a;\n\n let c = 2;\n\n let d = {\n\n let c = 3;\n\n c;\n\n };\n\n d;\n\n \";\n\n let expected = [\n", "file_path": "src/interpreter/tests.rs", "rank": 85, "score": 74527.0684166343 }, { "content": "#[test]\n\nfn test_make() {\n\n assert_eq!(\n\n &[OpCode::OpConstant as u8, 255, 254],\n\n &*make!(OpCode::OpConstant, 65534)\n\n );\n\n assert_eq!(\n\n &[OpCode::OpGetLocal as u8, 255],\n\n &*make!(OpCode::OpGetLocal, 255)\n\n );\n\n assert_eq!(&[OpCode::OpAdd as u8], &*make!(OpCode::OpAdd));\n\n assert_eq!(\n\n &[OpCode::OpClosure as u8, 255, 254, 42],\n\n &*make!(OpCode::OpClosure, 65534, 42)\n\n );\n\n}\n\n\n", "file_path": "src/compiler/tests.rs", "rank": 86, "score": 74527.0684166343 }, { "content": "#[test]\n\nfn test_identifiers() {\n\n let input = \"foo bar two_words _ _foo2 back2thefuture 3different_ones olá 統一碼 यूनिकोड\";\n\n let expected = [\n\n iden!(\"foo\"),\n\n iden!(\"bar\"),\n\n iden!(\"two_words\"),\n\n iden!(\"_\"),\n\n iden!(\"_foo2\"),\n\n iden!(\"back2thefuture\"),\n\n Token::Int(3),\n\n iden!(\"different_ones\"),\n\n iden!(\"olá\"),\n\n iden!(\"統一碼\"),\n\n 
iden!(\"यूनिकोड\"),\n\n Token::EOF,\n\n ];\n\n assert_lex(input, &expected);\n\n\n\n // Test keywords\n\n let input = \"fn let true false if else return nil\";\n", "file_path": "src/lexer/tests.rs", "rank": 87, "score": 74527.0684166343 }, { "content": "#[test]\n\nfn test_if_expressions() {\n\n let input = \"\n\n if 1 { 1 } else { 0 }\n\n if 2 { 2 }\n\n if (true) {}\n\n if nil {} else if nil {} else {}\n\n \";\n\n let expected = [\n\n \"ExpressionStatement(IfExpression { condition: IntLiteral(1), consequence: \\\n\n [ExpressionStatement(IntLiteral(1))], alternative: [ExpressionStatement(\\\n\n IntLiteral(0))] })\",\n\n \"ExpressionStatement(IfExpression { condition: IntLiteral(2), consequence: \\\n\n [ExpressionStatement(IntLiteral(2))], alternative: [] })\",\n\n \"ExpressionStatement(IfExpression { condition: Boolean(true), consequence: \\\n\n [], alternative: [] })\",\n\n \"ExpressionStatement(IfExpression { condition: Nil, consequence: [], alternative: \\\n\n [ExpressionStatement(IfExpression { condition: Nil, consequence: [], alternative: [] \\\n\n })] })\",\n\n ];\n\n assert_parse(input, &expected);\n\n\n\n assert_parse_fails(\"if true\");\n\n assert_parse_fails(\"if { return 1; }\");\n\n assert_parse_fails(\"if true {} else\");\n\n}\n\n\n", "file_path": "src/parser/tests.rs", "rank": 88, "score": 74527.0684166343 }, { "content": "#[test]\n\nfn test_comments() {\n\n let input = r\"\n\n // comments\n\n foo // bar\n\n // Unicode! 中文 Português हिन्दी Français Español\n\n //\n\n baz\n\n \";\n\n let expected = [iden!(\"foo\"), iden!(\"baz\"), Token::EOF];\n\n assert_lex(input, &expected);\n\n}\n\n\n", "file_path": "src/lexer/tests.rs", "rank": 89, "score": 74527.0684166343 }, { "content": "#[test]\n\nfn test_function_calls() {\n\n let expected_func = Object::CompiledFunc(Box::new(CompiledFunction {\n\n instructions: instructions! 
{\n\n (OpCode::OpConstant, 0),\n\n (OpCode::OpReturn),\n\n },\n\n num_locals: 0,\n\n num_params: 0,\n\n }));\n\n assert_compile(\n\n \"fn() { 24 }()\",\n\n vec![Object::Integer(24), expected_func.clone()],\n\n instructions! { (OpCode::OpClosure, 1, 0), (OpCode::OpCall, 0) },\n\n );\n\n assert_compile(\n\n \"let foo = fn() { 24 }; foo()\",\n\n vec![Object::Integer(24), expected_func],\n\n instructions! {\n\n (OpCode::OpClosure, 1, 0),\n\n (OpCode::OpSetGlobal, 0),\n", "file_path": "src/compiler/tests.rs", "rank": 90, "score": 72119.48910452434 }, { "content": "#[test]\n\nfn test_integer_arithmetic() {\n\n assert_compile(\n\n \"1 + 2\",\n\n vec![Object::Integer(1), Object::Integer(2)],\n\n instructions! {\n\n (OpCode::OpConstant, 0),\n\n (OpCode::OpConstant, 1),\n\n (OpCode::OpAdd),\n\n },\n\n );\n\n assert_compile(\n\n \"1; 2\",\n\n vec![Object::Integer(1), Object::Integer(2)],\n\n instructions! {\n\n (OpCode::OpConstant, 0),\n\n (OpCode::OpPop),\n\n (OpCode::OpConstant, 1),\n\n },\n\n );\n\n assert_compile(\n", "file_path": "src/compiler/tests.rs", "rank": 91, "score": 72119.48910452434 }, { "content": "#[test]\n\nfn test_function_literals() {\n\n let expected_func = Object::CompiledFunc(Box::new(CompiledFunction {\n\n instructions: instructions! {\n\n (OpCode::OpConstant, 0),\n\n (OpCode::OpConstant, 1),\n\n (OpCode::OpAdd),\n\n (OpCode::OpReturn),\n\n },\n\n num_locals: 0,\n\n num_params: 0,\n\n }));\n\n assert_compile(\n\n \"fn() { return 5 + 10; }\",\n\n vec![Object::Integer(5), Object::Integer(10), expected_func],\n\n instructions! { (OpCode::OpClosure, 2, 0) },\n\n );\n\n let expected_func = Object::CompiledFunc(Box::new(CompiledFunction {\n\n instructions: instructions! 
{\n\n (OpCode::OpConstant, 0),\n\n (OpCode::OpReturn),\n", "file_path": "src/compiler/tests.rs", "rank": 92, "score": 72119.48910452434 }, { "content": "#[test]\n\nfn test_block_expressions() {\n\n let input = \"\n\n { let foo = 2; return 1; }\n\n { return 0; }\n\n {}\n\n \";\n\n let expected = [\n\n \"ExpressionStatement(BlockExpression([Let((\\\"foo\\\", IntLiteral(2))), \\\n\n Return(IntLiteral(1))]))\",\n\n \"ExpressionStatement(BlockExpression([Return(IntLiteral(0))]))\",\n\n \"ExpressionStatement(BlockExpression([]))\",\n\n ];\n\n assert_parse(input, &expected);\n\n\n\n assert_parse_fails(\"{ return 0\");\n\n}\n", "file_path": "src/parser/tests.rs", "rank": 93, "score": 72119.48910452434 }, { "content": "#[test]\n\nfn test_infix_expressions() {\n\n let input = \"1 + 2; 4 * 5 - 2 / 3; 1 >= 2 == 2 < 3 != true;\";\n\n let expected = [\n\n \"ExpressionStatement(InfixExpression(IntLiteral(1), Plus, IntLiteral(2)))\",\n\n \"ExpressionStatement(InfixExpression(InfixExpression(IntLiteral(4), Asterisk, \\\n\n IntLiteral(5)), Minus, InfixExpression(IntLiteral(2), Slash, IntLiteral(3))))\",\n\n \"ExpressionStatement(InfixExpression(InfixExpression(InfixExpression(IntLiteral(1), \\\n\n GreaterEq, IntLiteral(2)), Equals, InfixExpression(IntLiteral(2), LessThan, \\\n\n IntLiteral(3))), NotEquals, Boolean(true)))\",\n\n ];\n\n assert_parse(input, &expected);\n\n\n\n assert_parse_fails(\"1 + 2 -\");\n\n assert_parse_fails(\"1 == + 2\");\n\n assert_parse_fails(\"> 1 + 2\");\n\n}\n\n\n", "file_path": "src/parser/tests.rs", "rank": 94, "score": 72119.48910452434 }, { "content": "#[test]\n\nfn test_binding_scopes() {\n\n let expected_func = Object::CompiledFunc(Box::new(CompiledFunction {\n\n instructions: instructions! {\n\n (OpCode::OpGetGlobal, 0),\n\n (OpCode::OpReturn),\n\n },\n\n num_locals: 0,\n\n num_params: 0,\n\n }));\n\n assert_compile(\n\n \"let num = 55; fn() { num }\",\n\n vec![Object::Integer(55), expected_func],\n\n instructions! 
{\n\n (OpCode::OpConstant, 0),\n\n (OpCode::OpSetGlobal, 0),\n\n (OpCode::OpClosure, 1, 0),\n\n },\n\n );\n\n let expected_func = Object::CompiledFunc(Box::new(CompiledFunction {\n\n instructions: instructions! {\n", "file_path": "src/compiler/tests.rs", "rank": 95, "score": 72119.48910452434 }, { "content": "#[test]\n\nfn test_boolean_expressions() {\n\n assert_compile(\"true\", vec![], instructions! { (OpCode::OpTrue) });\n\n assert_compile(\"false\", vec![], instructions! { (OpCode::OpFalse) });\n\n assert_compile(\n\n \"1 > 2\",\n\n vec![Object::Integer(1), Object::Integer(2)],\n\n instructions! {\n\n (OpCode::OpConstant, 0),\n\n (OpCode::OpConstant, 1),\n\n (OpCode::OpGreaterThan),\n\n },\n\n );\n\n assert_compile(\n\n \"1 < 2\",\n\n vec![Object::Integer(2), Object::Integer(1)],\n\n instructions! {\n\n (OpCode::OpConstant, 0),\n\n (OpCode::OpConstant, 1),\n\n (OpCode::OpGreaterThan),\n\n },\n", "file_path": "src/compiler/tests.rs", "rank": 96, "score": 72119.48910452434 }, { "content": "#[test]\n\nfn test_prefix_expressions() {\n\n let input = \"-5; !true; --!!-foo;\";\n\n let expected = [\n\n \"ExpressionStatement(PrefixExpression(Minus, IntLiteral(5)))\",\n\n \"ExpressionStatement(PrefixExpression(Bang, Boolean(true)))\",\n\n \"ExpressionStatement(PrefixExpression(Minus, PrefixExpression(Minus, PrefixExpression(\\\n\n Bang, PrefixExpression(Bang, PrefixExpression(Minus, Identifier(\\\"foo\\\")))))))\",\n\n ];\n\n assert_parse(input, &expected);\n\n\n\n assert_parse_fails(\"!;\");\n\n assert_parse_fails(\"-\");\n\n}\n\n\n", "file_path": "src/parser/tests.rs", "rank": 97, "score": 72119.48910452434 }, { "content": "#[test]\n\nfn test_call_expressions() {\n\n let input = \"foo(); foo(x); foo(x, y, z); fn(x) { x; }(5);\";\n\n let expected = [\n\n \"ExpressionStatement(CallExpression { function: Identifier(\\\"foo\\\"), arguments: [] })\",\n\n \"ExpressionStatement(CallExpression { function: Identifier(\\\"foo\\\"), arguments: \\\n\n [Identifier(\\\"x\\\")] 
})\",\n\n \"ExpressionStatement(CallExpression { function: Identifier(\\\"foo\\\"), arguments: \\\n\n [Identifier(\\\"x\\\"), Identifier(\\\"y\\\"), Identifier(\\\"z\\\")] })\",\n\n \"ExpressionStatement(CallExpression { function: FunctionLiteral { parameters: \\\n\n [\\\"x\\\"], body: [ExpressionStatement(Identifier(\\\"x\\\"))] }, arguments: \\\n\n [IntLiteral(5)] })\",\n\n ];\n\n assert_parse(input, &expected);\n\n\n\n assert_parse_fails(\"foo(x, y,)\");\n\n assert_parse_fails(\"foo(\");\n\n assert_parse_fails(\"foo(x y)\");\n\n}\n\n\n", "file_path": "src/parser/tests.rs", "rank": 98, "score": 72119.48910452434 }, { "content": "#[test]\n\nfn test_grouped_expression() {\n\n let input = \"(2 + 3) * (5 + 7); (1 + (1 + (1 + 1)));\";\n\n let expected = [\n\n \"ExpressionStatement(InfixExpression(InfixExpression(IntLiteral(2), Plus, \\\n\n IntLiteral(3)), Asterisk, InfixExpression(IntLiteral(5), Plus, IntLiteral(7))))\",\n\n \"ExpressionStatement(InfixExpression(IntLiteral(1), Plus, InfixExpression(\\\n\n IntLiteral(1), Plus, InfixExpression(IntLiteral(1), Plus, IntLiteral(1)))))\",\n\n ];\n\n assert_parse(input, &expected);\n\n\n\n assert_parse_fails(\"(1 + 1\");\n\n assert_parse_fails(\"1 + 1)\");\n\n assert_parse_fails(\")(\");\n\n}\n\n\n", "file_path": "src/parser/tests.rs", "rank": 99, "score": 72119.48910452434 } ]
Rust
language/vm/src/proptest_types/types.rs
w3f-community/sp-move-vm
1c94891be56ec67eb04a8d1bd21775219d526f48
use crate::{ file_format::{ FieldDefinition, IdentifierIndex, ModuleHandleIndex, SignatureToken, StructDefinition, StructFieldInformation, StructHandle, StructHandleIndex, TableIndex, TypeSignature, }, proptest_types::signature::{KindGen, SignatureTokenGen}, }; use proptest::{ collection::{vec, SizeRange}, option, prelude::*, sample::Index as PropIndex, std_facade::hash_set::HashSet, }; use std::{cmp::max, collections::BTreeSet}; #[derive(Debug)] struct TypeSignatureIndex(u16); #[derive(Debug)] pub struct StDefnMaterializeState { pub identifiers_len: usize, pub struct_handles: Vec<StructHandle>, pub new_handles: BTreeSet<(ModuleHandleIndex, IdentifierIndex)>, } impl StDefnMaterializeState { pub fn new(identifiers_len: usize, struct_handles: Vec<StructHandle>) -> Self { Self { identifiers_len, struct_handles, new_handles: BTreeSet::new(), } } fn add_struct_handle(&mut self, handle: StructHandle) -> Option<StructHandleIndex> { if self.new_handles.insert((handle.module, handle.name)) { self.struct_handles.push(handle); Some(StructHandleIndex((self.struct_handles.len() - 1) as u16)) } else { None } } fn contains_nominal_resource(&self, signature: &SignatureToken) -> bool { use SignatureToken::*; match signature { Signer => true, Struct(struct_handle_index) => { self.struct_handles[struct_handle_index.0 as usize].is_nominal_resource } StructInstantiation(struct_handle_index, type_args) => { self.struct_handles[struct_handle_index.0 as usize].is_nominal_resource || type_args.iter().any(|t| self.contains_nominal_resource(t)) } Vector(targ) => self.contains_nominal_resource(targ), Reference(token) | MutableReference(token) => self.contains_nominal_resource(token), Bool | U8 | U64 | U128 | Address | TypeParameter(_) => false, } } } #[derive(Clone, Debug)] pub struct StructHandleGen { module_idx: PropIndex, name_idx: PropIndex, is_nominal_resource: bool, type_parameters: Vec<KindGen>, } impl StructHandleGen { pub fn strategy(kind_count: impl Into<SizeRange>) -> impl 
Strategy<Value = Self> { ( any::<PropIndex>(), any::<PropIndex>(), any::<bool>(), vec(KindGen::strategy(), kind_count), ) .prop_map( |(module_idx, name_idx, is_nominal_resource, type_parameters)| Self { module_idx, name_idx, is_nominal_resource, type_parameters, }, ) } pub fn materialize(self, module_len: usize, identifiers_len: usize) -> StructHandle { let idx = max(self.module_idx.index(module_len) as TableIndex, 1); let mut type_parameters = vec![]; for type_param in self.type_parameters { type_parameters.push(type_param.materialize()); } StructHandle { module: ModuleHandleIndex(idx as TableIndex), name: IdentifierIndex(self.name_idx.index(identifiers_len) as TableIndex), is_nominal_resource: self.is_nominal_resource, type_parameters, } } } #[derive(Clone, Debug)] pub struct StructDefinitionGen { name_idx: PropIndex, is_nominal_resource: bool, type_parameters: Vec<KindGen>, is_public: bool, field_defs: Option<Vec<FieldDefinitionGen>>, } impl StructDefinitionGen { pub fn strategy( field_count: impl Into<SizeRange>, kind_count: impl Into<SizeRange>, ) -> impl Strategy<Value = Self> { ( any::<PropIndex>(), any::<bool>(), vec(KindGen::strategy(), kind_count), any::<bool>(), option::of(vec(FieldDefinitionGen::strategy(), field_count)), ) .prop_map( |(name_idx, is_nominal_resource, type_parameters, is_public, field_defs)| Self { name_idx, is_nominal_resource, type_parameters, is_public, field_defs, }, ) } pub fn materialize( self, state: &mut StDefnMaterializeState, ) -> (Option<StructDefinition>, usize) { let mut field_names = HashSet::new(); let mut fields = vec![]; match self.field_defs { None => (), Some(field_defs_gen) => { for fd_gen in field_defs_gen { let field = fd_gen.materialize(state); if field_names.insert(field.name) { fields.push(field); } } } }; let is_nominal_resource = if fields.is_empty() { self.is_nominal_resource } else { self.is_nominal_resource || fields.iter().any(|field| { let field_sig = &field.signature.0; 
state.contains_nominal_resource(field_sig) }) }; let handle = StructHandle { module: ModuleHandleIndex(0), name: IdentifierIndex(self.name_idx.index(state.identifiers_len) as TableIndex), is_nominal_resource, type_parameters: self .type_parameters .into_iter() .map(|kind| kind.materialize()) .collect(), }; match state.add_struct_handle(handle) { Some(struct_handle) => { if fields.is_empty() { ( Some(StructDefinition { struct_handle, field_information: StructFieldInformation::Native, }), 0, ) } else { let field_count = fields.len(); let field_information = StructFieldInformation::Declared(fields); ( Some(StructDefinition { struct_handle, field_information, }), field_count, ) } } None => (None, 0), } } } #[derive(Clone, Debug)] struct FieldDefinitionGen { name_idx: PropIndex, signature_gen: SignatureTokenGen, } impl FieldDefinitionGen { fn strategy() -> impl Strategy<Value = Self> { (any::<PropIndex>(), SignatureTokenGen::atom_strategy()).prop_map( |(name_idx, signature_gen)| Self { name_idx, signature_gen, }, ) } fn materialize(self, state: &StDefnMaterializeState) -> FieldDefinition { FieldDefinition { name: IdentifierIndex(self.name_idx.index(state.identifiers_len) as TableIndex), signature: TypeSignature(self.signature_gen.materialize(&state.struct_handles)), } } }
use crate::{ file_format::{ FieldDefinition, IdentifierIndex, ModuleHandleIndex, SignatureToken, StructDefinition, StructFieldInformation, StructHandle, StructHandleIndex, TableIndex, TypeSignature, }, proptest_types::signature::{KindGen, SignatureTokenGen}, }; use proptest::{ collection::{vec, SizeRange}, option, prelude::*, sample::Index as PropIndex, std_facade::hash_set::HashSet, }; use std::{cmp::max, collections::BTreeSet}; #[derive(Debug)] struct TypeSignatureIndex(u16); #[derive(Debug)] pub struct StDefnMaterializeState { pub identifiers_len: usize, pub struct_handles: Vec<StructHandle>, pub new_handles: BTreeSet<(ModuleHandleIndex, IdentifierIndex)>, } impl StDefnMaterializeState { pub fn new(identifiers_len: usize, struct_handles: Vec<StructHandle>) -> Self { Self { identifiers_len, struct_handles, new_handles: BTreeSet::new(), } } fn add_struct_handle(&mut self, handle: StructHandle) -> Option<StructHandleIndex> { if self.new_handles.insert((handle.module, handle.name)) { self.struct_handles.push(handle); Some(StructHandleIndex((self.struct_handles.len() - 1) as u16)) } else { None } } fn contains_nominal_resource(&self, signature: &SignatureToken) -> bool { use SignatureToken::*; match signature { Signer => true, Struct(struct_handle_index) => { self.struct_handles[struct_handle_index.0 as usize].is_nominal_resource } StructInstantiation(struct_handle_index, type_args) => { self.struct_handles[struct_handle_index.0 as usize].is_nominal_resource || type_args.iter().any(|t| self.contains_nominal_resource(t)) }
inal_resource, type_parameters, is_public, field_defs, }, ) } pub fn materialize( self, state: &mut StDefnMaterializeState, ) -> (Option<StructDefinition>, usize) { let mut field_names = HashSet::new(); let mut fields = vec![]; match self.field_defs { None => (), Some(field_defs_gen) => { for fd_gen in field_defs_gen { let field = fd_gen.materialize(state); if field_names.insert(field.name) { fields.push(field); } } } }; let is_nominal_resource = if fields.is_empty() { self.is_nominal_resource } else { self.is_nominal_resource || fields.iter().any(|field| { let field_sig = &field.signature.0; state.contains_nominal_resource(field_sig) }) }; let handle = StructHandle { module: ModuleHandleIndex(0), name: IdentifierIndex(self.name_idx.index(state.identifiers_len) as TableIndex), is_nominal_resource, type_parameters: self .type_parameters .into_iter() .map(|kind| kind.materialize()) .collect(), }; match state.add_struct_handle(handle) { Some(struct_handle) => { if fields.is_empty() { ( Some(StructDefinition { struct_handle, field_information: StructFieldInformation::Native, }), 0, ) } else { let field_count = fields.len(); let field_information = StructFieldInformation::Declared(fields); ( Some(StructDefinition { struct_handle, field_information, }), field_count, ) } } None => (None, 0), } } } #[derive(Clone, Debug)] struct FieldDefinitionGen { name_idx: PropIndex, signature_gen: SignatureTokenGen, } impl FieldDefinitionGen { fn strategy() -> impl Strategy<Value = Self> { (any::<PropIndex>(), SignatureTokenGen::atom_strategy()).prop_map( |(name_idx, signature_gen)| Self { name_idx, signature_gen, }, ) } fn materialize(self, state: &StDefnMaterializeState) -> FieldDefinition { FieldDefinition { name: IdentifierIndex(self.name_idx.index(state.identifiers_len) as TableIndex), signature: TypeSignature(self.signature_gen.materialize(&state.struct_handles)), } } }
Vector(targ) => self.contains_nominal_resource(targ), Reference(token) | MutableReference(token) => self.contains_nominal_resource(token), Bool | U8 | U64 | U128 | Address | TypeParameter(_) => false, } } } #[derive(Clone, Debug)] pub struct StructHandleGen { module_idx: PropIndex, name_idx: PropIndex, is_nominal_resource: bool, type_parameters: Vec<KindGen>, } impl StructHandleGen { pub fn strategy(kind_count: impl Into<SizeRange>) -> impl Strategy<Value = Self> { ( any::<PropIndex>(), any::<PropIndex>(), any::<bool>(), vec(KindGen::strategy(), kind_count), ) .prop_map( |(module_idx, name_idx, is_nominal_resource, type_parameters)| Self { module_idx, name_idx, is_nominal_resource, type_parameters, }, ) } pub fn materialize(self, module_len: usize, identifiers_len: usize) -> StructHandle { let idx = max(self.module_idx.index(module_len) as TableIndex, 1); let mut type_parameters = vec![]; for type_param in self.type_parameters { type_parameters.push(type_param.materialize()); } StructHandle { module: ModuleHandleIndex(idx as TableIndex), name: IdentifierIndex(self.name_idx.index(identifiers_len) as TableIndex), is_nominal_resource: self.is_nominal_resource, type_parameters, } } } #[derive(Clone, Debug)] pub struct StructDefinitionGen { name_idx: PropIndex, is_nominal_resource: bool, type_parameters: Vec<KindGen>, is_public: bool, field_defs: Option<Vec<FieldDefinitionGen>>, } impl StructDefinitionGen { pub fn strategy( field_count: impl Into<SizeRange>, kind_count: impl Into<SizeRange>, ) -> impl Strategy<Value = Self> { ( any::<PropIndex>(), any::<bool>(), vec(KindGen::strategy(), kind_count), any::<bool>(), option::of(vec(FieldDefinitionGen::strategy(), field_count)), ) .prop_map( |(name_idx, is_nominal_resource, type_parameters, is_public, field_defs)| Self { name_idx, is_nom
random
[ { "content": "fn struct_handle(token: &SignatureToken) -> Option<StructHandleIndex> {\n\n use SignatureToken::*;\n\n\n\n match token {\n\n Struct(sh_idx) => Some(*sh_idx),\n\n StructInstantiation(sh_idx, _) => Some(*sh_idx),\n\n Reference(token) | MutableReference(token) => struct_handle(token),\n\n Bool | U8 | U64 | U128 | Address | Signer | Vector(_) | TypeParameter(_) => None,\n\n }\n\n}\n", "file_path": "language/bytecode-verifier/invalid-mutations/src/bounds.rs", "rank": 0, "score": 363741.79941367434 }, { "content": "#[inline]\n\npub fn pick_slice_idxs(max: usize, indexes: &[impl AsRef<PropIndex>]) -> Vec<usize> {\n\n pick_idxs(max, indexes, indexes.len())\n\n}\n\n\n\n/// Wrapper for `proptest`'s [`Index`][proptest::sample::Index] that allows `AsRef` to work.\n\n///\n\n/// There is no blanket `impl<T> AsRef<T> for T`, so `&[PropIndex]` doesn't work with\n\n/// `&[impl AsRef<PropIndex>]` (unless an impl gets added upstream). `Index` does.\n\n#[derive(Arbitrary, Clone, Copy, Debug)]\n\npub struct Index(PropIndex);\n\n\n\nimpl AsRef<PropIndex> for Index {\n\n fn as_ref(&self) -> &PropIndex {\n\n &self.0\n\n }\n\n}\n\n\n\nimpl Deref for Index {\n\n type Target = PropIndex;\n\n\n\n fn deref(&self) -> &PropIndex {\n\n &self.0\n\n }\n\n}\n", "file_path": "common/proptest-helpers/src/lib.rs", "rank": 1, "score": 315402.2574815988 }, { "content": "fn materialize_type(struct_handle: StructHandleIndex, type_args: &Signature) -> SignatureToken {\n\n if type_args.is_empty() {\n\n ST::Struct(struct_handle)\n\n } else {\n\n ST::StructInstantiation(struct_handle, type_args.0.clone())\n\n }\n\n}\n\n\n", "file_path": "language/bytecode-verifier/src/type_safety.rs", "rank": 2, "score": 290310.63687553874 }, { "content": "pub fn impl_enum_signature(\n\n name: &Ident,\n\n public_key_type: syn::LitStr,\n\n private_key_type: syn::LitStr,\n\n variants: &DataEnum,\n\n) -> TokenStream {\n\n let priv_kt: syn::Type = private_key_type.parse().unwrap();\n\n let pub_kt: syn::Type = 
public_key_type.parse().unwrap();\n\n let mut res = impl_enum_tryfrom(name, variants);\n\n let to_bytes_arms = match_enum_to_bytes(name, variants);\n\n\n\n let mut match_arms = quote! {};\n\n for variant in variants.variants.iter() {\n\n let variant_ident = &variant.ident;\n\n\n\n match_arms.extend(quote! {\n\n (#name::#variant_ident(sig), #pub_kt::#variant_ident(pk)) => {\n\n sig.verify_arbitrary_msg(message, pk)\n\n }\n\n })\n", "file_path": "crypto/crypto-derive/src/unions.rs", "rank": 4, "score": 260054.75681773666 }, { "content": "pub fn is_human_readable() -> bool {\n\n let mut output = Vec::new();\n\n let serializer = Serializer::new(&mut output, crate::MAX_CONTAINER_DEPTH);\n\n ser::Serializer::is_human_readable(&serializer)\n\n}\n\n\n", "file_path": "bcs/src/ser.rs", "rank": 5, "score": 252257.42613271164 }, { "content": "fn ty_to_sig(ty: &MoveTypeLayout) -> Option<SignatureToken> {\n\n match ty {\n\n MoveTypeLayout::Address => Some(SignatureToken::Address),\n\n MoveTypeLayout::Signer => Some(SignatureToken::Signer),\n\n MoveTypeLayout::U8 => Some(SignatureToken::U8),\n\n MoveTypeLayout::U64 => Some(SignatureToken::U64),\n\n MoveTypeLayout::U128 => Some(SignatureToken::U128),\n\n MoveTypeLayout::Vector(v) => Some(SignatureToken::Vector(Box::new(ty_to_sig(v.as_ref())?))),\n\n MoveTypeLayout::Struct(_) => None,\n\n MoveTypeLayout::Bool => Some(SignatureToken::Bool),\n\n }\n\n}\n\n\n\nimpl Constant {\n\n pub fn serialize_constant(ty: &MoveTypeLayout, v: &MoveValue) -> Option<Self> {\n\n Some(Self {\n\n type_: ty_to_sig(ty)?,\n\n data: v.simple_serialize()?,\n\n })\n\n }\n\n\n\n pub fn deserialize_constant(&self) -> Option<MoveValue> {\n\n let ty = sig_to_ty(&self.type_)?;\n\n MoveValue::simple_deserialize(&self.data, &ty).ok()\n\n }\n\n}\n", "file_path": "language/vm/src/constant.rs", "rank": 6, "score": 226826.7790555035 }, { "content": "fn sig_to_ty(sig: &SignatureToken) -> Option<MoveTypeLayout> {\n\n match sig {\n\n SignatureToken::Signer => 
Some(MoveTypeLayout::Signer),\n\n SignatureToken::Address => Some(MoveTypeLayout::Address),\n\n SignatureToken::Bool => Some(MoveTypeLayout::Bool),\n\n SignatureToken::U8 => Some(MoveTypeLayout::U8),\n\n SignatureToken::U64 => Some(MoveTypeLayout::U64),\n\n SignatureToken::U128 => Some(MoveTypeLayout::U128),\n\n SignatureToken::Vector(v) => Some(MoveTypeLayout::Vector(Box::new(sig_to_ty(v.as_ref())?))),\n\n SignatureToken::Reference(_)\n\n | SignatureToken::MutableReference(_)\n\n | SignatureToken::Struct(_)\n\n | SignatureToken::TypeParameter(_)\n\n | SignatureToken::StructInstantiation(_, _) => None,\n\n }\n\n}\n\n\n", "file_path": "language/vm/src/constant.rs", "rank": 7, "score": 226826.7790555035 }, { "content": "/// Given a maximum value `max` and a list of [`Index`](proptest::sample::Index) instances, picks\n\n/// integers in the range `[0, max)` uniformly randomly and without duplication.\n\n///\n\n/// If `indexes_len` is greater than `max`, all indexes will be returned.\n\n///\n\n/// This function implements [Robert Floyd's F2\n\n/// algorithm](https://blog.acolyer.org/2018/01/30/a-sample-of-brilliance/) for sampling without\n\n/// replacement.\n\npub fn pick_idxs<T, P>(max: usize, indexes: &T, indexes_len: usize) -> Vec<usize>\n\nwhere\n\n T: OpsIndex<usize, Output = P> + ?Sized,\n\n P: AsRef<PropIndex>,\n\n{\n\n // See https://blog.acolyer.org/2018/01/30/a-sample-of-brilliance/ (the F2 algorithm)\n\n // for a longer explanation. This is a variant that works with zero-indexing.\n\n let mut selected = BTreeSet::new();\n\n let to_select = indexes_len.min(max);\n\n for (iter_idx, choice) in ((max - to_select)..max).enumerate() {\n\n // \"RandInt(1, J)\" in the original algorithm means a number between 1\n\n // and choice, both inclusive. `PropIndex::index` picks a number between 0 and\n\n // whatever's passed in, with the latter exclusive. Pass in \"+1\" to ensure the same\n\n // range of values is picked from. 
(This also ensures that if choice is 0 then `index`\n\n // doesn't panic.\n\n let idx = indexes[iter_idx].as_ref().index(choice + 1);\n\n if !selected.insert(idx) {\n\n selected.insert(choice);\n\n }\n\n }\n", "file_path": "common/proptest-helpers/src/lib.rs", "rank": 8, "score": 221442.1191315905 }, { "content": "pub fn native_ed25519_signature_verification(\n\n context: &impl NativeContext,\n\n _ty_args: Vec<Type>,\n\n mut arguments: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(_ty_args.is_empty());\n\n debug_assert!(arguments.len() == 3);\n\n\n\n let msg = pop_arg!(arguments, Vec<u8>);\n\n let pubkey = pop_arg!(arguments, Vec<u8>);\n\n let signature = pop_arg!(arguments, Vec<u8>);\n\n\n\n let cost = native_gas(\n\n context.cost_table(),\n\n NativeCostIndex::ED25519_VERIFY,\n\n msg.len(),\n\n );\n\n\n\n let sig = match ed25519::Ed25519Signature::try_from(signature.as_slice()) {\n\n Ok(sig) => sig,\n", "file_path": "language/move-vm/natives/src/signature.rs", "rank": 9, "score": 219531.95092085947 }, { "content": "/// Compute the Serde name of a container.\n\npub fn trace_name<'de, T>() -> Option<&'static str>\n\nwhere\n\n T: serde::de::Deserialize<'de>,\n\n{\n\n match T::deserialize(SerdeName) {\n\n Err(SerdeNameError(name)) => name,\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n", "file_path": "crypto/crypto/src/serde_name.rs", "rank": 10, "score": 216112.1266694632 }, { "content": "pub fn unwrap_u256(u256: Struct) -> PartialVMResult<U256> {\n\n u256.unpack()?\n\n .next()\n\n .ok_or_else(|| {\n\n PartialVMError::new(StatusCode::TYPE_MISMATCH)\n\n .with_sub_status(0)\n\n .with_message(\"Expected U256 struct.\".to_owned())\n\n })\n\n .and_then(|field| field.value_as::<Vec<u8>>())\n\n .and_then(|value| {\n\n if value.len() != 32 {\n\n Err(PartialVMError::new(StatusCode::TYPE_MISMATCH)\n\n .with_sub_status(1)\n\n .with_message(\"Expected vector with length of 32.\".to_owned()))\n\n } else {\n\n Ok(U256::from_little_endian(&value))\n\n }\n\n 
})\n\n}\n\n\n", "file_path": "language/move-vm/natives/src/u256.rs", "rank": 11, "score": 215582.60955011367 }, { "content": "pub fn impl_enum_verifyingkey(\n\n name: &Ident,\n\n private_key_type: syn::LitStr,\n\n signature_type: syn::LitStr,\n\n _variants: &DataEnum,\n\n) -> TokenStream {\n\n let pkt: syn::Type = private_key_type.parse().unwrap();\n\n let st: syn::Type = signature_type.parse().unwrap();\n\n let res = quote! {\n\n impl diem_crypto::VerifyingKey for #name {\n\n type SigningKeyMaterial = #pkt;\n\n type SignatureMaterial = #st;\n\n }\n\n impl diem_crypto::private::Sealed for #name {}\n\n };\n\n res.into()\n\n}\n\n\n", "file_path": "crypto/crypto-derive/src/unions.rs", "rank": 12, "score": 212478.37915642612 }, { "content": "pub fn impl_enum_signingkey(\n\n name: &Ident,\n\n public_key_type: syn::LitStr,\n\n signature_type: syn::LitStr,\n\n variants: &DataEnum,\n\n) -> TokenStream {\n\n let pkt: syn::Type = public_key_type.parse().unwrap();\n\n let st: syn::Type = signature_type.parse().unwrap();\n\n\n\n let mut match_arms_arbitrary = quote! {};\n\n let mut match_struct_arms = quote! {};\n\n for variant in variants.variants.iter() {\n\n let variant_ident = &variant.ident;\n\n\n\n match_struct_arms.extend(quote! {\n\n #name::#variant_ident(key) => Self::SignatureMaterial::#variant_ident(key.sign(message)),\n\n });\n\n match_arms_arbitrary.extend(quote! {\n\n #name::#variant_ident(key) => Self::SignatureMaterial::#variant_ident(key.sign_arbitrary_message(message)),\n\n });\n", "file_path": "crypto/crypto-derive/src/unions.rs", "rank": 13, "score": 212478.37915642612 }, { "content": "pub fn impl_enum_publickey(\n\n name: &Ident,\n\n private_key_type: syn::LitStr,\n\n variants: &DataEnum,\n\n) -> TokenStream {\n\n let pkt: syn::Type = private_key_type.parse().unwrap();\n\n let mut from_match_arms = quote! {};\n\n for variant in variants.variants.iter() {\n\n let variant_ident = &variant.ident;\n\n\n\n from_match_arms.extend(quote! 
{\n\n #pkt::#variant_ident(key) => #name::#variant_ident(key.into()),\n\n });\n\n }\n\n let mut res = quote! {\n\n impl From<&#pkt> for #name {\n\n fn from(public_key: &#pkt) -> Self {\n\n match public_key {\n\n #from_match_arms\n\n }\n", "file_path": "crypto/crypto-derive/src/unions.rs", "rank": 14, "score": 212478.37915642612 }, { "content": "pub fn impl_enum_privatekey(\n\n name: &Ident,\n\n public_key_type: syn::LitStr,\n\n _variants: &DataEnum,\n\n) -> TokenStream {\n\n let pkt: syn::Type = public_key_type.parse().unwrap();\n\n let res = quote! {\n\n impl diem_crypto::PrivateKey for #name {\n\n type PublicKeyMaterial = #pkt;\n\n }\n\n };\n\n res.into()\n\n}\n\n\n", "file_path": "crypto/crypto-derive/src/unions.rs", "rank": 15, "score": 212478.37915642612 }, { "content": "pub fn native_destroy_signer(\n\n context: &mut impl NativeContext,\n\n ty_args: Vec<Type>,\n\n arguments: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.is_empty());\n\n debug_assert!(arguments.len() == 1);\n\n\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::DESTROY_SIGNER, 0);\n\n Ok(NativeResult::ok(cost, vec![]))\n\n}\n\n\n", "file_path": "language/move-vm/natives/src/account.rs", "rank": 16, "score": 210333.46459578993 }, { "content": "pub fn native_create_signer(\n\n context: &mut impl NativeContext,\n\n ty_args: Vec<Type>,\n\n mut arguments: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.is_empty());\n\n debug_assert!(arguments.len() == 1);\n\n\n\n let address = pop_arg!(arguments, AccountAddress);\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::CREATE_SIGNER, 0);\n\n Ok(NativeResult::ok(cost, vec![Value::signer(address)]))\n\n}\n\n\n", "file_path": "language/move-vm/natives/src/account.rs", "rank": 17, "score": 210333.46459578993 }, { "content": "pub fn native_borrow_address(\n\n context: &impl NativeContext,\n\n _ty_args: Vec<Type>,\n\n mut arguments: VecDeque<Value>,\n\n) -> 
PartialVMResult<NativeResult> {\n\n debug_assert!(_ty_args.is_empty());\n\n debug_assert!(arguments.len() == 1);\n\n\n\n let signer_reference = pop_arg!(arguments, SignerRef);\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::SIGNER_BORROW, 1);\n\n\n\n Ok(NativeResult::ok(\n\n cost,\n\n vec![signer_reference.borrow_signer()?],\n\n ))\n\n}\n", "file_path": "language/move-vm/natives/src/signer.rs", "rank": 18, "score": 210333.46459578993 }, { "content": "pub fn signer_module() -> ModuleTx {\n\n ModuleTx::new(\n\n include_bytes!(\"../assets/target/modules/Signer.mv\").to_vec(),\n\n CORE_CODE_ADDRESS,\n\n )\n\n}\n\n\n", "file_path": "mvm/tests/common/assets.rs", "rank": 19, "score": 208511.80962003506 }, { "content": "pub fn native_ed25519_publickey_validation(\n\n context: &impl NativeContext,\n\n _ty_args: Vec<Type>,\n\n mut arguments: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(_ty_args.is_empty());\n\n debug_assert!(arguments.len() == 1);\n\n\n\n let key_bytes = pop_arg!(arguments, Vec<u8>);\n\n\n\n let cost = native_gas(\n\n context.cost_table(),\n\n NativeCostIndex::ED25519_VALIDATE_KEY,\n\n key_bytes.len(),\n\n );\n\n\n\n // This deserialization performs point-on-curve and small subgroup checks\n\n let valid = ed25519::Ed25519PublicKey::try_from(&key_bytes[..]).is_ok();\n\n let return_values = vec![Value::bool(valid)];\n\n Ok(NativeResult::ok(cost, return_values))\n\n}\n\n\n", "file_path": "language/move-vm/natives/src/signature.rs", "rank": 20, "score": 207681.2115982359 }, { "content": "fn serialize_struct_handle_index(binary: &mut BinaryData, idx: &StructHandleIndex) -> Result<()> {\n\n write_as_uleb128(binary, idx.0, STRUCT_HANDLE_INDEX_MAX)\n\n}\n\n\n", "file_path": "language/vm/src/serializer.rs", "rank": 21, "score": 203434.99510698178 }, { "content": "fn load_struct_handle_index(cursor: &mut Cursor<&[u8]>) -> BinaryLoaderResult<StructHandleIndex> {\n\n Ok(StructHandleIndex(read_uleb_internal(\n\n cursor,\n\n 
STRUCT_HANDLE_INDEX_MAX,\n\n )?))\n\n}\n\n\n", "file_path": "language/vm/src/deserializer.rs", "rank": 22, "score": 201658.87998020003 }, { "content": "// TODO: \"<SELF>\" only passes the validator for identifiers because it is special cased. Whenever\n\n// \"<SELF>\" is removed, so should the special case in identifier.rs.\n\npub fn self_module_name() -> &'static IdentStr {\n\n IdentStr::ref_cast(\"<SELF>\")\n\n}\n\n\n\n/// Index 0 into the LocalsSignaturePool, which is guaranteed to be an empty list.\n\n/// Used to represent function/struct instantiation with no type arguments -- effectively\n\n/// non-generic functions and structs.\n\npub const NO_TYPE_ARGUMENTS: SignatureIndex = SignatureIndex(0);\n\n\n\n// HANDLES:\n\n// Handles are structs that accompany opcodes that need references: a type reference,\n\n// or a function reference (a field reference being available only within the module that\n\n// defines the field can be a definition).\n\n// Handles refer to both internal and external \"entities\" and are embedded as indexes\n\n// in the instruction stream.\n\n// Handles define resolution. Resolution is assumed to be by (name, signature)\n\n\n\n/// A `ModuleHandle` is a reference to a MOVE module. It is composed by an `address` and a `name`.\n\n///\n\n/// A `ModuleHandle` uniquely identifies a code resource in the blockchain.\n", "file_path": "language/vm/src/file_format.rs", "rank": 23, "score": 197148.38566642013 }, { "content": "/// Creates a new thread with a larger stack size.\n\n///\n\n/// Generating some proptest values can overflow the stack. 
This allows test authors to work around\n\n/// this limitation.\n\n///\n\n/// This is expected to be used with closure-style proptest invocations:\n\n///\n\n/// ```\n\n/// use proptest::prelude::*;\n\n/// use diem_proptest_helpers::with_stack_size;\n\n///\n\n/// with_stack_size(4 * 1024 * 1024, || proptest!(|(x in 0usize..128)| {\n\n/// // assertions go here\n\n/// prop_assert!(x >= 0 && x < 128);\n\n/// }));\n\n/// ```\n\npub fn with_stack_size<'a, F, T>(size: usize, f: F) -> Result<T, Box<dyn Any + 'static + Send>>\n\nwhere\n\n F: FnOnce() -> T + Send + 'a,\n\n T: Send + 'a,\n\n{\n\n thread::scope(|s| {\n\n let handle = s.builder().stack_size(size).spawn(|_| f()).map_err(|err| {\n\n let any: Box<dyn Any + 'static + Send> = Box::new(err);\n\n any\n\n })?;\n\n handle.join()\n\n })?\n\n}\n\n\n", "file_path": "common/proptest-helpers/src/lib.rs", "rank": 24, "score": 196353.15764656605 }, { "content": "// Return the length of the quoted string, or None if there is no closing quote.\n\nfn get_string_len(text: &str) -> Option<usize> {\n\n let mut pos = 0;\n\n let mut iter = text.chars();\n\n while let Some(chr) = iter.next() {\n\n if chr == '\\\\' {\n\n // Skip over the escaped character (e.g., a quote or another backslash)\n\n if iter.next().is_some() {\n\n pos += 1;\n\n }\n\n } else if chr == '\"' {\n\n return Some(pos);\n\n }\n\n pos += 1;\n\n }\n\n None\n\n}\n\n\n", "file_path": "language/move-lang/src/parser/lexer.rs", "rank": 25, "score": 192843.22121416754 }, { "content": "pub fn leq<Lbl: Eq>(lhs: &PathSlice<Lbl>, rhs: &PathSlice<Lbl>) -> bool {\n\n lhs.len() <= rhs.len() && lhs.iter().zip(rhs).all(|(l, r)| l == r)\n\n}\n\n\n", "file_path": "language/borrow-graph/src/paths.rs", "rank": 26, "score": 191659.75576960034 }, { "content": "/// This function checks the extra requirements on the signature of the main function of a script.\n\npub fn verify_main_signature(script: &CompiledScript) -> VMResult<()> {\n\n verify_main_signature_impl(script).map_err(|e| 
e.finish(Location::Script))\n\n}\n\n\n", "file_path": "language/bytecode-verifier/src/verifier.rs", "rank": 27, "score": 188943.61010181325 }, { "content": "#[proc_macro_derive(Signature, attributes(PublicKeyType, PrivateKeyType))]\n\npub fn derive_enum_signature(input: TokenStream) -> TokenStream {\n\n let ast = parse_macro_input!(input as DeriveInput);\n\n\n\n let name = &ast.ident;\n\n let public_key_type = get_type_from_attrs(&ast.attrs, \"PublicKeyType\").unwrap();\n\n let private_key_type = get_type_from_attrs(&ast.attrs, \"PrivateKeyType\").unwrap();\n\n match ast.data {\n\n Data::Enum(ref variants) => {\n\n impl_enum_signature(name, public_key_type, private_key_type, variants)\n\n }\n\n Data::Struct(_) | Data::Union(_) => {\n\n panic!(\"#[derive(PrivateKey)] is only defined for enums\")\n\n }\n\n }\n\n}\n\n\n\n// There is a unit test for this logic in the crypto crate, at\n\n// diem_crypto::unit_tests::cryptohasher — you may have to modify it if you\n\n// edit the below.\n", "file_path": "crypto/crypto-derive/src/lib.rs", "rank": 28, "score": 188943.473797806 }, { "content": "#[derive(Debug, Default)]\n\nstruct SignatureState {\n\n signatures: Vec<Signature>,\n\n signature_map: HashMap<Signature, SignatureIndex>,\n\n}\n\n\n\nimpl SignatureState {\n\n fn new(signatures: Vec<Signature>) -> Self {\n\n let mut state = Self::default();\n\n for sig in signatures {\n\n state.add_signature(sig);\n\n }\n\n state\n\n }\n\n\n\n fn signatures(self) -> Vec<Signature> {\n\n self.signatures\n\n }\n\n\n\n fn add_signature(&mut self, sig: Signature) -> SignatureIndex {\n\n precondition!(self.signatures.len() < TableSize::max_value() as usize);\n\n if let Some(idx) = self.signature_map.get(&sig) {\n\n return *idx;\n\n }\n\n let idx = SignatureIndex(self.signatures.len() as u16);\n\n self.signatures.push(sig.clone());\n\n self.signature_map.insert(sig, idx);\n\n idx\n\n }\n\n}\n\n\n", "file_path": "language/vm/src/proptest_types/functions.rs", "rank": 29, "score": 
188438.31314541382 }, { "content": "fn basic_ops_impl(repeat_vec: impl RepeatVecMethods<&'static str>) {\n\n let mut repeat_vec = repeat_vec;\n\n\n\n repeat_vec.extend(\"foo\", 3);\n\n repeat_vec.extend(\"bar\", 4);\n\n repeat_vec.extend(\"baz\", 0);\n\n assert_eq!(repeat_vec.len(), 7);\n\n\n\n // Basic queries work.\n\n assert_eq!(repeat_vec.get(0), Some((&\"foo\", 0)));\n\n assert_eq!(repeat_vec.get(1), Some((&\"foo\", 1)));\n\n assert_eq!(repeat_vec.get(2), Some((&\"foo\", 2)));\n\n assert_eq!(repeat_vec.get(3), Some((&\"bar\", 0)));\n\n assert_eq!(repeat_vec.get(4), Some((&\"bar\", 1)));\n\n assert_eq!(repeat_vec.get(5), Some((&\"bar\", 2)));\n\n assert_eq!(repeat_vec.get(6), Some((&\"bar\", 3)));\n\n assert_eq!(repeat_vec.get(7), None);\n\n\n\n // Removing an element shifts all further elements to the left.\n\n repeat_vec.remove(1);\n", "file_path": "common/proptest-helpers/src/unit_tests/repeat_vec_tests.rs", "rank": 30, "score": 187730.6307116386 }, { "content": "/// Serializes a `StructHandle`.\n\n///\n\n/// A `StructHandle` gets serialized as follows:\n\n/// - `StructHandle.module` as a ULEB128 (index into the `ModuleHandle` table)\n\n/// - `StructHandle.name` as a ULEB128 (index into the `IdentifierPool`)\n\n/// - `StructHandle.is_nominal_resource` as a 1 byte boolean (0 for false, 1 for true)\n\nfn serialize_struct_handle(binary: &mut BinaryData, struct_handle: &StructHandle) -> Result<()> {\n\n serialize_module_handle_index(binary, &struct_handle.module)?;\n\n serialize_identifier_index(binary, &struct_handle.name)?;\n\n serialize_nominal_resource_flag(binary, struct_handle.is_nominal_resource)?;\n\n serialize_kinds(binary, &struct_handle.type_parameters)\n\n}\n\n\n", "file_path": "language/vm/src/serializer.rs", "rank": 31, "score": 186560.20005589793 }, { "content": "#[derive(Debug, Default)]\n\n#[allow(unused)]\n\nstruct FieldHandleState {\n\n field_handles: Vec<FieldHandle>,\n\n field_map: HashMap<FieldHandle, FieldHandleIndex>,\n\n}\n\n\n\nimpl 
FieldHandleState {\n\n #[allow(unused)]\n\n pub fn field_handles(self) -> Vec<FieldHandle> {\n\n self.field_handles\n\n }\n\n\n\n #[allow(unused)]\n\n fn add_field_handle(&mut self, fh: FieldHandle) -> FieldHandleIndex {\n\n precondition!(self.field_handles.len() < TableSize::max_value() as usize);\n\n if let Some(idx) = self.field_map.get(&fh) {\n\n return *idx;\n\n }\n\n let idx = FieldHandleIndex(self.field_handles.len() as u16);\n\n self.field_handles.push(fh.clone());\n\n self.field_map.insert(fh, idx);\n\n idx\n\n }\n\n}\n\n\n", "file_path": "language/vm/src/proptest_types/functions.rs", "rank": 32, "score": 185862.45042009646 }, { "content": "fn verify_constant_type(idx: usize, type_: &SignatureToken) -> PartialVMResult<()> {\n\n if type_.is_valid_for_constant() {\n\n Ok(())\n\n } else {\n\n Err(verification_error(\n\n StatusCode::INVALID_CONSTANT_TYPE,\n\n IndexKind::ConstantPool,\n\n idx as TableIndex,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "language/bytecode-verifier/src/constants.rs", "rank": 33, "score": 183016.55923342094 }, { "content": "pub fn make_loc(file: &'static str, start: usize, end: usize) -> Loc {\n\n Loc::new(file, Span::new(start as u32, end as u32))\n\n}\n\n\n", "file_path": "language/move-lang/src/parser/syntax.rs", "rank": 34, "score": 182535.663041941 }, { "content": "/// Builds the `StructHandle` table.\n\nfn load_struct_handles(\n\n binary: &[u8],\n\n table: &Table,\n\n struct_handles: &mut Vec<StructHandle>,\n\n) -> BinaryLoaderResult<()> {\n\n let start = table.offset as usize;\n\n let end = start + table.count as usize;\n\n let mut cursor = Cursor::new(&binary[start..end]);\n\n while cursor.position() < table.count as u64 {\n\n let module = load_module_handle_index(&mut cursor)?;\n\n let name = load_identifier_index(&mut cursor)?;\n\n let is_nominal_resource = load_nominal_resource_flag(&mut cursor)?;\n\n let type_parameters = load_kinds(&mut cursor)?;\n\n struct_handles.push(StructHandle {\n\n module,\n\n name,\n\n 
is_nominal_resource,\n\n type_parameters,\n\n });\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "language/vm/src/deserializer.rs", "rank": 35, "score": 181459.2456153806 }, { "content": "pub fn account_address(value: &ValueImpl) -> PartialVMResult<AccountAddress> {\n\n fn find_address(container: &Container) -> PartialVMResult<AccountAddress> {\n\n match container {\n\n Container::Locals(values)\n\n | Container::VecR(values)\n\n | Container::VecC(values)\n\n | Container::StructR(values)\n\n | Container::StructC(values) => {\n\n let values = values.borrow();\n\n if values.len() != 1 {\n\n Err(PartialVMError::new(StatusCode::INTERNAL_TYPE_ERROR)\n\n .with_message(\"Invalid signer value.\".to_owned()))\n\n } else {\n\n account_address(&values[0])\n\n }\n\n }\n\n Container::VecAddress(_)\n\n | Container::VecU8(_)\n\n | Container::VecU64(_)\n\n | Container::VecU128(_)\n", "file_path": "language/move-vm/natives/src/types.rs", "rank": 36, "score": 179298.87820273708 }, { "content": "fn mutate_field(token: &SignatureToken) -> SignatureToken {\n\n SignatureToken::Reference(Box::new(token.clone()))\n\n}\n", "file_path": "language/bytecode-verifier/invalid-mutations/src/signature.rs", "rank": 37, "score": 179085.3692043576 }, { "content": "#[derive(Clone, Debug, Eq, Hash, PartialEq)]\n\nstruct Counter(usize);\n\n\n\nimpl Counter {\n\n fn next() -> Self {\n\n static COUNTER_NEXT: AtomicUsize = AtomicUsize::new(0);\n\n\n\n Counter(COUNTER_NEXT.fetch_add(1, Ordering::AcqRel))\n\n }\n\n\n\n fn strategy() -> impl Strategy<Value = Self> {\n\n // Note that this isn't Just(Self::next()) because that will keep generating a\n\n // single value over and over again.\n\n Self::next as fn() -> Self\n\n }\n\n}\n\n\n\n/// An operation on a RepeatVec.\n", "file_path": "common/proptest-helpers/src/unit_tests/repeat_vec_tests.rs", "rank": 38, "score": 177862.70136676807 }, { "content": "fn repeat_vec_proptest_impl(\n\n item_sizes: Vec<(Counter, usize)>,\n\n ops: Vec<RepeatVecOp>,\n\n) -> 
TestCaseResult {\n\n let mut test_vec = RepeatVec::new();\n\n let mut naive_vec = NaiveRepeatVec::new();\n\n\n\n for (item, size) in item_sizes {\n\n test_vec.extend(item.clone(), size);\n\n naive_vec.extend(item, size);\n\n }\n\n\n\n prop_assert_eq!(test_vec.len(), naive_vec.len());\n\n\n\n fn scaled_index(index: PropIndex, len: usize) -> usize {\n\n // Go roughly 10% beyond the end of the list to also check negative cases.\n\n let scaled_len = len + (len / 10);\n\n if scaled_len == 0 {\n\n // The vector is empty -- return 0, which is beyond the end of the vector\n\n // (but that's fine).\n", "file_path": "common/proptest-helpers/src/unit_tests/repeat_vec_tests.rs", "rank": 39, "score": 177855.3413084286 }, { "content": "// TODO rework parsing modifiers\n\nfn is_struct_definition<'input>(tokens: &mut Lexer<'input>) -> Result<bool, Error> {\n\n let mut t = tokens.peek();\n\n if t == Tok::Native {\n\n t = tokens.lookahead()?;\n\n }\n\n Ok(t == Tok::Struct || t == Tok::Resource)\n\n}\n\n\n", "file_path": "language/move-lang/src/parser/syntax.rs", "rank": 40, "score": 175174.18368398692 }, { "content": "pub fn impl_enum_valid_crypto_material(name: &Ident, variants: &DataEnum) -> TokenStream {\n\n let mut try_from = impl_enum_tryfrom(name, variants);\n\n\n\n let to_bytes_arms = match_enum_to_bytes(name, variants);\n\n\n\n try_from.extend(quote! 
{\n\n\n\n impl diem_crypto::ValidCryptoMaterial for #name {\n\n fn to_bytes(&self) -> Vec<u8> {\n\n match self {\n\n #to_bytes_arms\n\n }\n\n }\n\n }\n\n });\n\n try_from.into()\n\n}\n\n\n", "file_path": "crypto/crypto-derive/src/unions.rs", "rank": 41, "score": 174537.99267647252 }, { "content": "fn serialize_signature_token_single_node_impl(\n\n binary: &mut BinaryData,\n\n token: &SignatureToken,\n\n) -> Result<()> {\n\n match token {\n\n SignatureToken::Bool => binary.push(SerializedType::BOOL as u8)?,\n\n SignatureToken::U8 => binary.push(SerializedType::U8 as u8)?,\n\n SignatureToken::U64 => binary.push(SerializedType::U64 as u8)?,\n\n SignatureToken::U128 => binary.push(SerializedType::U128 as u8)?,\n\n SignatureToken::Address => binary.push(SerializedType::ADDRESS as u8)?,\n\n SignatureToken::Signer => binary.push(SerializedType::SIGNER as u8)?,\n\n SignatureToken::Vector(_) => {\n\n binary.push(SerializedType::VECTOR as u8)?;\n\n }\n\n SignatureToken::Struct(idx) => {\n\n binary.push(SerializedType::STRUCT as u8)?;\n\n serialize_struct_handle_index(binary, idx)?;\n\n }\n\n SignatureToken::StructInstantiation(idx, type_params) => {\n\n binary.push(SerializedType::STRUCT_INST as u8)?;\n", "file_path": "language/vm/src/serializer.rs", "rank": 42, "score": 173068.18867772448 }, { "content": "fn instantiate(token: &SignatureToken, subst: &Signature) -> SignatureToken {\n\n use SignatureToken::*;\n\n\n\n match token {\n\n Bool => Bool,\n\n U8 => U8,\n\n U64 => U64,\n\n U128 => U128,\n\n Address => Address,\n\n Signer => Signer,\n\n Vector(ty) => Vector(Box::new(instantiate(ty, subst))),\n\n Struct(idx) => Struct(*idx),\n\n StructInstantiation(idx, struct_type_args) => StructInstantiation(\n\n *idx,\n\n struct_type_args\n\n .iter()\n\n .map(|ty| instantiate(ty, subst))\n\n .collect(),\n\n ),\n\n Reference(ty) => Reference(Box::new(instantiate(ty, subst))),\n\n MutableReference(ty) => MutableReference(Box::new(instantiate(ty, subst))),\n\n TypeParameter(idx) => 
{\n\n // Assume that the caller has previously parsed and verified the structure of the\n\n // file and that this guarantees that type parameter indices are always in bounds.\n\n assume!((*idx as usize) < subst.len());\n\n subst.0[*idx as usize].clone()\n\n }\n\n }\n\n}\n", "file_path": "language/bytecode-verifier/src/type_safety.rs", "rank": 43, "score": 172864.6062189283 }, { "content": "pub fn impl_enum_tryfrom(name: &Ident, variants: &DataEnum) -> proc_macro2::TokenStream {\n\n // the TryFrom dispatch\n\n let mut try_iter = variants.variants.iter();\n\n let first_variant = try_iter\n\n .next()\n\n .expect(\"#[derive(ValidCryptoMaterial] requires a non-empty enum.\");\n\n let first_variant_ident = &first_variant.ident;\n\n let first_variant_arg = &first_variant\n\n .fields\n\n .iter()\n\n .next()\n\n .expect(\"Unrecognized enum for key types\")\n\n .ty;\n\n\n\n let mut try_chain = quote! {\n\n #first_variant_arg::try_from(bytes).and_then(|key| Ok(#name::#first_variant_ident(key)))\n\n };\n\n for variant in try_iter {\n\n let variant_ident = &variant.ident;\n\n let variant_arg = &variant\n", "file_path": "crypto/crypto-derive/src/unions.rs", "rank": 44, "score": 171998.4630484717 }, { "content": "fn mutate_sig(sig: &Signature, token_idx: usize) -> Signature {\n\n use SignatureToken::*;\n\n\n\n Signature(\n\n sig.0\n\n .iter()\n\n .enumerate()\n\n .map(|(idx, token)| {\n\n if idx == token_idx {\n\n match &token {\n\n Reference(_) | MutableReference(_) => Reference(Box::new(token.clone())),\n\n _ => Reference(Box::new(Reference(Box::new(token.clone())))),\n\n }\n\n } else {\n\n token.clone()\n\n }\n\n })\n\n .collect(),\n\n )\n\n}\n\n\n", "file_path": "language/bytecode-verifier/invalid-mutations/src/signature.rs", "rank": 45, "score": 171154.84064575797 }, { "content": "#[test]\n\npub fn test_identifier() {\n\n let ident = Identifier::new(\"Test_Ident\").unwrap();\n\n let buffer = ident.encode();\n\n assert_eq!(ident, Identifier::decode(&mut 
buffer.as_ref()).unwrap())\n\n}\n", "file_path": "mvm/tests/codec.rs", "rank": 46, "score": 170726.62855007005 }, { "content": "pub fn vm() -> (\n\n Mvm<StorageMock, EventHandlerMock, OracleMock, BankMock>,\n\n StorageMock,\n\n EventHandlerMock,\n\n OracleMock,\n\n BankMock,\n\n) {\n\n let store = StorageMock::new();\n\n let event = EventHandlerMock::default();\n\n let oracle = OracleMock::default();\n\n let bank = BankMock::default();\n\n let vm = Mvm::new(store.clone(), event.clone(), oracle.clone(), bank.clone()).unwrap();\n\n (vm, store, event, oracle, bank)\n\n}\n", "file_path": "mvm/tests/common/mod.rs", "rank": 47, "score": 170726.62855007005 }, { "content": "fn tvec(s: SignatureToken) -> SignatureToken {\n\n SignatureToken::Vector(Box::new(s))\n\n}\n\n\n", "file_path": "language/bytecode-verifier/bytecode-verifier-tests/src/unit_tests/constants_tests.rs", "rank": 48, "score": 170686.72962677432 }, { "content": "fn num_fields(struct_def: &StructDefinition) -> usize {\n\n match &struct_def.field_information {\n\n StructFieldInformation::Native => 0,\n\n StructFieldInformation::Declared(fields) => fields.len(),\n\n }\n\n}\n\n\n", "file_path": "language/bytecode-verifier/src/reference_safety/mod.rs", "rank": 49, "score": 170133.91006793268 }, { "content": "// Parse an 'as' use alias:\n\n// UseAlias = (\"as\" <Identifier>)?\n\nfn parse_use_alias<'input>(tokens: &mut Lexer<'input>) -> Result<Option<Name>, Error> {\n\n Ok(if tokens.peek() == Tok::As {\n\n tokens.advance()?;\n\n Some(parse_identifier(tokens)?)\n\n } else {\n\n None\n\n })\n\n}\n\n\n", "file_path": "language/move-lang/src/parser/syntax.rs", "rank": 50, "score": 169249.54593137672 }, { "content": "pub fn bounds_error(\n\n status: StatusCode,\n\n kind: IndexKind,\n\n idx: TableIndex,\n\n len: usize,\n\n) -> PartialVMError {\n\n let msg = format!(\n\n \"Index {} out of bounds for {} while indexing {}\",\n\n idx, len, kind\n\n );\n\n PartialVMError::new(status)\n\n .at_index(kind, idx)\n\n 
.with_message(msg)\n\n}\n\n\n", "file_path": "language/vm/src/errors.rs", "rank": 51, "score": 168806.44373548782 }, { "content": "pub fn offset_out_of_bounds(\n\n status: StatusCode,\n\n kind: IndexKind,\n\n target_offset: usize,\n\n target_pool_len: usize,\n\n cur_function: FunctionDefinitionIndex,\n\n cur_bytecode_offset: CodeOffset,\n\n) -> PartialVMError {\n\n let msg = format!(\n\n \"Index {} out of bounds for {} at bytecode offset {} in function {} while indexing {}\",\n\n target_offset, target_pool_len, cur_bytecode_offset, cur_function, kind\n\n );\n\n PartialVMError::new(status)\n\n .with_message(msg)\n\n .at_code_offset(cur_function, cur_bytecode_offset)\n\n}\n\n\n", "file_path": "language/vm/src/errors.rs", "rank": 52, "score": 168806.44373548782 }, { "content": "// Check for the specified token and consume it if it matches.\n\n// Returns true if the token matches.\n\nfn match_token<'input>(tokens: &mut Lexer<'input>, tok: Tok) -> Result<bool, Error> {\n\n if tokens.peek() == tok {\n\n tokens.advance()?;\n\n Ok(true)\n\n } else {\n\n Ok(false)\n\n }\n\n}\n\n\n", "file_path": "language/move-lang/src/parser/syntax.rs", "rank": 53, "score": 167293.69369670658 }, { "content": "pub fn verify(\n\n current_function_opt: Option<FunctionDefinitionIndex>,\n\n code: &CodeUnit,\n\n) -> PartialVMResult<()> {\n\n let current_function = current_function_opt.unwrap_or(FunctionDefinitionIndex(0));\n\n // check fall through\n\n // Check to make sure that the bytecode vector ends with a branching instruction.\n\n match code.code.last() {\n\n None => return Err(PartialVMError::new(StatusCode::EMPTY_CODE_UNIT)),\n\n Some(last) if !last.is_unconditional_branch() => {\n\n return Err(PartialVMError::new(StatusCode::INVALID_FALL_THROUGH)\n\n .at_code_offset(current_function, (code.code.len() - 1) as CodeOffset))\n\n }\n\n Some(_) => (),\n\n }\n\n\n\n // check jumps\n\n let context = &ControlFlowVerifier {\n\n current_function,\n\n code: &code.code,\n\n };\n\n let labels = 
instruction_labels(context);\n\n check_jumps(context, labels)\n\n}\n\n\n", "file_path": "language/bytecode-verifier/src/control_flow.rs", "rank": 54, "score": 166959.2851762668 }, { "content": "pub fn div(\n\n context: &mut impl NativeContext,\n\n ty_args: Vec<Type>,\n\n mut arguments: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.is_empty());\n\n debug_assert!(arguments.len() == 2);\n\n\n\n let r = unwrap_u256(pop_arg!(arguments, Struct))?;\n\n let l = unwrap_u256(pop_arg!(arguments, Struct))?;\n\n\n\n if r == U256::zero() {\n\n return Err(PartialVMError::new(StatusCode::ARITHMETIC_ERROR)\n\n .with_message(format!(\"Cannot div {:?} by {:?}\", l, r)));\n\n }\n\n\n\n let res = l.div(r);\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::U256_DIV, 0);\n\n Ok(NativeResult::ok(cost, vec![wrap_u256(res)]))\n\n}\n\n\n", "file_path": "language/move-vm/natives/src/u256.rs", "rank": 55, "score": 166959.2851762668 }, { "content": "pub fn as_u8(\n\n context: &mut impl NativeContext,\n\n ty_args: Vec<Type>,\n\n mut arguments: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.is_empty());\n\n debug_assert!(arguments.len() == 1);\n\n\n\n let u256 = unwrap_u256(pop_arg!(arguments, Struct))?;\n\n let value = if u256 > U256::from(u8::MAX) {\n\n Err(PartialVMError::new(StatusCode::ARITHMETIC_ERROR)\n\n .with_message(format!(\"Cannot cast u256({}) to u8\", u256)))\n\n } else {\n\n Ok(u256.as_u64() as u8)\n\n }?;\n\n\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::U256_AS_U8, 0);\n\n Ok(NativeResult::ok(cost, vec![Value::u8(value)]))\n\n}\n\n\n", "file_path": "language/move-vm/natives/src/u256.rs", "rank": 56, "score": 166959.2851762668 }, { "content": "pub fn sub(\n\n context: &mut impl NativeContext,\n\n ty_args: Vec<Type>,\n\n mut arguments: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.is_empty());\n\n debug_assert!(arguments.len() == 
2);\n\n\n\n let r = unwrap_u256(pop_arg!(arguments, Struct))?;\n\n let l = unwrap_u256(pop_arg!(arguments, Struct))?;\n\n\n\n let (res, overflowed) = l.overflowing_sub(r);\n\n if overflowed {\n\n return Err(PartialVMError::new(StatusCode::ARITHMETIC_ERROR)\n\n .with_message(format!(\"Cannot sub {:?} from {:?}\", r, l)));\n\n }\n\n\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::U256_SUB, 0);\n\n Ok(NativeResult::ok(cost, vec![wrap_u256(res)]))\n\n}\n\n\n", "file_path": "language/move-vm/natives/src/u256.rs", "rank": 57, "score": 166959.2851762668 }, { "content": "pub fn as_u64(\n\n context: &mut impl NativeContext,\n\n ty_args: Vec<Type>,\n\n mut arguments: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.is_empty());\n\n debug_assert!(arguments.len() == 1);\n\n\n\n let u256 = unwrap_u256(pop_arg!(arguments, Struct))?;\n\n let value = if u256 > U256::from(u64::MAX) {\n\n Err(PartialVMError::new(StatusCode::ARITHMETIC_ERROR)\n\n .with_message(format!(\"Cannot cast u256({}) to u64\", u256)))\n\n } else {\n\n Ok(u256.as_u64())\n\n }?;\n\n\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::U256_AS_U64, 0);\n\n Ok(NativeResult::ok(cost, vec![Value::u64(value)]))\n\n}\n\n\n", "file_path": "language/move-vm/natives/src/u256.rs", "rank": 58, "score": 166959.2851762668 }, { "content": "pub fn mul(\n\n context: &mut impl NativeContext,\n\n ty_args: Vec<Type>,\n\n mut arguments: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.is_empty());\n\n debug_assert!(arguments.len() == 2);\n\n\n\n let r = unwrap_u256(pop_arg!(arguments, Struct))?;\n\n let l = unwrap_u256(pop_arg!(arguments, Struct))?;\n\n\n\n let (res, overflowed) = l.overflowing_mul(r);\n\n if overflowed {\n\n return Err(PartialVMError::new(StatusCode::ARITHMETIC_ERROR)\n\n .with_message(format!(\"Cannot mul {:?} and {:?}\", l, r)));\n\n }\n\n\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::U256_MUL, 
0);\n\n Ok(NativeResult::ok(cost, vec![wrap_u256(res)]))\n\n}\n\n\n", "file_path": "language/move-vm/natives/src/u256.rs", "rank": 59, "score": 166959.2851762668 }, { "content": "pub fn get_price_script(\n\n addr_for_eth_btc: AccountAddress,\n\n addr_for_btc_pont: AccountAddress,\n\n) -> ScriptTx {\n\n ScriptTx::new(\n\n include_bytes!(\"../assets/target/scripts/get_price_test.mv\").to_vec(),\n\n vec![],\n\n vec![],\n\n vec![addr_for_eth_btc, addr_for_btc_pont],\n\n )\n\n}\n\n\n", "file_path": "mvm/tests/common/assets.rs", "rank": 60, "score": 166959.2851762668 }, { "content": "pub fn from_u64(\n\n context: &mut impl NativeContext,\n\n ty_args: Vec<Type>,\n\n mut arguments: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.is_empty());\n\n debug_assert!(arguments.len() == 1);\n\n\n\n let u256 = U256::from(pop_arg!(arguments, u64));\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::U256_FROM_U64, 0);\n\n Ok(NativeResult::ok(cost, vec![wrap_u256(u256)]))\n\n}\n\n\n", "file_path": "language/move-vm/natives/src/u256.rs", "rank": 61, "score": 166959.2851762668 }, { "content": "pub fn test_balance_script(\n\n addr: AccountAddress,\n\n addr_2: AccountAddress,\n\n init_usdt: u128,\n\n init_pont: u128,\n\n init_btc: u128,\n\n) -> ScriptTx {\n\n ScriptTx::new(\n\n include_bytes!(\"../assets/target/scripts/test_balance.mv\").to_vec(),\n\n vec![\n\n ScriptArg::U128(init_usdt),\n\n ScriptArg::U128(init_pont),\n\n ScriptArg::U128(init_btc),\n\n ],\n\n vec![],\n\n vec![addr, addr_2],\n\n )\n\n}\n\n\n", "file_path": "mvm/tests/common/assets.rs", "rank": 62, "score": 166959.2851762668 }, { "content": "pub fn as_u128(\n\n context: &mut impl NativeContext,\n\n ty_args: Vec<Type>,\n\n mut arguments: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.is_empty());\n\n debug_assert!(arguments.len() == 1);\n\n\n\n let u256 = unwrap_u256(pop_arg!(arguments, Struct))?;\n\n\n\n let value = if u256 > 
U256::from(u128::MAX) {\n\n Err(PartialVMError::new(StatusCode::ARITHMETIC_ERROR)\n\n .with_message(format!(\"Cannot cast u256({}) to u128\", u256)))\n\n } else {\n\n Ok(u256.as_u128())\n\n }?;\n\n\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::U256_AS_U128, 0);\n\n Ok(NativeResult::ok(cost, vec![Value::u128(value)]))\n\n}\n\n\n", "file_path": "language/move-vm/natives/src/u256.rs", "rank": 63, "score": 166959.2851762668 }, { "content": "pub fn add(\n\n context: &mut impl NativeContext,\n\n ty_args: Vec<Type>,\n\n mut arguments: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.is_empty());\n\n debug_assert!(arguments.len() == 2);\n\n\n\n let r = unwrap_u256(pop_arg!(arguments, Struct))?;\n\n let l = unwrap_u256(pop_arg!(arguments, Struct))?;\n\n\n\n let (res, overflowed) = l.overflowing_add(r);\n\n if overflowed {\n\n return Err(PartialVMError::new(StatusCode::ARITHMETIC_ERROR)\n\n .with_message(format!(\"Cannot add {:?} and {:?}\", l, r)));\n\n }\n\n\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::U256_ADD, 0);\n\n Ok(NativeResult::ok(cost, vec![wrap_u256(res)]))\n\n}\n\n\n", "file_path": "language/move-vm/natives/src/u256.rs", "rank": 64, "score": 166959.2851762668 }, { "content": "pub fn from_u8(\n\n context: &mut impl NativeContext,\n\n ty_args: Vec<Type>,\n\n mut arguments: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.is_empty());\n\n debug_assert!(arguments.len() == 1);\n\n\n\n let u256 = U256::from(pop_arg!(arguments, u8));\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::U256_FROM_U8, 0);\n\n Ok(NativeResult::ok(cost, vec![wrap_u256(u256)]))\n\n}\n\n\n", "file_path": "language/move-vm/natives/src/u256.rs", "rank": 65, "score": 166959.2851762668 }, { "content": "pub fn from_u128(\n\n context: &mut impl NativeContext,\n\n ty_args: Vec<Type>,\n\n mut arguments: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n 
debug_assert!(ty_args.is_empty());\n\n debug_assert!(arguments.len() == 1);\n\n let u256 = U256::from(pop_arg!(arguments, u128));\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::U256_FROM_U128, 0);\n\n Ok(NativeResult::ok(cost, vec![wrap_u256(u256)]))\n\n}\n\n\n", "file_path": "language/move-vm/natives/src/u256.rs", "rank": 66, "score": 166959.2851762668 }, { "content": "// Parse an alias for a module member:\n\n// UseMember = <Identifier> <UseAlias>\n\nfn parse_use_member<'input>(tokens: &mut Lexer<'input>) -> Result<(Name, Option<Name>), Error> {\n\n let member = parse_identifier(tokens)?;\n\n let alias_opt = parse_use_alias(tokens)?;\n\n Ok((member, alias_opt))\n\n}\n\n\n", "file_path": "language/move-lang/src/parser/syntax.rs", "rank": 67, "score": 165364.7845935456 }, { "content": "/// get_native_balance<Token>(address: &signer): u128;\n\npub fn get_balance(\n\n context: &mut impl NativeContext,\n\n mut ty_args: Vec<Type>,\n\n mut arguments: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.len() == 1);\n\n debug_assert!(arguments.len() == 1);\n\n\n\n let address = account_address(&pop_arg!(arguments, SignerRef).borrow_signer()?.0)?;\n\n let wallet_id = wallet_id(context, address, ty_args.pop().unwrap())?;\n\n\n\n if let Some(balance) = context.get_balance(&wallet_id) {\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::GET_BALANCE, 0);\n\n Ok(NativeResult::ok(\n\n cost,\n\n vec![Value(ValueImpl::U128(balance))],\n\n ))\n\n } else {\n\n Err(PartialVMError::new(StatusCode::RESOURCE_DOES_NOT_EXIST)\n\n .with_message(format!(\"Balance({:?}) not found.\", wallet_id)))\n\n }\n\n}\n\n\n", "file_path": "language/move-vm/natives/src/account.rs", "rank": 68, "score": 165186.6062990891 }, { "content": "/// withdraw_to_native<Token>(address: &signer, balance: Pontem::T<Token>);\n\npub fn native_withdraw(\n\n context: &mut impl NativeContext,\n\n mut ty_args: Vec<Type>,\n\n mut arguments: 
VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.len() == 1);\n\n debug_assert!(arguments.len() == 2);\n\n let balance = destroy_balance(arguments.pop_back().unwrap().0)?;\n\n let address = account_address(&pop_arg!(arguments, SignerRef).borrow_signer()?.0)?;\n\n\n\n let wallet_id = wallet_id(context, address, ty_args.pop().unwrap())?;\n\n\n\n context.save_balance_operation(wallet_id, BalanceOperation::Withdraw(balance));\n\n\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::WITHDRAW, 0);\n\n Ok(NativeResult::ok(cost, vec![]))\n\n}\n\n\n", "file_path": "language/move-vm/natives/src/account.rs", "rank": 69, "score": 165186.51123480676 }, { "content": "/// deposit_from_native<Token>(address: &signer, amount: u128): Pontem::T<Token>;\n\npub fn native_deposit(\n\n context: &mut impl NativeContext,\n\n mut ty_args: Vec<Type>,\n\n mut arguments: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.len() == 1);\n\n debug_assert!(arguments.len() == 2);\n\n\n\n let amount = pop_arg!(arguments, u128);\n\n let address = account_address(&pop_arg!(arguments, SignerRef).borrow_signer()?.0)?;\n\n\n\n let wallet_id = wallet_id(context, address, ty_args.pop().unwrap())?;\n\n\n\n if let Some(balance) = context.get_balance(&wallet_id) {\n\n if balance >= amount {\n\n context.save_balance_operation(wallet_id, BalanceOperation::Deposit(amount));\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::DEPOSIT, 0);\n\n Ok(NativeResult::ok(cost, vec![create_balance(amount)]))\n\n } else {\n\n Err(\n", "file_path": "language/move-vm/natives/src/account.rs", "rank": 70, "score": 165186.46491536841 }, { "content": "pub fn native_borrow(\n\n context: &impl NativeContext,\n\n ty_args: Vec<Type>,\n\n mut args: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.len() == 1);\n\n debug_assert!(args.len() == 2);\n\n\n\n let idx = pop_arg!(args, u64) as usize;\n\n let r = 
pop_arg!(args, VectorRef);\n\n\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::BORROW, 1);\n\n\n\n r.borrow_elem(idx, cost, &ty_args[0], context)\n\n}\n\n\n", "file_path": "language/move-vm/natives/src/vector.rs", "rank": 71, "score": 165181.06473453692 }, { "content": "pub fn native_swap(\n\n context: &impl NativeContext,\n\n ty_args: Vec<Type>,\n\n mut args: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.len() == 1);\n\n debug_assert!(args.len() == 3);\n\n\n\n let idx2 = pop_arg!(args, u64) as usize;\n\n let idx1 = pop_arg!(args, u64) as usize;\n\n let r = pop_arg!(args, VectorRef);\n\n\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::SWAP, 1);\n\n\n\n r.swap(idx1, idx2, cost, &ty_args[0], context)\n\n}\n", "file_path": "language/move-vm/natives/src/vector.rs", "rank": 72, "score": 165181.06473453692 }, { "content": "pub fn store_sys_resources_script(\n\n addr_for_block: AccountAddress,\n\n addr_for_timestamp: AccountAddress,\n\n) -> ScriptTx {\n\n ScriptTx::new(\n\n include_bytes!(\"../assets/target/scripts/store_system_resources.mv\").to_vec(),\n\n vec![],\n\n vec![],\n\n vec![addr_for_block, addr_for_timestamp],\n\n )\n\n}\n\n\n", "file_path": "mvm/tests/common/assets.rs", "rank": 73, "score": 165181.06473453692 }, { "content": "pub fn native_sha3_256(\n\n context: &impl NativeContext,\n\n _ty_args: Vec<Type>,\n\n mut arguments: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(_ty_args.is_empty());\n\n debug_assert!(arguments.len() == 1);\n\n\n\n let hash_arg = pop_arg!(arguments, Vec<u8>);\n\n\n\n let cost = native_gas(\n\n context.cost_table(),\n\n NativeCostIndex::SHA3_256,\n\n hash_arg.len(),\n\n );\n\n\n\n let hash_vec = HashValue::sha3_256_of(hash_arg.as_slice()).to_vec();\n\n let return_values = vec![Value::vector_u8(hash_vec)];\n\n Ok(NativeResult::ok(cost, return_values))\n\n}\n", "file_path": "language/move-vm/natives/src/hash.rs", "rank": 74, "score": 
165181.06473453692 }, { "content": "#[allow(unused_mut)]\n\n#[allow(unused_variables)]\n\npub fn native_print(\n\n context: &mut impl NativeContext,\n\n mut ty_args: Vec<Type>,\n\n mut args: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.len() == 1);\n\n debug_assert!(args.len() == 1);\n\n\n\n // No-op if the feature flag is not present.\n\n #[cfg(feature = \"debug_module\")]\n\n {\n\n let ty = ty_args.pop().unwrap();\n\n let r = pop_arg!(args, Reference);\n\n\n\n let mut buf = String::new();\n\n print_reference(&mut buf, &r)?;\n\n println!(\"[debug] {}\", buf);\n\n }\n\n\n\n Ok(NativeResult::ok(ONE_GAS_UNIT, vec![]))\n\n}\n\n\n", "file_path": "language/move-vm/natives/src/debug.rs", "rank": 75, "score": 165181.06473453692 }, { "content": "pub fn native_pop(\n\n context: &impl NativeContext,\n\n ty_args: Vec<Type>,\n\n mut args: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.len() == 1);\n\n debug_assert!(args.len() == 1);\n\n\n\n let r = pop_arg!(args, VectorRef);\n\n\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::POP_BACK, 1);\n\n\n\n r.pop(cost, &ty_args[0], context)\n\n}\n\n\n", "file_path": "language/move-vm/natives/src/vector.rs", "rank": 76, "score": 165181.06473453692 }, { "content": "pub fn native_length(\n\n context: &impl NativeContext,\n\n ty_args: Vec<Type>,\n\n mut args: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.len() == 1);\n\n debug_assert!(args.len() == 1);\n\n\n\n let r = pop_arg!(args, VectorRef);\n\n\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::LENGTH, 1);\n\n\n\n let len = r.len(&ty_args[0], context)?;\n\n Ok(NativeResult::ok(cost, vec![len]))\n\n}\n\n\n", "file_path": "language/move-vm/natives/src/vector.rs", "rank": 77, "score": 165181.06473453692 }, { "content": "pub fn native_sha2_256(\n\n context: &impl NativeContext,\n\n _ty_args: Vec<Type>,\n\n mut arguments: VecDeque<Value>,\n\n) -> 
PartialVMResult<NativeResult> {\n\n debug_assert!(_ty_args.is_empty());\n\n debug_assert!(arguments.len() == 1);\n\n\n\n let hash_arg = pop_arg!(arguments, Vec<u8>);\n\n\n\n let cost = native_gas(\n\n context.cost_table(),\n\n NativeCostIndex::SHA2_256,\n\n hash_arg.len(),\n\n );\n\n\n\n let hash_vec = Sha256::digest(hash_arg.as_slice()).to_vec();\n\n let return_values = vec![Value::vector_u8(hash_vec)];\n\n Ok(NativeResult::ok(cost, return_values))\n\n}\n\n\n", "file_path": "language/move-vm/natives/src/hash.rs", "rank": 78, "score": 165181.06473453692 }, { "content": "pub fn native_empty(\n\n context: &impl NativeContext,\n\n ty_args: Vec<Type>,\n\n args: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.len() == 1);\n\n debug_assert!(args.is_empty());\n\n\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::EMPTY, 1);\n\n Vector::empty(cost, &ty_args[0], context)\n\n}\n\n\n", "file_path": "language/move-vm/natives/src/vector.rs", "rank": 79, "score": 165181.06473453692 }, { "content": "/// Rust implementation of Move's `native public fun to_bytes<T>(&T): vector<u8>`\n\npub fn native_to_bytes(\n\n context: &mut impl NativeContext,\n\n mut ty_args: Vec<Type>,\n\n mut args: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.len() == 1);\n\n debug_assert!(args.len() == 1);\n\n\n\n let ref_to_val = pop_arg!(args, Reference);\n\n\n\n let arg_type = ty_args.pop().unwrap();\n\n // delegate to the BCS serialization for `Value`\n\n let serialized_value_opt = match context.type_to_type_layout(&arg_type)? 
{\n\n None => None,\n\n Some(layout) => ref_to_val.read_ref()?.simple_serialize(&layout),\n\n };\n\n let serialized_value = match serialized_value_opt {\n\n None => {\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::BCS_TO_BYTES, 1);\n\n return Ok(NativeResult::err(cost, NFE_BCS_SERIALIZATION_FAILURE));\n", "file_path": "language/move-vm/natives/src/bcs.rs", "rank": 80, "score": 165181.06473453692 }, { "content": "pub fn gas() -> Gas {\n\n Gas::new(10_000, 1).unwrap()\n\n}\n\n\n", "file_path": "mvm/tests/common/assets.rs", "rank": 81, "score": 164397.18076922488 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\nstruct SerdeNameError(Option<&'static str>);\n\n\n\n#[cfg(feature = \"std\")]\n\nimpl std::error::Error for SerdeNameError {}\n\n\n\nimpl alloc::fmt::Display for SerdeNameError {\n\n fn fmt(&self, f: &mut Formatter<'_>) -> alloc::fmt::Result {\n\n write!(f, \"{0:?}\", self.0)\n\n }\n\n}\n\n\n\nimpl serde::de::Error for SerdeNameError {\n\n fn custom<T: alloc::fmt::Display>(_msg: T) -> Self {\n\n unreachable!();\n\n }\n\n}\n\n\n\nmacro_rules! declare_deserialize {\n\n ($method:ident) => {\n\n fn $method<V>(self, _visitor: V) -> core::result::Result<V::Value, SerdeNameError>\n", "file_path": "crypto/crypto/src/serde_name.rs", "rank": 82, "score": 163546.12691711838 }, { "content": "/// Parse the `input` string as a file of Move source code and return the\n\n/// result as either a pair of FileDefinition and doc comments or some Errors. The `file` name\n\n/// is used to identify source locations in error messages.\n\npub fn parse_file_string(\n\n file: &'static str,\n\n input: &str,\n\n comment_map: BTreeMap<Span, String>,\n\n) -> Result<(Vec<Definition>, BTreeMap<u32, String>), Errors> {\n\n let mut tokens = Lexer::new(input, file, comment_map);\n\n match tokens.advance() {\n\n Err(err) => Err(vec![err]),\n\n Ok(..) 
=> Ok(()),\n\n }?;\n\n match parse_file(&mut tokens) {\n\n Err(err) => Err(vec![err]),\n\n Ok(def) => {\n\n let doc_comments = tokens.check_and_get_doc_comments()?;\n\n Ok((def, doc_comments))\n\n }\n\n }\n\n}\n", "file_path": "language/move-lang/src/parser/syntax.rs", "rank": 83, "score": 163472.70625430282 }, { "content": "pub fn native_push_back(\n\n context: &impl NativeContext,\n\n ty_args: Vec<Type>,\n\n mut args: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.len() == 1);\n\n debug_assert!(args.len() == 2);\n\n\n\n let e = args.pop_back().unwrap();\n\n let r = pop_arg!(args, VectorRef);\n\n\n\n let cost = native_gas(\n\n context.cost_table(),\n\n NativeCostIndex::PUSH_BACK,\n\n e.size().get() as usize,\n\n );\n\n\n\n r.push_back(e, &ty_args[0], context)?;\n\n Ok(NativeResult::ok(cost, vec![]))\n\n}\n\n\n", "file_path": "language/move-vm/natives/src/vector.rs", "rank": 84, "score": 163467.99383066813 }, { "content": "pub fn new_from_instructions(\n\n mut instrs: Vec<(Bytecode, GasCost)>,\n\n native_table: Vec<GasCost>,\n\n) -> CostTable {\n\n instrs.sort_by_key(|cost| instruction_key(&cost.0));\n\n\n\n if cfg!(debug_assertions) {\n\n let mut instructions_covered = 0;\n\n for (index, (instr, _)) in instrs.iter().enumerate() {\n\n let key = instruction_key(instr);\n\n if index == (key - 1) as usize {\n\n instructions_covered += 1;\n\n }\n\n }\n\n debug_assert!(\n\n instructions_covered == Bytecode::NUM_INSTRUCTIONS,\n\n \"all instructions must be in the cost table\"\n\n );\n\n }\n\n let instruction_table = instrs\n\n .into_iter()\n\n .map(|(_, cost)| cost)\n\n .collect::<Vec<GasCost>>();\n\n CostTable {\n\n instruction_table,\n\n native_table,\n\n gas_constants: GasConstants::default(),\n\n }\n\n}\n\n\n", "file_path": "language/move-vm/types/src/gas_schedule.rs", "rank": 85, "score": 163467.99383066813 }, { "content": "pub fn native_destroy_empty(\n\n context: &impl NativeContext,\n\n ty_args: Vec<Type>,\n\n mut args: 
VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.len() == 1);\n\n debug_assert!(args.len() == 1);\n\n\n\n let v = pop_arg!(args, Vector);\n\n\n\n let cost = native_gas(context.cost_table(), NativeCostIndex::DESTROY_EMPTY, 1);\n\n\n\n v.destroy_empty(cost, &ty_args[0], context)\n\n}\n\n\n", "file_path": "language/move-vm/natives/src/vector.rs", "rank": 86, "score": 163467.99383066813 }, { "content": "pub fn native_emit_event(\n\n context: &mut impl NativeContext,\n\n mut ty_args: Vec<Type>,\n\n mut arguments: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.len() == 1);\n\n debug_assert!(arguments.len() == 2);\n\n\n\n let ty = ty_args.pop().unwrap();\n\n let msg = arguments.pop_back().unwrap();\n\n let address = account_address(&pop_arg!(arguments, SignerRef).borrow_signer()?.0)?;\n\n\n\n let cost = native_gas(\n\n context.cost_table(),\n\n NativeCostIndex::EMIT_EVENT,\n\n msg.size().get() as usize,\n\n );\n\n\n\n let save_res = context.save_event(address, ty, msg, context.caller().cloned())?;\n\n\n", "file_path": "language/move-vm/natives/src/event.rs", "rank": 87, "score": 163467.99383066813 }, { "content": "/// Calculate the intrinsic gas for the transaction based upon its size in bytes/words.\n\npub fn calculate_intrinsic_gas(\n\n transaction_size: AbstractMemorySize<GasCarrier>,\n\n gas_constants: &GasConstants,\n\n) -> GasUnits<GasCarrier> {\n\n precondition!(transaction_size.get() <= MAX_TRANSACTION_SIZE_IN_BYTES as GasCarrier);\n\n let min_transaction_fee = gas_constants.min_transaction_gas_units;\n\n\n\n if transaction_size.get() > gas_constants.large_transaction_cutoff.get() {\n\n let excess = transaction_size.sub(gas_constants.large_transaction_cutoff);\n\n min_transaction_fee.add(gas_constants.intrinsic_gas_per_byte.mul(excess))\n\n } else {\n\n min_transaction_fee.unitary_cast()\n\n }\n\n}\n\n\n\n#[allow(non_camel_case_types)]\n\n#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq, 
PartialOrd, Ord)]\n\n#[repr(u8)]\n\npub enum NativeCostIndex {\n\n SHA2_256 = 0,\n", "file_path": "language/move-vm/types/src/gas_schedule.rs", "rank": 88, "score": 161816.55649919517 }, { "content": "#[allow(unused_variables)]\n\npub fn native_print_stack_trace(\n\n context: &mut impl NativeContext,\n\n ty_args: Vec<Type>,\n\n args: VecDeque<Value>,\n\n) -> PartialVMResult<NativeResult> {\n\n debug_assert!(ty_args.is_empty());\n\n debug_assert!(args.is_empty());\n\n\n\n #[cfg(feature = \"debug_module\")]\n\n {\n\n let mut s = String::new();\n\n context.print_stack_trace(&mut s)?;\n\n println!(\"{}\", s);\n\n }\n\n\n\n Ok(NativeResult::ok(ONE_GAS_UNIT, vec![]))\n\n}\n", "file_path": "language/move-vm/natives/src/debug.rs", "rank": 89, "score": 161816.55649919517 }, { "content": "pub fn abort_module() -> ModuleTx {\n\n ModuleTx::new(\n\n include_bytes!(\"../assets/target/modules/Abort.mv\").to_vec(),\n\n CORE_CODE_ADDRESS,\n\n )\n\n}\n\n\n", "file_path": "mvm/tests/common/assets.rs", "rank": 90, "score": 160771.801768274 }, { "content": "pub fn pont_module() -> ModuleTx {\n\n ModuleTx::new(\n\n include_bytes!(\"../assets/target/modules/PONT.mv\").to_vec(),\n\n CORE_CODE_ADDRESS,\n\n )\n\n}\n\n\n", "file_path": "mvm/tests/common/assets.rs", "rank": 91, "score": 160771.801768274 }, { "content": "pub fn event_module() -> ModuleTx {\n\n ModuleTx::new(\n\n include_bytes!(\"../assets/target/modules/Event.mv\").to_vec(),\n\n CORE_CODE_ADDRESS,\n\n )\n\n}\n\n\n", "file_path": "mvm/tests/common/assets.rs", "rank": 92, "score": 160771.801768274 }, { "content": "pub fn time_module() -> ModuleTx {\n\n ModuleTx::new(\n\n include_bytes!(\"../assets/target/modules/Time.mv\").to_vec(),\n\n CORE_CODE_ADDRESS,\n\n )\n\n}\n\n\n", "file_path": "mvm/tests/common/assets.rs", "rank": 93, "score": 160771.801768274 }, { "content": "pub fn block_module() -> ModuleTx {\n\n ModuleTx::new(\n\n include_bytes!(\"../assets/target/modules/Block.mv\").to_vec(),\n\n CORE_CODE_ADDRESS,\n\n 
)\n\n}\n\n\n", "file_path": "mvm/tests/common/assets.rs", "rank": 94, "score": 160771.801768274 }, { "content": "pub fn pontem_module() -> ModuleTx {\n\n ModuleTx::new(\n\n include_bytes!(\"../assets/target/modules/Pontem.mv\").to_vec(),\n\n CORE_CODE_ADDRESS,\n\n )\n\n}\n\n\n", "file_path": "mvm/tests/common/assets.rs", "rank": 95, "score": 160771.801768274 }, { "content": "pub fn coins_module() -> ModuleTx {\n\n ModuleTx::new(\n\n include_bytes!(\"../assets/target/modules/Coins.mv\").to_vec(),\n\n CORE_CODE_ADDRESS,\n\n )\n\n}\n\n\n", "file_path": "mvm/tests/common/assets.rs", "rank": 96, "score": 160771.801768274 }, { "content": "pub fn store_module() -> ModuleTx {\n\n ModuleTx::new(\n\n include_bytes!(\"../assets/target/modules/Store.mv\").to_vec(),\n\n CORE_CODE_ADDRESS,\n\n )\n\n}\n\n\n", "file_path": "mvm/tests/common/assets.rs", "rank": 97, "score": 160771.801768274 }, { "content": "/// Specific gas per instruction configuration for dvm.\n\n/// INITIAL_GAS_SCHEDULE from libra with dfinance update.\n\n///\n\n/// Custom instructions costs:\n\n/// MutBorrowGlobal -> GasCost::new(1000, 3);\n\n/// MutBorrowGlobalGeneric -> GasCost::new(1000, 3);\n\n/// ImmBorrowGlobal -> GasCost::new(1000, 3);\n\n/// ImmBorrowGlobalGeneric -> GasCost::new(1000, 3);\n\npub fn cost_table() -> CostTable {\n\n let mut instrs = vec![\n\n (MoveTo(StructDefinitionIndex::new(0)), GasCost::new(825, 1)),\n\n (\n\n MoveToGeneric(StructDefInstantiationIndex::new(0)),\n\n GasCost::new(825, 1),\n\n ),\n\n (\n\n MoveFrom(StructDefinitionIndex::new(0)),\n\n GasCost::new(917, 1),\n\n ),\n\n (\n\n MoveFromGeneric(StructDefInstantiationIndex::new(0)),\n\n GasCost::new(917, 1),\n\n ),\n\n (BrTrue(0), GasCost::new(31, 1)),\n\n (WriteRef, GasCost::new(65, 1)),\n\n (Mul, GasCost::new(41, 1)),\n\n (MoveLoc(0), GasCost::new(41, 1)),\n\n (And, GasCost::new(49, 1)),\n", "file_path": "mvm/src/gas_schedule.rs", "rank": 98, "score": 160771.801768274 }, { "content": "pub fn account_module() -> ModuleTx {\n\n 
ModuleTx::new(\n\n include_bytes!(\"../assets/target/modules/Account.mv\").to_vec(),\n\n CORE_CODE_ADDRESS,\n\n )\n\n}\n\n\n", "file_path": "mvm/tests/common/assets.rs", "rank": 99, "score": 160771.801768274 } ]
Rust
platform/nutekt-digital/demos/raves/src/lib.rs
atomb/logue-sdk
3ba795ece7e90871e171c483c652040186800f5b
#![no_std] use panic_halt as _; use core::f32; use core::ptr; use micromath::F32Ext; pub mod dsp; pub mod mathutil; pub mod nts1; use dsp::biquad; use mathutil::*; use nts1::*; use nts1::clipsat::osc_softclipf; use nts1::platform::*; use nts1::random::osc_white; use nts1::userosc::*; use nts1::wavebank::*; #[repr(u8)] pub enum RavesFlags { None = 0, Wave0 = 1 << 1, Wave1 = 1 << 2, SubWave = 1 << 3, RingMix = 1 << 4, BitCrush = 1 << 5, Reset = 1 << 6, } #[repr(C)] pub struct RavesState { wave0: *const WaveLUT, wave1: *const WaveLUT, subwave: *const WaveLUT, phi0: f32, phi1: f32, phisub: f32, w00: f32, w01: f32, w0sub: f32, lfo: f32, lfoz: f32, dither: f32, bitres: f32, bitresrcp: f32, imperfection: f32, flags: u8, } impl RavesState { pub const fn new() -> Self { RavesState { wave0: ptr::null(), wave1: ptr::null(), subwave: ptr::null(), phi0: 0.0, phi1: 0.0, phisub: 0.0, w00: K_SR440, w01: K_SR440, w0sub: K_SR220, lfo: 0.0, lfoz: 0.0, dither: 0.0, bitres: 1.0, bitresrcp: 1.0, imperfection: 0.0, flags: RavesFlags::None as u8, } } pub fn init(&mut self) { self.wave0 = get_waves_a_elt(0); self.wave1 = get_waves_d_elt(0); self.subwave = get_waves_a_elt(0); self.imperfection = osc_white() * 1.0417e-006; } pub fn reset(&mut self) { self.phi0 = 0.0; self.phi1 = 0.0; self.phisub = 0.0; self.lfo = self.lfoz; } } #[repr(C)] pub struct RavesParams { submix: f32, ringmix: f32, bitcrush: f32, shape: f32, shiftshape: f32, wave0: u8, wave1: u8, subwave: u8, padding: u8, } impl RavesParams { pub const fn new() -> Self { RavesParams { submix: 0.05, ringmix: 0.0, bitcrush: 0.0, shape: 0.0, shiftshape: 0.0, wave0: 0, wave1: 0, subwave: 0, padding: 0, } } } #[repr(C)] pub struct Raves { state: RavesState, params: RavesParams, prelpf: biquad::BiQuad, postlpf: biquad::BiQuad, } impl Raves { pub const fn new() -> Self { Raves { params: RavesParams::new(), state: RavesState::new(), prelpf: biquad::BiQuad::new(), postlpf: biquad::BiQuad::new(), } } pub fn init(&mut self) { self.params = 
RavesParams::new(); self.state = RavesState::new(); self.state.init(); self.prelpf.coeffs.set_pole_lp(0.8); self.postlpf.coeffs.set_folp(osc_tanpif(0.45)); } pub fn update_pitch(&mut self, w0: f32) { let w0new = w0 + self.state.imperfection; let drift = self.params.shiftshape; self.state.w00 = w0new; self.state.w01 = w0new + drift * 5.20833333333333e-006; self.state.w0sub = 0.5 * w0new + drift * 3.125e-006; } pub fn update_waves(&mut self, flags: u16) { if (flags & RavesFlags::Wave0 as u16) != 0 { let k_a_thr = K_WAVES_A_CNT; let k_b_thr = k_a_thr + K_WAVES_B_CNT; let k_c_thr = k_b_thr + K_WAVES_C_CNT; let mut idx = self.params.wave0 as usize; if idx < k_a_thr { self.state.wave0 = get_waves_a_elt(idx); } else if idx < k_b_thr { idx -= k_a_thr; self.state.wave0 = get_waves_b_elt(idx); } else if idx < k_c_thr { idx -= k_b_thr; self.state.wave0 = get_waves_c_elt(idx); } else { } } if (flags & RavesFlags::Wave1 as u16) != 0 { let k_d_thr = K_WAVES_D_CNT; let k_e_thr = k_d_thr + K_WAVES_E_CNT; let k_f_thr = k_e_thr + K_WAVES_F_CNT; let mut idx = self.params.wave1 as usize; if idx < k_d_thr { self.state.wave1 = get_waves_d_elt(idx); } else if idx < k_e_thr { idx -= k_d_thr; self.state.wave1 = get_waves_e_elt(idx); } else if idx < k_f_thr { idx -= k_e_thr; self.state.wave1 = get_waves_f_elt(idx); } else { } } if (flags & RavesFlags::SubWave as u16) != 0 { self.state.subwave = get_waves_a_elt(self.params.subwave as usize); } } } pub fn osc_init(raves: &mut Raves, _platform: u32, _api: u32) { raves.init(); } pub fn osc_cycle(raves: &mut Raves, params: &UserOscParams, yn: &mut [i32]) { let phi = (params.pitch >> 8) as u8; let plo = (params.pitch & 0xFF) as u8; let flags = raves.state.flags; raves.update_pitch(osc_w0f_for_note(phi, plo)); raves.update_waves(flags as u16); let p : &RavesParams = &raves.params; { let sm : &mut RavesState = &mut raves.state; if (flags as u8) & (RavesFlags::Reset as u8) != 0 { sm.reset(); } if (flags as u8) & (RavesFlags::BitCrush as u8) != 0 { 
sm.dither = p.bitcrush * 2e-008; sm.bitres = osc_bitresf(p.bitcrush); sm.bitresrcp = 1.0 / sm.bitres; } sm.lfo = q31_to_f32(params.shape_lfo); sm.flags = RavesFlags::None as u8; } let s : &RavesState = &raves.state; let mut phi0 = s.phi0; let mut phi1 = s.phi1; let mut phisub = s.phisub; let mut lfoz = s.lfoz; let lfo_inc = (s.lfo - lfoz) / yn.len() as f32; let submix = p.submix; let ringmix = p.ringmix; let prelpf = &mut raves.prelpf; let postlpf = &mut raves.postlpf; for y in yn.iter_mut() { let wavemix = clipminmaxf(0.005, p.shape + lfoz, 0.995); let mut sig = (1.0 - wavemix) * osc_wave_scanf(wave_table_ref(s.wave0), phi0); sig += wavemix * osc_wave_scanf(wave_table_ref(s.wave1), phi1); let subsig = osc_wave_scanf(wave_table_ref(s.subwave), phisub); sig = (1.0 - submix) * sig + submix * subsig; sig = (1.0 - ringmix) * sig + ringmix * (subsig * sig); sig = clip1m1f(sig); sig = prelpf.process_fo(sig); sig += s.dither * osc_white(); sig = (sig * s.bitres).round() * s.bitresrcp; sig = postlpf.process_fo(sig); sig = osc_softclipf(0.125, sig); *y = f32_to_q31(sig); phi0 += s.w00; phi0 -= (phi0 as u32) as f32; phi1 += s.w01; phi1 -= (phi1 as u32) as f32; phisub += s.w0sub; phisub -= (phisub as u32) as f32; lfoz += lfo_inc; } { let sm : &mut RavesState = &mut raves.state; sm.phi0 = phi0; sm.phi1 = phi1; sm.phisub = phisub; sm.lfoz = lfoz; } } pub fn osc_noteon(raves: &mut Raves, _params: &UserOscParams) { raves.state.flags |= RavesFlags::Reset as u8; } pub fn osc_param(raves: &mut Raves, index: UserOscParamId, value: u16) { let p : &mut RavesParams = &mut raves.params; let s : &mut RavesState = &mut raves.state; match index { UserOscParamId::Id1 => { let cnt : usize = K_WAVES_A_CNT + K_WAVES_B_CNT + K_WAVES_C_CNT; p.wave0 = (value % cnt as u16) as u8; s.flags |= RavesFlags::Wave0 as u8; }, UserOscParamId::Id2 => { let cnt : usize = K_WAVES_D_CNT + K_WAVES_E_CNT + K_WAVES_F_CNT; p.wave1 = (value % cnt as u16) as u8; s.flags |= RavesFlags::Wave1 as u8; }, 
UserOscParamId::Id3 => { p.subwave = (value % K_WAVES_A_CNT as u16) as u8; s.flags |= RavesFlags::SubWave as u8; }, UserOscParamId::Id4 => { p.submix = clip01f(0.05 + (value as f32 * 0.01 * 0.90)); }, UserOscParamId::Id5 => { p.ringmix = clip01f(value as f32 * 0.01); }, UserOscParamId::Id6 => { p.bitcrush = clip01f(value as f32 * 0.01); s.flags |= RavesFlags::BitCrush as u8; }, UserOscParamId::Shape => { p.shape = param_val_to_f32(value); }, UserOscParamId::ShiftShape => { p.shiftshape = 1.0 + param_val_to_f32(value); }, } }
#![no_std] use panic_halt as _; use core::f32; use core::ptr; use micromath::F32Ext; pub mod dsp; pub mod mathutil; pub mod nts1; use dsp::biquad; use mathutil::*; use nts1::*; use nts1::clipsat::osc_softclipf; use nts1::platform::*; use nts1::random::osc_white; use nts1::userosc::*; use nts1::wavebank::*; #[repr(u8)] pub enum RavesFlags { None = 0, Wave0 = 1 << 1, Wave1 = 1 << 2, SubWave = 1 << 3, RingMix = 1 << 4, BitCrush = 1 << 5, Reset = 1 << 6, } #[repr(C)] pub struct RavesState { wave0: *const WaveLUT, wave1: *const WaveLUT, subwave: *const WaveLUT, phi0: f32, phi1: f32, phisub: f32, w00: f32, w01: f32, w0sub: f32, lfo: f32, lfoz: f32, dither: f32, bitres: f32, bitresrcp: f32, imperfection: f32, flags: u8, } impl RavesState { pub const fn new() -> Self { RavesState { wave0: ptr::null(), wave1: ptr::null(), subwave: ptr::null(), phi0: 0.0, phi1: 0.0, phisub: 0.0, w00: K_SR440, w01: K_SR440, w0sub: K_SR220, lfo: 0.0, lfoz: 0.0, dither: 0.0, bitres: 1.0, bitresrcp: 1.0, imperfection: 0.0, flags: RavesFlags::None as u8, } } pub fn init(&mut self) { self.wave0 = get_waves_a_elt(0); self.wave1 = get_waves_d_elt(0); self.subwave = get_waves_a_elt(0); self.imperfection = osc_white() * 1.0417e-006; } pub fn reset(&mut self) { self.phi0 = 0.0; self.phi1 = 0.0; self.phisub = 0.0; self.lfo = self.lfoz; } } #[repr(C)] pub struct RavesParams { submix: f32, ringmix: f32, bitcrush: f32, shape: f32, shiftshape: f32, wave0: u8, wave1: u8, subwave: u8, padding: u8, } impl RavesParams { pub const fn new() -> Self { RavesParams { submix: 0.05, ringmix: 0.0, bitcrush: 0.0, shape: 0.0, shiftshape: 0.0, wave0: 0, wave1: 0, subwave: 0, padding: 0, } } } #[repr(C)] pub struct Raves { state: RavesState, params: RavesParams, prelpf: biquad::BiQuad, postlpf: biquad::BiQuad, } impl Raves { pub const fn new() -> Self { Raves { params: RavesParams::new(), state: RavesState::new(), prelpf: biquad::BiQuad::new(), postlpf: biquad::BiQuad::new(), } } pub fn init(&mut self) { self.params = 
RavesParams::new(); self.state = RavesState::new(); self.state.init(); self.prelpf.coeffs.set_pole_lp(0.8); self.postlpf.coeffs.set_folp(osc_tanpif(0.45)); } pub fn update_pitch(&mut self, w0: f32) { let w0new = w0 + self.state.imperfection; let drift = self.params.shiftshape; self.state.w00 = w0new; self.state.w01 = w0new + drift * 5.20833333333333e-006; self.state.w0sub = 0.5 * w0new + drift * 3.125e-006; } pub fn update_waves(&mut self, flags: u16) { if (flags & RavesFlags::Wave0 as u16) != 0 { let k_a_thr = K_WAVES_A_CNT; let k_b_thr = k_a_thr + K_WAVES_B_CNT; let k_c_thr = k_b_thr + K_WAVES_C_CNT; let mut idx = self.params.wave0 as usize; if idx < k_a_thr { self.state.wave0 = get_waves_a_elt(idx); } else if idx < k_b_thr { idx -= k_a_thr; self.state.wave0 = get_waves_b_elt(idx); } else if idx < k_c_thr { idx -= k_b_thr; self.state.wave0 = get_waves_c_elt(idx); } else { } } if (flags & RavesFlags::Wave1 as u16) != 0 { let k_d_thr = K_WAVES_D_CNT; let k_e_thr = k_d_thr + K_WAVES_E_CNT; let k_f_thr = k_e_thr + K_WAVES_F_CNT; let mut idx = self.params.wave1 as usize; if idx < k_d_thr { self.state.wave1 = get_waves_d_elt(idx); } else if idx < k_e_thr { idx -= k_d_thr; self.state.wave1 = get_waves_e_elt(idx); } else if idx < k_f_thr { idx -= k_e_thr; self.state.wave1 = get_waves_f_elt(idx); } else { } } if (flags & RavesFlags::SubWave as u16) != 0 { self.state.subwave = get_waves_a_elt(self.params.subwave as usize); } } } pub fn osc_init(raves: &mut Raves, _platform: u32, _api: u32) { raves.init(); } pub fn osc_cycle(raves: &mut Raves, params: &UserOscParams, yn: &mut [i32]) { let phi = (params.pitch >> 8) as u8; let plo = (params.pitch & 0xFF) as u8; let flags = raves.state.flags; raves.update_pitch(osc_w0f_for_note(phi, plo)); raves.update_waves(flags as u16); let p : &RavesParams = &raves.params; { let sm : &mut RavesState = &mut raves.state; if (flags as u8) & (RavesFlags::Reset as u8) != 0 { sm.reset(); } if (flags as u8) & (RavesFlags::BitCrush as u8) != 0 { 
sm.dither = p.bitcrush * 2e-008; sm.bitres = osc_bitresf(p.bitcrush); sm.bitresrcp = 1.0 / sm.bitres; } sm.lfo = q31_to_f32(params.shape_lfo); sm.flags = RavesFlags::None as u8; } let s : &RavesState = &raves.state; let mut phi0 = s.phi0; let mut phi1 = s.phi1; let mut phisub = s.phisub; let mut lfoz = s.lfoz; let lfo_inc = (s.lfo - lfoz) / yn.len() as f32; let submix = p.submix; let ringmix = p.ringmix; let prelpf = &mut raves.prelpf; let postlpf = &mut raves.postlpf; for y in yn.iter_mut() { let wavemix = clipminmaxf(0.005, p.shape + lfoz, 0.995); let mut sig = (1.0 - wavemix) * osc_wave_scanf(wave_table_ref(s.wave0), phi0); sig += wavemix * osc_wave_scanf(wave_table_ref(s.wave1), phi1); let subsig = osc_wave_scanf(wave_table_ref(s.subwave), phisub); sig = (1.0 - submix) * sig + submix * subsig; sig = (1.0 - ringmix) * sig + ringmix * (subsig * sig); sig = clip1m1f(sig); sig = prelpf.process_fo(sig); sig += s.dither * osc_white(); sig = (sig * s.bitres).round() * s.bitresrcp; sig = postlpf.process_fo(sig); sig = osc_softclipf(0.125, sig); *y = f32_to_q31(sig); phi0 += s.w00; phi0 -= (phi0 as u32) as f32; phi1 += s.w01; phi1 -= (phi1 as u32) as f32; phisub += s.w0sub; phisub -= (phisub as u32) as f32; lfoz += lfo_inc; } { let sm : &mut RavesState = &mut raves.state; sm.phi0 = phi0; sm.phi1 = phi1; sm.phisub = phisub; sm.lfoz = lfoz; } } pub fn osc_noteon(raves: &mut Raves, _params: &UserOscParams) { raves.state.flags |= RavesFlags::Reset as u8; } pub fn osc_param(raves: &mut Raves, index: UserOscParamId, value: u16) { let p : &mut RavesParams = &mut raves.params; let s : &mut RavesState = &mut raves.state; match index { UserOscParamId::Id1 => { let cnt : usize = K_WAVES_A_CNT + K_WAVES_B_C
NT + K_WAVES_C_CNT; p.wave0 = (value % cnt as u16) as u8; s.flags |= RavesFlags::Wave0 as u8; }, UserOscParamId::Id2 => { let cnt : usize = K_WAVES_D_CNT + K_WAVES_E_CNT + K_WAVES_F_CNT; p.wave1 = (value % cnt as u16) as u8; s.flags |= RavesFlags::Wave1 as u8; }, UserOscParamId::Id3 => { p.subwave = (value % K_WAVES_A_CNT as u16) as u8; s.flags |= RavesFlags::SubWave as u8; }, UserOscParamId::Id4 => { p.submix = clip01f(0.05 + (value as f32 * 0.01 * 0.90)); }, UserOscParamId::Id5 => { p.ringmix = clip01f(value as f32 * 0.01); }, UserOscParamId::Id6 => { p.bitcrush = clip01f(value as f32 * 0.01); s.flags |= RavesFlags::BitCrush as u8; }, UserOscParamId::Shape => { p.shape = param_val_to_f32(value); }, UserOscParamId::ShiftShape => { p.shiftshape = 1.0 + param_val_to_f32(value); }, } }
function_block-function_prefixed
[ { "content": "/// Convert 10-bit parameter value to f32\n\npub fn param_val_to_f32(x: u16) -> f32 {\n\n x as f32 * 9.77517106549365e-004f32\n\n}\n\n\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/userosc.rs", "rank": 1, "score": 362597.1218847061 }, { "content": "pub fn get_waves_f_elt(idx: usize) -> *const WaveLUT {\n\n unsafe { *wavesF.get_unchecked(idx) }\n\n}\n\n\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/wavebank.rs", "rank": 3, "score": 346915.4928925656 }, { "content": "pub fn get_waves_e_elt(idx: usize) -> *const WaveLUT {\n\n unsafe { *wavesE.get_unchecked(idx) }\n\n}\n\n\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/wavebank.rs", "rank": 4, "score": 346915.4928925657 }, { "content": "pub fn get_waves_d_elt(idx: usize) -> *const WaveLUT {\n\n unsafe { *wavesD.get_unchecked(idx) }\n\n}\n\n\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/wavebank.rs", "rank": 5, "score": 346915.4928925656 }, { "content": "pub fn get_waves_b_elt(idx: usize) -> *const WaveLUT {\n\n unsafe { *wavesB.get_unchecked(idx) }\n\n}\n\n\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/wavebank.rs", "rank": 6, "score": 346915.4928925656 }, { "content": "pub fn get_waves_a_elt(idx: usize) -> *const WaveLUT {\n\n unsafe { *wavesA.get_unchecked(idx) }\n\n}\n\n\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/wavebank.rs", "rank": 7, "score": 346915.49289256555 }, { "content": "pub fn get_waves_c_elt(idx: usize) -> *const WaveLUT {\n\n unsafe { *wavesC.get_unchecked(idx) }\n\n}\n\n\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/wavebank.rs", "rank": 8, "score": 346915.49289256555 }, { "content": "type CycleCallback = unsafe extern \"C\" fn(params: &UserOscParams, yn: *mut i32, frames: u32);\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/userosc.rs", "rank": 9, "score": 346213.8769592448 }, { "content": "pub fn f32_to_q31(x: f32) -> i32 {\n\n (x * F32_TO_Q31_C as 
f32) as i32\n\n}\n", "file_path": "platform/nutekt-digital/demos/raves/src/mathutil.rs", "rank": 10, "score": 328961.41287949774 }, { "content": "pub fn q31_to_f32(x: i32) -> f32 {\n\n x as f32 * Q31_TO_F32_C\n\n}\n\n\n", "file_path": "platform/nutekt-digital/demos/raves/src/mathutil.rs", "rank": 11, "score": 328961.41287949774 }, { "content": "type ParamCallback = unsafe extern \"C\" fn(index: UserOscParamId, value: u16);\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/userosc.rs", "rank": 12, "score": 316128.6965191422 }, { "content": "/// Get Hertz value for `note`, which should be in the range [0-151].\n\n/// Larger values will be clipped to 151.\n\npub fn osc_notehzf(note: u8) -> f32 {\n\n let idx = clipmaxnote(note, K_MIDI_TO_HZ_SIZE - 1);\n\n return unsafe { *midi_to_hz_lut_f.get_unchecked(idx) };\n\n}\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1.rs", "rank": 13, "score": 314199.5629124673 }, { "content": "pub fn osc_w0f_for_note(note: u8, modulation: u8) -> f32{\n\n let f0 = osc_notehzf(note);\n\n let f1 = osc_notehzf(note + 1);\n\n let f = clipmaxf(linintf(modulation as f32 * K_NOTE_MOD_FSCALE, f0, f1), K_NOTE_MAX_HZ);\n\n return f * K_SAMPLERATE_RECIP;\n\n}\n\n\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1.rs", "rank": 14, "score": 311190.42538131506 }, { "content": "pub fn osc_wave_scanuf(w: &WaveLUT, x: u32) -> f32 {\n\n let xu = x as usize;\n\n let x0 = xu >> K_WAVES_U32_SHIFT;\n\n let x1 = (x0 + 1) & K_WAVES_MASK;\n\n let fr = K_WAVES_FRRECIP * ((x & ((1 << K_WAVES_U32_SHIFT) - 1)) as f32);\n\n return linintf(fr, w[x0], w[x1]);\n\n}\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/wavebank.rs", "rank": 15, "score": 304111.26295662404 }, { "content": "pub fn clipmaxnote(note: u8, m: usize) -> usize {\n\n let unote = note as usize;\n\n if unote >= m { m } else { unote }\n\n}\n\n\n", "file_path": "platform/nutekt-digital/demos/raves/src/mathutil.rs", "rank": 16, "score": 300986.7924141361 }, 
{ "content": "pub fn clip01f(x: f32) -> f32 {\n\n if x > 1.0 { 1.0 } else if x < 0.0 { 0.0 } else { x }\n\n}\n\n\n", "file_path": "platform/nutekt-digital/demos/raves/src/mathutil.rs", "rank": 18, "score": 283246.6812362312 }, { "content": "pub fn clip1m1f(x: f32) -> f32 {\n\n if x > 1.0 { 1.0 } else if x < -1.0 { -1.0 } else { x }\n\n}\n\n\n", "file_path": "platform/nutekt-digital/demos/raves/src/mathutil.rs", "rank": 19, "score": 283246.6812362312 }, { "content": "pub fn clipmaxu32(x: u32, m: u32) -> u32 {\n\n if x >= m { m } else { x }\n\n}\n\n\n", "file_path": "platform/nutekt-digital/demos/raves/src/mathutil.rs", "rank": 20, "score": 281411.5410106015 }, { "content": "pub fn clipmaxf(x: f32, m: f32) -> f32 {\n\n if x >= m { m } else { x }\n\n}\n\n\n", "file_path": "platform/nutekt-digital/demos/raves/src/mathutil.rs", "rank": 21, "score": 281287.0522267573 }, { "content": "pub fn osc_tanpif(x: f32) -> f32 {\n\n let idxf = x * K_TANPI_RANGE_RECIP * K_TANPI_SIZE as f32;\n\n let idx = idxf as usize;\n\n let y0 = unsafe { *tanpi_lut_f.get_unchecked(idx) };\n\n let y1 = unsafe { *tanpi_lut_f.get_unchecked(idx + 1) };\n\n return linintf(idxf - idx as f32, y0, y1);\n\n}\n\n\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1.rs", "rank": 22, "score": 279860.5096022601 }, { "content": "pub fn osc_bitresf(x: f32) -> f32 {\n\n let xf = x * K_BITRES_SIZE as f32;\n\n let xi = xf as usize;\n\n let y0 = unsafe { *bitres_lut_f.get_unchecked(xi) };\n\n let y1 = unsafe { *bitres_lut_f.get_unchecked(xi + 1) };\n\n return linintf(xf - xi as f32, y0, y1);\n\n}\n\n\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1.rs", "rank": 23, "score": 279860.5096022601 }, { "content": "pub fn osc_softclipf(c: f32, x: f32) -> f32 {\n\n let x = clip1m1f(x);\n\n return x - c * (x*x*x);\n\n}\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/clipsat.rs", "rank": 24, "score": 275134.1041051911 }, { "content": "/// Returns a random integer in [0, u32::MAX]. 
Generated with\n\n/// Park-Miller-Carta.\n\npub fn osc_rand() -> u32 {\n\n unsafe { _osc_rand() }\n\n}\n\n\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/random.rs", "rank": 25, "score": 273531.8135763081 }, { "content": "/// Gaussian white noise. Returns a value in [-1.0, 1.0].\n\npub fn osc_white() -> f32 {\n\n unsafe { _osc_white() }\n\n}\n\n\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/random.rs", "rank": 26, "score": 273440.13563827984 }, { "content": "pub fn clipminmaxf(lo: f32, x: f32, hi: f32) -> f32 {\n\n if x >= hi { hi } else if x <= lo { lo } else { x }\n\n}\n\n\n", "file_path": "platform/nutekt-digital/demos/raves/src/mathutil.rs", "rank": 27, "score": 272648.75985121954 }, { "content": "pub fn osc_wave_scanf(w: &WaveLUT, x: f32) -> f32 {\n\n let p = x - (x as u32) as f32;\n\n let x0f = p * K_WAVES_SIZE as f32;\n\n let x0 = x0f as usize & K_WAVES_MASK;\n\n let x1 = (x0 + 1) & K_WAVES_MASK;\n\n return linintf(x0f - (x0f as u32) as f32, w[x0], w[x1]);\n\n}\n\n\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/wavebank.rs", "rank": 28, "score": 270860.7371927042 }, { "content": "pub fn osc_mcu_hash() -> u32 {\n\n unsafe { _osc_mcu_hash() }\n\n}\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/platform.rs", "rank": 29, "score": 270221.89588538784 }, { "content": "pub fn linintf(fr: f32, x0: f32, x1: f32) -> f32 {\n\n x0 + fr * (x1 - x0)\n\n}\n\n\n", "file_path": "platform/nutekt-digital/demos/raves/src/mathutil.rs", "rank": 30, "score": 269905.69031895604 }, { "content": "pub fn wave_table_ref(p: *const WaveLUT) -> &'static WaveLUT {\n\n unsafe { &*p }\n\n}\n\n\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/wavebank.rs", "rank": 31, "score": 266821.43878260534 }, { "content": "type ValueCallback = unsafe extern \"C\" fn(value: u16);\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/userosc.rs", "rank": 33, "score": 237189.6987230174 }, { "content": "type InitCallback 
= unsafe extern \"C\" fn(platform: u32, api: u32);\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/userosc.rs", "rank": 34, "score": 196175.47109186713 }, { "content": "type OffCallback = unsafe extern \"C\" fn(params: &UserOscParams);\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/userosc.rs", "rank": 35, "score": 181764.4103402565 }, { "content": "type OnCallback = unsafe extern \"C\" fn(params: &UserOscParams);\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/userosc.rs", "rank": 36, "score": 181764.4103402565 }, { "content": "type MuteCallback = unsafe extern \"C\" fn(params: &UserOscParams);\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/userosc.rs", "rank": 37, "score": 179588.21001060808 }, { "content": "type DummyCallback = unsafe extern \"C\" fn();\n\n\n\npub const DEFAULT_RESERVED0: [u8; 7] = [0; 7];\n\npub const DEFAULT_RESERVED1: [u8; 5*mem::size_of::<DummyCallback>()] =\n\n [0; 5*mem::size_of::<DummyCallback>()];\n\n\n\n#[repr(C)]\n\n#[repr(packed)]\n\npub struct UserOscHookTable {\n\n pub magic: [u8; 4],\n\n pub api: u32,\n\n pub platform: u8,\n\n pub reserved0: [u8; 7],\n\n\n\n /// Initialization callback. 
Must be implemented by your custom\n\n /// oscillator.\n\n ///\n\n /// The `platform` parameter is the current target platform/module.\n\n ///\n\n /// The `api` parameter is the current API version.\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/userosc.rs", "rank": 38, "score": 139302.5615220178 }, { "content": "fn main() {\n\n let out = &PathBuf::from(env::var_os(\"OUT_DIR\").unwrap());\n\n File::create(out.join(\"userosc.x\"))\n\n .unwrap()\n\n .write_all(include_bytes!(\"scripts/userosc.x\"))\n\n .unwrap();\n\n println!(\"cargo:rustc-link-search={}\", out.display());\n\n println!(\"cargo:rerun-if-changed=scripts/userosc.x\");\n\n println!(\"cargo:rerun-if-changed=build.rs\");\n\n}\n", "file_path": "platform/nutekt-digital/demos/raves/build.rs", "rank": 39, "score": 120785.80734623615 }, { "content": "const F32_TO_Q31_C : u32 = 0x7FFFFFFF; // 2^31\n\nconst Q31_TO_F32_C : f32 = 4.65661287307739e-010f32; // 1 / 2^31\n\n\n", "file_path": "platform/nutekt-digital/demos/raves/src/mathutil.rs", "rank": 40, "score": 92501.38615834426 }, { "content": "use crate::mathutil::*;\n\n\n\npub const K_MIDI_TO_HZ_SIZE: usize = 152;\n\npub const K_NOTE_MOD_FSCALE: f32 = 0.00392156862745098f32;\n\npub const K_NOTE_MAX_HZ: f32 = 23679.643054f32;\n\n\n\npub const K_BITRES_SIZE_EXP : usize = 7;\n\npub const K_BITRES_SIZE : usize = 1 << K_BITRES_SIZE_EXP;\n\npub const K_BITRES_MASK : usize = K_BITRES_SIZE - 1;\n\npub const K_BITRES_LUT_SIZE : usize = K_BITRES_SIZE + 1;\n\n\n\npub const K_TANPI_SIZE_EXP: usize = 8;\n\npub const K_TANPI_SIZE: usize = 1 << K_TANPI_SIZE_EXP;\n\npub const K_TANPI_MASK: usize = K_TANPI_SIZE - 1;\n\npub const K_TANPI_RANGE_RECIP: f32 = 2.04081632653061; // 1/0.49\n\npub const K_TANPI_LUT_SIZE: usize = K_TANPI_SIZE + 1;\n\n\n\npub mod clipsat;\n\npub mod platform;\n\npub mod random;\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1.rs", "rank": 41, "score": 92386.11948310747 }, { "content": "pub mod userosc;\n\npub mod 
wavebank;\n\n\n\nuse platform::*;\n\n\n\nextern \"C\" {\n\n static midi_to_hz_lut_f: [f32; K_MIDI_TO_HZ_SIZE];\n\n static bitres_lut_f: [f32; K_BITRES_LUT_SIZE];\n\n static tanpi_lut_f: [f32; K_TANPI_LUT_SIZE];\n\n}\n\n\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1.rs", "rank": 42, "score": 92377.61881100353 }, { "content": "pub mod biquad;\n", "file_path": "platform/nutekt-digital/demos/raves/src/dsp.rs", "rank": 43, "score": 91359.6914122061 }, { "content": "use core::mem;\n\n\n\n#[repr(C)]\n\npub struct UserOscParams {\n\n /// Value of LFO implicitly applied to shape parameter.\n\n pub shape_lfo: i32,\n\n /// Current pitch. High byte: note number. Low byte: fine (0-255).\n\n pub pitch: u16,\n\n /// Current cutoff value (0x0000-0x1fff).\n\n pub cutoff: u16,\n\n /// Current resonance value (0x0000-0x1fff)\n\n pub resonance: u16,\n\n pub reserved0: [u16; 3],\n\n}\n\n\n\n#[repr(u16)]\n\npub enum UserOscParamId {\n\n /// Edit parameter 1\n\n Id1 = 0,\n\n /// Edit parameter 2\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/userosc.rs", "rank": 44, "score": 90835.84989687991 }, { "content": " pub func_on: OnCallback,\n\n\n\n /// Note off callback. Must be implemented by your custom\n\n /// oscillator.\n\n ///\n\n /// The `params` parameter contains the current realtime parameter\n\n /// state.\n\n pub func_off: OffCallback,\n\n\n\n pub func_mute: MuteCallback,\n\n\n\n pub func_value: ValueCallback,\n\n\n\n /// Parameter change callback. 
Must be implemented by your custom\n\n /// oscillator.\n\n ///\n\n /// The parameter `index` contains the parameter ID (as in\n\n /// `UserOscParamId`) and `value` contains the parameter value.\n\n ///\n\n /// Resolution is 10 bits for shape and shift-shape, 0-200 for\n\n /// bipolar percentage paramters (0% at 100, -100% at 0) and 0-11\n\n /// for unipolar percentage parameters and typeless parameters.\n\n pub func_param: ParamCallback,\n\n\n\n //pub reserved1: [DummyCallback; 5],\n\n // Use bytes for the following so it can be zeroed\n\n pub reserved1: [u8; 5*mem::size_of::<DummyCallback>()],\n\n}\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/userosc.rs", "rank": 45, "score": 90831.65609779788 }, { "content": "use crate::mathutil::linintf;\n\n\n\npub type WaveLUT = [f32; K_WAVES_LUT_SIZE];\n\n\n\nextern \"C\" {\n\n static wavesA: [*const WaveLUT; K_WAVES_A_CNT];\n\n static wavesB: [*const WaveLUT; K_WAVES_B_CNT];\n\n static wavesC: [*const WaveLUT; K_WAVES_C_CNT];\n\n static wavesD: [*const WaveLUT; K_WAVES_D_CNT];\n\n static wavesE: [*const WaveLUT; K_WAVES_E_CNT];\n\n static wavesF: [*const WaveLUT; K_WAVES_F_CNT];\n\n}\n\n\n\npub const K_WAVES_SIZE_EXP : usize = 7;\n\npub const K_WAVES_SIZE : usize = 1 << K_WAVES_SIZE_EXP;\n\npub const K_WAVES_U32_SHIFT : usize = 24;\n\npub const K_WAVES_FRRECIP : f32 = 5.96046447753906e-008;\n\npub const K_WAVES_MASK : usize = K_WAVES_SIZE - 1;\n\npub const K_WAVES_LUT_SIZE : usize = K_WAVES_SIZE + 1;\n\n\n\npub const K_WAVES_A_CNT : usize = 16;\n\npub const K_WAVES_B_CNT : usize = 16;\n\npub const K_WAVES_C_CNT : usize = 14;\n\npub const K_WAVES_D_CNT : usize = 13;\n\npub const K_WAVES_E_CNT : usize = 15;\n\npub const K_WAVES_F_CNT : usize = 16;\n\n\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/wavebank.rs", "rank": 46, "score": 90827.33870164164 }, { "content": "extern \"C\" {\n\n /// Current platform\n\n pub static k_osc_api_platform: u32;\n\n\n\n /// Current API version\n\n pub static 
k_osc_api_version: u32;\n\n\n\n /// Get a MCU-specific \"unique\" hash.\n\n fn _osc_mcu_hash() -> u32;\n\n}\n\n\n\n/// The inverse of the 48,000 Hz sample rate used in the NTS-1.\n\npub const K_SAMPLERATE_RECIP: f32 = 2.08333333333333e-005;\n\n\n\n/// SAMPLERATE_RECIP multiplied by 440.0, since FP math isn't allowed in const fns.\n\npub const K_SR440: f32 = 9.16666666666666e-003;\n\n\n\n/// SAMPLERATE_RECIP multiplied by 220.0, since FP math isn't allowed in const fns.\n\npub const K_SR220: f32 = 4.58333333333333e-003;\n\n\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/platform.rs", "rank": 47, "score": 90818.95630158359 }, { "content": " pub func_entry: InitCallback,\n\n\n\n /// Rendering callback. Must be implemented by your custom oscillator.\n\n ///\n\n /// The `params` parameter contains the current realtime parameter\n\n /// state.\n\n ///\n\n /// The `yn` parameter points to the output buffer (1 sample per\n\n /// frame).\n\n ///\n\n /// The `frames` parameter holds the size of the output buffer.\n\n ///\n\n /// The implementation must support at least up to 64 frames.\n\n /// Optimize it for powers of two.\n\n pub func_cycle: CycleCallback,\n\n\n\n /// Note on callback. 
Must be implemented by your custom oscillator.\n\n ///\n\n /// The `params` parameter contains the current realtime parameter\n\n /// state.\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/userosc.rs", "rank": 48, "score": 90816.89177386671 }, { "content": " Id2,\n\n /// Edit parameter 3\n\n Id3,\n\n /// Edit parameter 4\n\n Id4,\n\n /// Edit parameter 5\n\n Id5,\n\n /// Edit parameter 6\n\n Id6,\n\n /// Shape parameter\n\n Shape,\n\n /// Alternative Shape parameter: generally available via a shift function\n\n ShiftShape,\n\n}\n\n\n\n/// Convert 10-bit parameter value to f32\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/userosc.rs", "rank": 49, "score": 90814.01694706299 }, { "content": "use crate::mathutil::clip1m1f;\n\n\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/clipsat.rs", "rank": 50, "score": 90810.38687348308 }, { "content": "extern \"C\" {\n\n fn _osc_rand() -> u32;\n\n fn _osc_white() -> f32;\n\n}\n\n\n\n/// Returns a random integer in [0, u32::MAX]. 
Generated with\n\n/// Park-Miller-Carta.\n", "file_path": "platform/nutekt-digital/demos/raves/src/nts1/random.rs", "rank": 51, "score": 90810.22313604443 }, { "content": "pub struct Coeffs {\n\n ff0: f32,\n\n ff1: f32,\n\n ff2: f32,\n\n fb1: f32,\n\n fb2: f32,\n\n}\n\n\n\nimpl Coeffs {\n\n pub const fn new() -> Self {\n\n Coeffs {\n\n ff0: 0.0,\n\n ff1: 0.0,\n\n ff2: 0.0,\n\n fb1: 0.0,\n\n fb2: 0.0,\n\n }\n\n }\n\n\n\n pub fn set_pole_lp(&mut self, pole: f32) {\n", "file_path": "platform/nutekt-digital/demos/raves/src/dsp/biquad.rs", "rank": 52, "score": 89840.76633419514 }, { "content": " z1: f32,\n\n // z2: f32, // Included in C++ but unused in the code ported so far\n\n}\n\n\n\nimpl BiQuad {\n\n pub const fn new() -> Self {\n\n BiQuad {\n\n coeffs: Coeffs::new(),\n\n z1: 0.0,\n\n //z2: 0.0,\n\n }\n\n }\n\n\n\n pub fn process_fo(&mut self, xn: f32) -> f32 {\n\n let acc = self.coeffs.ff0 * xn + self.z1;\n\n self.z1 = self.coeffs.ff1 * xn;\n\n self.z1 -= self.coeffs.fb1 * acc;\n\n return acc;\n\n\n\n }\n\n}\n", "file_path": "platform/nutekt-digital/demos/raves/src/dsp/biquad.rs", "rank": 53, "score": 89839.03696260665 }, { "content": " self.ff0 = 1.0 - pole;\n\n self.ff1 = 0.0;\n\n self.ff2 = 0.0;\n\n self.fb1 = -pole;\n\n self.fb2 = 0.0;\n\n }\n\n\n\n pub fn set_folp(&mut self, k: f32) {\n\n let kp1 = k + 1.0;\n\n let km1 = k - 1.0;\n\n self.ff0 = k / kp1;\n\n self.ff1 = k / kp1;\n\n self.ff2 = 0.0;\n\n self.fb1 = km1 / kp1;\n\n self.fb2 = 0.0;\n\n }\n\n}\n\n\n\npub struct BiQuad {\n\n pub coeffs: Coeffs,\n", "file_path": "platform/nutekt-digital/demos/raves/src/dsp/biquad.rs", "rank": 54, "score": 89826.75419917062 }, { "content": " uint8_t pad[USER_PRG_SIG_SIZE];\n", "file_path": "platform/prologue/inc/userprg.h", "rank": 55, "score": 81918.76463480096 }, { "content": " uint8_t pad[USER_PRG_SIG_SIZE];\n", "file_path": "platform/nutekt-digital/inc/userprg.h", "rank": 56, "score": 81163.71763680423 }, { "content": " uint8_t pad[USER_PRG_SIG_SIZE];\n", 
"file_path": "platform/minilogue-xd/inc/userprg.h", "rank": 57, "score": 81163.71763680423 }, { "content": "struct Waves {\n\n\n\n enum {\n\n k_flags_none = 0,\n\n k_flag_wave0 = 1<<1,\n\n k_flag_wave1 = 1<<2,\n\n k_flag_subwave = 1<<3,\n\n k_flag_ringmix = 1<<4,\n\n k_flag_bitcrush = 1<<5,\n\n k_flag_reset = 1<<6\n\n };\n\n \n\n struct Params {\n\n float submix;\n\n float ringmix;\n\n float bitcrush;\n\n float shape;\n\n float shiftshape;\n\n uint8_t wave0;\n\n uint8_t wave1;\n", "file_path": "platform/prologue/demos/waves/waves.hpp", "rank": 58, "score": 75991.45926261909 }, { "content": "struct Waves {\n\n\n\n enum {\n\n k_flags_none = 0,\n\n k_flag_wave0 = 1<<1,\n\n k_flag_wave1 = 1<<2,\n\n k_flag_subwave = 1<<3,\n\n k_flag_ringmix = 1<<4,\n\n k_flag_bitcrush = 1<<5,\n\n k_flag_reset = 1<<6\n\n };\n\n \n\n struct Params {\n\n float submix;\n\n float ringmix;\n\n float bitcrush;\n\n float shape;\n\n float shiftshape;\n\n uint8_t wave0;\n\n uint8_t wave1;\n", "file_path": "platform/minilogue-xd/demos/waves/waves.hpp", "rank": 59, "score": 75183.12232892054 }, { "content": "struct Waves {\n\n\n\n enum {\n\n k_flags_none = 0,\n\n k_flag_wave0 = 1<<1,\n\n k_flag_wave1 = 1<<2,\n\n k_flag_subwave = 1<<3,\n\n k_flag_ringmix = 1<<4,\n\n k_flag_bitcrush = 1<<5,\n\n k_flag_reset = 1<<6\n\n };\n\n \n\n struct Params {\n\n float submix;\n\n float ringmix;\n\n float bitcrush;\n\n float shape;\n\n float shiftshape;\n\n uint8_t wave0;\n\n uint8_t wave1;\n", "file_path": "platform/nutekt-digital/demos/waves/waves.hpp", "rank": 60, "score": 75183.12232892054 }, { "content": "\n\n s_param_z = p_z;\n\n}\n\n\n\n\n\nvoid MODFX_PARAM(uint8_t index, int32_t value)\n\n{\n\n const float valf = q31_to_f32(value);\n\n switch (index) {\n\n case k_user_modfx_param_time:\n\n s_lfo_wave = si_roundf(valf * (k_wave_count - 1));\n\n break;\n\n case k_user_modfx_param_depth:\n\n s_param = valf;\n\n break;\n\n default:\n\n break;\n\n }\n\n}\n\n\n", "file_path": 
"platform/prologue/modfx/tests/src/lfo.cpp", "rank": 61, "score": 46951.26711166084 }, { "content": "void DELFX_PARAM(uint8_t index, int32_t value)\n\n{\n\n const float valf = q31_to_f32(value);\n\n switch (index) {\n\n case k_user_delfx_param_time:\n\n s_lfo_wave = si_roundf(valf * (k_wave_count - 1));\n\n break;\n\n case k_user_delfx_param_depth:\n\n s_param = valf;\n\n break;\n\n case k_user_delfx_param_shift_depth:\n\n s_lfo.setF0(20.f + 420.f * valf, s_fs_recip);\n\n break;\n\n default:\n\n break;\n\n }\n\n}\n\n\n", "file_path": "platform/prologue/delfx/tests/src/lfo.cpp", "rank": 62, "score": 46951.1327414811 }, { "content": "void REVFX_PARAM(uint8_t index, int32_t value)\n\n{\n\n const float valf = q31_to_f32(value);\n\n switch (index) {\n\n case k_user_revfx_param_time:\n\n s_lfo_wave = si_roundf(valf * (k_wave_count - 1));\n\n break;\n\n case k_user_revfx_param_depth:\n\n s_param = valf;\n\n break;\n\n case k_user_revfx_param_shift_depth:\n\n s_lfo.setF0(20.f + 420.f * valf, s_fs_recip);\n\n break;\n\n default:\n\n break;\n\n }\n\n}\n\n\n", "file_path": "platform/prologue/revfx/tests/src/lfo.cpp", "rank": 63, "score": 46951.1327414811 }, { "content": " s_lfo.setF0(220.f,s_fs_recip);\n\n}\n\n\n\nvoid MODFX_PROCESS(const float *main_xn, float *main_yn,\n\n const float *sub_xn, float *sub_yn,\n\n uint32_t frames)\n\n{\n\n float * __restrict my = main_yn;\n\n const float * my_e = my + 2*frames;\n\n float * __restrict sy = sub_yn;\n\n\n\n const float p = s_param;\n\n float p_z = s_param_z;\n\n \n\n for (; my != my_e; ) {\n\n\n\n p_z = linintf(0.002f, p_z, p);\n\n \n\n s_lfo.cycle();\n\n\n", "file_path": "platform/prologue/modfx/tests/src/lfo.cpp", "rank": 64, "score": 46943.64550504802 }, { "content": "/*\n\n * File: lfo.cpp\n\n *\n\n * Simple runtime test using LFO class as audio rate oscillator\n\n *\n\n * \n\n * \n\n * 2018 (c) Korg\n\n *\n\n */\n\n\n\n#include \"userdelfx.h\"\n\n\n\n#include \"simplelfo.hpp\"\n\n\n\nstatic dsp::SimpleLFO s_lfo;\n\n\n\nenum 
{\n\n k_sin = 0,\n\n k_tri,\n", "file_path": "platform/prologue/delfx/tests/src/lfo.cpp", "rank": 65, "score": 46942.9010681727 }, { "content": "/*\n\n * File: lfo.cpp\n\n *\n\n * Simple runtime test using LFO class as audio rate oscillator\n\n *\n\n * \n\n * \n\n * 2018 (c) Korg\n\n *\n\n */\n\n\n\n#include \"usermodfx.h\"\n\n\n\n#include \"simplelfo.hpp\"\n\n\n\nstatic dsp::SimpleLFO s_lfo;\n\n\n\nenum {\n\n k_sin = 0,\n\n k_tri,\n", "file_path": "platform/prologue/modfx/tests/src/lfo.cpp", "rank": 66, "score": 46942.9010681727 }, { "content": " k_saw,\n\n k_sqr,\n\n k_sin_uni,\n\n k_tri_uni,\n\n k_saw_uni,\n\n k_sqr_uni,\n\n k_sin_off,\n\n k_tri_off,\n\n k_saw_off,\n\n k_sqr_off,\n\n k_wave_count\n\n};\n\n\n\nstatic uint8_t s_lfo_wave;\n\nstatic float s_param_z, s_param;\n\nstatic const float s_fs_recip = 1.f / 48000.f;\n\n\n\nvoid MODFX_INIT(uint32_t platform, uint32_t api)\n\n{\n\n s_lfo.reset();\n", "file_path": "platform/prologue/modfx/tests/src/lfo.cpp", "rank": 67, "score": 46941.96621334298 }, { "content": " k_saw,\n\n k_sqr,\n\n k_sin_uni,\n\n k_tri_uni,\n\n k_saw_uni,\n\n k_sqr_uni,\n\n k_sin_off,\n\n k_tri_off,\n\n k_saw_off,\n\n k_sqr_off,\n\n k_wave_count\n\n};\n\n\n\nstatic uint8_t s_lfo_wave;\n\nstatic float s_param_z, s_param;\n\nstatic const float s_fs_recip = 1.f / 48000.f;\n\n\n\nvoid DELFX_INIT(uint32_t platform, uint32_t api)\n\n{\n\n s_lfo.reset();\n", "file_path": "platform/prologue/delfx/tests/src/lfo.cpp", "rank": 68, "score": 46941.96621334298 }, { "content": " k_saw,\n\n k_sqr,\n\n k_sin_uni,\n\n k_tri_uni,\n\n k_saw_uni,\n\n k_sqr_uni,\n\n k_sin_off,\n\n k_tri_off,\n\n k_saw_off,\n\n k_sqr_off,\n\n k_wave_count\n\n};\n\n\n\nstatic uint8_t s_lfo_wave;\n\nstatic float s_param_z, s_param;\n\nstatic const float s_fs_recip = 1.f / 48000.f;\n\n\n\nvoid REVFX_INIT(uint32_t platform, uint32_t api)\n\n{\n\n s_lfo.reset();\n", "file_path": "platform/prologue/revfx/tests/src/lfo.cpp", "rank": 69, "score": 46941.96621334298 }, { "content": " wave 
= s_lfo.triangle_bi_off(s_param_z);\n\n break;\n\n\n\n case k_saw_off:\n\n wave = s_lfo.saw_bi_off(s_param_z);\n\n break;\n\n\n\n case k_sqr_off:\n\n wave = s_lfo.square_bi_off(s_param_z);\n\n break;\n\n }\n\n \n\n // Scale down the wave, full swing is way too loud. (polyphony headroom)\n\n wave *= 0.1f;\n\n \n\n *(my++) = wave;\n\n *(my++) = wave;\n\n *(sy++) = wave;\n\n *(sy++) = wave;\n\n }\n", "file_path": "platform/prologue/modfx/tests/src/lfo.cpp", "rank": 70, "score": 46939.10320534461 }, { "content": " case k_tri_uni:\n\n wave = s_lfo.triangle_uni();\n\n break;\n\n\n\n case k_saw_uni:\n\n wave = s_lfo.saw_uni();\n\n break;\n\n\n\n case k_sqr_uni:\n\n wave = s_lfo.square_uni();\n\n break;\n\n\n\n case k_sin_off:\n\n wave = s_lfo.sine_bi_off(s_param_z);\n\n break;\n\n\n\n case k_tri_off:\n\n wave = s_lfo.triangle_bi_off(s_param_z);\n\n break;\n\n\n", "file_path": "platform/prologue/delfx/tests/src/lfo.cpp", "rank": 71, "score": 46939.04217399168 }, { "content": " case k_tri_uni:\n\n wave = s_lfo.triangle_uni();\n\n break;\n\n\n\n case k_saw_uni:\n\n wave = s_lfo.saw_uni();\n\n break;\n\n\n\n case k_sqr_uni:\n\n wave = s_lfo.square_uni();\n\n break;\n\n\n\n case k_sin_off:\n\n wave = s_lfo.sine_bi_off(s_param_z);\n\n break;\n\n\n\n case k_tri_off:\n\n wave = s_lfo.triangle_bi_off(s_param_z);\n\n break;\n\n\n", "file_path": "platform/prologue/revfx/tests/src/lfo.cpp", "rank": 72, "score": 46939.04217399168 }, { "content": "/*\n\n * File: lfo.cpp\n\n *\n\n * Test SDK LFO\n\n *\n\n * \n\n * \n\n * 2018 (c) Korg\n\n *\n\n */\n\n\n\n#include \"userrevfx.h\"\n\n\n\n#include \"simplelfo.hpp\"\n\n\n\nstatic dsp::SimpleLFO s_lfo;\n\n\n\nenum {\n\n k_sin = 0,\n\n k_tri,\n", "file_path": "platform/prologue/revfx/tests/src/lfo.cpp", "rank": 73, "score": 46939.03249930382 }, { "content": " s_lfo.setF0(220.f,s_fs_recip);\n\n}\n\n\n\nvoid DELFX_PROCESS(float *xn, uint32_t frames)\n\n{\n\n float * __restrict x = xn;\n\n const float * x_e = x + 2*frames;\n\n\n\n const float p 
= s_param;\n\n float p_z = s_param_z;\n\n \n\n for (; x != x_e; ) {\n\n\n\n p_z = linintf(0.002f, p_z, p);\n\n \n\n s_lfo.cycle();\n\n \n\n float wave;\n\n \n\n switch (s_lfo_wave) {\n", "file_path": "platform/prologue/delfx/tests/src/lfo.cpp", "rank": 74, "score": 46938.99279636468 }, { "content": " s_lfo.setF0(220.f,s_fs_recip);\n\n}\n\n\n\nvoid REVFX_PROCESS(float *xn, uint32_t frames)\n\n{\n\n float * __restrict x = xn;\n\n const float * x_e = x + 2*frames;\n\n\n\n const float p = s_param;\n\n float p_z = s_param_z;\n\n \n\n for (; x != x_e; ) {\n\n\n\n p_z = linintf(0.002f, p_z, p);\n\n \n\n s_lfo.cycle();\n\n \n\n float wave;\n\n \n\n switch (s_lfo_wave) {\n", "file_path": "platform/prologue/revfx/tests/src/lfo.cpp", "rank": 75, "score": 46938.99279636468 }, { "content": " case k_saw_off:\n\n wave = s_lfo.saw_bi_off(s_param_z);\n\n break;\n\n\n\n case k_sqr_off:\n\n wave = s_lfo.square_bi_off(s_param_z);\n\n break;\n\n }\n\n \n\n // Scale down the wave, full swing is way too loud. (polyphony headroom)\n\n wave *= 0.025f;\n\n \n\n *(x++) += wave;\n\n *(x++) += wave;\n\n }\n\n\n\n s_param_z = p_z;\n\n}\n\n\n\n\n", "file_path": "platform/prologue/revfx/tests/src/lfo.cpp", "rank": 76, "score": 46938.97134907866 }, { "content": " case k_saw_off:\n\n wave = s_lfo.saw_bi_off(s_param_z);\n\n break;\n\n\n\n case k_sqr_off:\n\n wave = s_lfo.square_bi_off(s_param_z);\n\n break;\n\n }\n\n \n\n // Scale down the wave, full swing is way too loud. 
(polyphony headroom)\n\n wave *= 0.025f;\n\n \n\n *(x++) += wave;\n\n *(x++) += wave;\n\n }\n\n\n\n s_param_z = p_z;\n\n}\n\n\n\n\n", "file_path": "platform/prologue/delfx/tests/src/lfo.cpp", "rank": 77, "score": 46938.97134907866 }, { "content": " wave = s_lfo.sine_uni();\n\n break;\n\n\n\n case k_tri_uni:\n\n wave = s_lfo.triangle_uni();\n\n break;\n\n\n\n case k_saw_uni:\n\n wave = s_lfo.saw_uni();\n\n break;\n\n\n\n case k_sqr_uni:\n\n wave = s_lfo.square_uni();\n\n break;\n\n\n\n case k_sin_off:\n\n wave = s_lfo.sine_bi_off(s_param_z);\n\n break;\n\n\n\n case k_tri_off:\n", "file_path": "platform/prologue/modfx/tests/src/lfo.cpp", "rank": 78, "score": 46938.07734441354 }, { "content": " case k_sin:\n\n wave = s_lfo.sine_bi();\n\n break;\n\n \n\n case k_tri:\n\n wave = s_lfo.triangle_bi();\n\n break;\n\n \n\n case k_saw:\n\n wave = s_lfo.saw_bi();\n\n break;\n\n \n\n case k_sqr:\n\n wave = s_lfo.square_bi();\n\n break;\n\n \n\n case k_sin_uni:\n\n wave = s_lfo.sine_uni();\n\n break;\n\n\n", "file_path": "platform/prologue/delfx/tests/src/lfo.cpp", "rank": 79, "score": 46934.61458701345 }, { "content": " case k_sin:\n\n wave = s_lfo.sine_bi();\n\n break;\n\n \n\n case k_tri:\n\n wave = s_lfo.triangle_bi();\n\n break;\n\n \n\n case k_saw:\n\n wave = s_lfo.saw_bi();\n\n break;\n\n \n\n case k_sqr:\n\n wave = s_lfo.square_bi();\n\n break;\n\n \n\n case k_sin_uni:\n\n wave = s_lfo.sine_uni();\n\n break;\n\n\n", "file_path": "platform/prologue/revfx/tests/src/lfo.cpp", "rank": 80, "score": 46934.61458701345 }, { "content": " float wave;\n\n \n\n switch (s_lfo_wave) {\n\n case k_sin:\n\n wave = s_lfo.sine_bi();\n\n break;\n\n \n\n case k_tri:\n\n wave = s_lfo.triangle_bi();\n\n break;\n\n \n\n case k_saw:\n\n wave = s_lfo.saw_bi();\n\n break;\n\n \n\n case k_sqr:\n\n wave = s_lfo.square_bi();\n\n break;\n\n \n\n case k_sin_uni:\n", "file_path": "platform/prologue/modfx/tests/src/lfo.cpp", "rank": 81, "score": 46934.61458701345 }, { "content": "\n\n /**\n\n * Get 
current value of positive unipolar square wave for phase with offset\n\n *\n\n * @param offset Offset to apply to current phase, in [-1, 1]\n\n */\n\n inline __attribute__((optimize(\"Ofast\"),always_inline))\n\n float square_uni_off(const float offset) \n\n {\n\n const q31_t phi = phi0 + (f32_to_q31(offset)<<1);\n\n return (phi < 0) ? 0.f : 1.f;\n\n }\n\n \n\n /*===========================================================================*/\n\n /* Members Vars */\n\n /*===========================================================================*/\n\n \n\n q31_t phi0;\n\n q31_t w0;\n\n \n\n };\n\n}\n\n\n\n/** @} */\n", "file_path": "platform/prologue/inc/dsp/simplelfo.hpp", "rank": 82, "score": 46894.9437686753 }, { "content": " {\n\n return q31_to_f32(qadd((phi0>>1),0x40000000));\n\n } \n\n\n\n /**\n\n * Get current value of bipolar saw wave for phase with offset\n\n *\n\n * @param offset Offset to apply to current phase, in [-1, 1]\n\n */\n\n inline __attribute__((optimize(\"Ofast\"),always_inline))\n\n float saw_bi_off(const float offset) \n\n {\n\n const q31_t phi = phi0 + (f32_to_q31(offset)<<1);\n\n return q31_to_f32(phi);\n\n } \n\n\n\n /**\n\n * Get current value of positive unipolar saw wave for phase with offset\n\n *\n\n * @param offset Offset to apply to current phase, in [-1, 1]\n", "file_path": "platform/prologue/inc/dsp/simplelfo.hpp", "rank": 83, "score": 46893.361044374185 }, { "content": " *\n\n * @param offset Offset to apply to current phase, in [-1, 1]\n\n */\n\n inline __attribute__((optimize(\"Ofast\"),always_inline))\n\n float sine_bi_off(const float offset) \n\n {\n\n const float phi = q31_to_f32(phi0 + f32_to_q31(2*offset));\n\n return 4 * phi * (si_fabsf(phi) - 1.f);\n\n } \n\n\n\n /**\n\n * Get current value of positive unipolar sine wave for phase with offset\n\n *\n\n * @param offset Offset to apply to current phase, in [-1, 1]\n\n */\n\n inline __attribute__((optimize(\"Ofast\"),always_inline))\n\n float sine_uni_off(const float offset) 
\n\n {\n\n const float phi = q31_to_f32(phi0 + f32_to_q31(2*offset));\n\n return 0.5f + 2 * phi * (si_fabsf(phi) - 1.f);\n", "file_path": "platform/prologue/inc/dsp/simplelfo.hpp", "rank": 84, "score": 46892.85083243655 }, { "content": " {\n\n phi0 += w0;\n\n }\n\n\n\n /**\n\n * Reset phase\n\n */\n\n inline __attribute__((optimize(\"Ofast\"),always_inline))\n\n void reset(void) \n\n {\n\n phi0 = 0x80000000;\n\n }\n\n\n\n /**\n\n * Set LFO frequency\n\n *\n\n * param f0 Frequency in Hz\n\n * param fsrecip Reciprocal of sampling frequency (1/Fs)\n\n */\n\n inline __attribute__((optimize(\"Ofast\"),always_inline))\n", "file_path": "platform/prologue/inc/dsp/simplelfo.hpp", "rank": 85, "score": 46892.801023568914 }, { "content": " void setF0(const float f0, const float fsrecip) \n\n {\n\n w0 = f32_to_q31(2.f * f0 * fsrecip);\n\n }\n\n\n\n /**\n\n * Set LFO frequency in radians\n\n *\n\n * @param w Frequency in radians\n\n */\n\n inline __attribute__((optimize(\"Ofast\"),always_inline))\n\n void setW0(const float w) \n\n {\n\n w0 = f32_to_q31(2.f * w);\n\n }\n\n \n\n // --- Sinusoids --------------\n\n\n\n /**\n\n * Get value of bipolar sine wave for current phase \n", "file_path": "platform/prologue/inc/dsp/simplelfo.hpp", "rank": 86, "score": 46891.47832267108 }, { "content": " /**\n\n * Get value of positive unipolar square wave for current phase \n\n */\n\n inline __attribute__((optimize(\"Ofast\"),always_inline))\n\n float square_uni(void) \n\n {\n\n return (phi0 < 0) ? 0.f : 1.f;\n\n } \n\n\n\n /**\n\n * Get current value of bipolar square wave for phase with offset\n\n *\n\n * @param offset Offset to apply to current phase, in [-1, 1]\n\n */\n\n inline __attribute__((optimize(\"Ofast\"),always_inline))\n\n float square_bi_off(const float offset) \n\n {\n\n const q31_t phi = phi0 + (f32_to_q31(offset)<<1);\n\n return (phi < 0) ? 
-1.f : 1.f;\n\n } \n", "file_path": "platform/prologue/inc/dsp/simplelfo.hpp", "rank": 87, "score": 46890.962849366886 }, { "content": "\n\n /**\n\n * Get current value of bipolar triangle wave for phase with offset\n\n *\n\n * @param offset Offset to apply to current phase, in [-1, 1]\n\n */\n\n inline __attribute__((optimize(\"Ofast\"),always_inline))\n\n float triangle_bi_off(const float offset) \n\n {\n\n const q31_t phi = phi0 + f32_to_q31(2*offset);\n\n return q31_to_f32(qsub(q31abs(phi),0x40000000)<<1);\n\n } \n\n\n\n /**\n\n * Get current value of positive unipolar triangle wave for phase with offset\n\n *\n\n * @param offset Offset to apply to current phase, in [-1, 1]\n\n */\n\n inline __attribute__((optimize(\"Ofast\"),always_inline))\n\n float triangle_uni_off(const float offset) \n", "file_path": "platform/prologue/inc/dsp/simplelfo.hpp", "rank": 88, "score": 46890.85756346134 }, { "content": "use std::env;\n\nuse std::fs::File;\n\nuse std::io::Write;\n\nuse std::path::PathBuf;\n", "file_path": "platform/nutekt-digital/demos/raves/build.rs", "rank": 89, "score": 46890.352262193795 }, { "content": " {\n\n const float phi = q31_to_f32(phi0 + f32_to_q31(2*offset));\n\n return si_fabsf(phi); \n\n }\n\n \n\n // --- Saws --------------\n\n /**\n\n * Get current value of bipolar saw wave for current phase\n\n */\n\n inline __attribute__((optimize(\"Ofast\"),always_inline))\n\n float saw_bi(void) \n\n {\n\n return q31_to_f32(phi0);\n\n } \n\n\n\n /**\n\n * Get value of positive unipolar saw wave for current phase \n\n */\n\n inline __attribute__((optimize(\"Ofast\"),always_inline))\n\n float saw_uni(void) \n", "file_path": "platform/prologue/inc/dsp/simplelfo.hpp", "rank": 90, "score": 46889.37329371347 }, { "content": " */\n\n inline __attribute__((optimize(\"Ofast\"),always_inline))\n\n float saw_uni_off(const float offset) \n\n {\n\n q31_t phi = phi0 + (f32_to_q31(offset)<<1); \n\n return 0.5f * phi + 0.5f;\n\n phi >>= 1;\n\n return 
q31_to_f32(qadd(phi,0x40000000));\n\n }\n\n \n\n // --- Squares --------------\n\n /**\n\n * Get current value of bipolar square wave for current phase\n\n */\n\n inline __attribute__((optimize(\"Ofast\"),always_inline))\n\n float square_bi(void) \n\n {\n\n return (phi0 < 0) ? -1.f : 1.f;\n\n } \n\n\n", "file_path": "platform/prologue/inc/dsp/simplelfo.hpp", "rank": 91, "score": 46889.164941912335 }, { "content": " buf_clr_f32((float *)mLine, mSize);\n\n }\n\n\n\n /**\n\n * Set the memory area to use as backing buffer for the delay line.\n\n *\n\n * @param ram Pointer to memory buffer\n\n * @param line_size Size in float of memory buffer\n\n *\n\n * @note Will round size to next power of two.\n\n */\n\n inline __attribute__((optimize(\"Ofast\"),always_inline))\n\n void setMemory(float *ram, size_t line_size) {\n\n mLine = ram;\n\n mSize = nextpow2_u32(line_size); // must be power of 2\n\n mMask = (mSize-1);\n\n mWriteIdx = 0;\n\n }\n\n\n\n /**\n", "file_path": "platform/prologue/inc/dsp/delayline.hpp", "rank": 92, "score": 46887.775142861006 }, { "content": " inline __attribute__((optimize(\"Ofast\"),always_inline))\n\n void setMemory(f32pair_t *ram, size_t line_size) {\n\n mLine = ram;\n\n mSize = nextpow2_u32(line_size); // must be power of 2\n\n mMask = (mSize-1);\n\n mWriteIdx = 0;\n\n }\n\n\n\n /**\n\n * Write a sample pair to the delay line\n\n *\n\n * @param p Reference to float pair.\n\n */\n\n inline __attribute__((optimize(\"Ofast\"),always_inline))\n\n void write(const f32pair_t &p) {\n\n mLine[(mWriteIdx--) & mMask] = p;\n\n }\n\n\n\n /**\n\n * Read a sample pair from the delay line at given position from current write index.\n", "file_path": "platform/prologue/inc/dsp/delayline.hpp", "rank": 93, "score": 46884.73211702962 }, { "content": " } \n\n\n\n // --- Triangles --------------\n\n /**\n\n * Get current value of bipolar triangle wave for current phase\n\n */\n\n inline __attribute__((optimize(\"Ofast\"),always_inline))\n\n float triangle_bi(void) 
\n\n {\n\n return q31_to_f32(qsub(q31abs(phi0),0x40000000)<<1);\n\n } \n\n\n\n /**\n\n * Get value of positive unipolar triangle wave for current phase \n\n */\n\n inline __attribute__((optimize(\"Ofast\"),always_inline))\n\n float triangle_uni(void) \n\n {\n\n return si_fabsf(q31_to_f32(phi0));\n\n } \n", "file_path": "platform/prologue/inc/dsp/simplelfo.hpp", "rank": 94, "score": 46884.42736900362 }, { "content": " * Write a single sample to the head of the delay line\n\n *\n\n * @param s Sample to write\n\n */\n\n inline __attribute__((optimize(\"Ofast\"),always_inline))\n\n void write(const float s) {\n\n mLine[(mWriteIdx--) & mMask] = s;\n\n }\n\n\n\n /**\n\n * Read a single sample from the delay line at given position from current write index.\n\n *\n\n * @param pos Offset from write index\n\n * @return Sample at given position from write index\n\n */\n\n inline __attribute__((optimize(\"Ofast\"),always_inline))\n\n float read(const uint32_t pos) {\n\n return mLine[(mWriteIdx + pos) & mMask];\n\n }\n\n\n", "file_path": "platform/prologue/inc/dsp/delayline.hpp", "rank": 95, "score": 46884.276577733304 }, { "content": " /**\n\n * Read a single sample from the delay line's primary channel at given position from current write index.\n\n *\n\n * @param pos Offset from write index.\n\n * @return Sample at given position from write index\n\n */\n\n inline __attribute__((optimize(\"Ofast\"),always_inline))\n\n float read0(const uint32_t pos) {\n\n return (mLine[(mWriteIdx + pos) & mMask]).a;\n\n }\n\n\n\n /**\n\n * Read a single sample from the delay line's secondary channel at given position from current write index.\n\n *\n\n * @param pos Offset from write index.\n\n * @return Sample at given position from write index\n\n */\n\n inline __attribute__((optimize(\"Ofast\"),always_inline))\n\n float read1(const uint32_t pos) {\n\n return (mLine[(mWriteIdx + pos) & mMask]).b;\n", "file_path": "platform/prologue/inc/dsp/delayline.hpp", "rank": 96, "score": 
46884.255528711765 } ]
Rust
gstreamer/src/subclass/pad.rs
snapview/gstreamer-rs
a1da562e960208572f124d5a8b878dc0e11b03f5
use gst_sys; use glib; use glib::translate::*; use glib::subclass::prelude::*; use Pad; use PadClass; pub trait PadImpl: PadImplExt + ObjectImpl + Send + Sync { fn linked(&self, pad: &Pad, peer: &Pad) { self.parent_linked(pad, peer) } fn unlinked(&self, pad: &Pad, peer: &Pad) { self.parent_unlinked(pad, peer) } } pub trait PadImplExt { fn parent_linked(&self, pad: &Pad, peer: &Pad); fn parent_unlinked(&self, pad: &Pad, peer: &Pad); } impl<T: PadImpl> PadImplExt for T { fn parent_linked(&self, pad: &Pad, peer: &Pad) { unsafe { let data = T::type_data(); let parent_class = data.as_ref().get_parent_class() as *mut gst_sys::GstPadClass; (*parent_class) .linked .map(|f| f(pad.to_glib_none().0, peer.to_glib_none().0)) .unwrap_or(()) } } fn parent_unlinked(&self, pad: &Pad, peer: &Pad) { unsafe { let data = T::type_data(); let parent_class = data.as_ref().get_parent_class() as *mut gst_sys::GstPadClass; (*parent_class) .unlinked .map(|f| f(pad.to_glib_none().0, peer.to_glib_none().0)) .unwrap_or(()) } } } unsafe impl<T: PadImpl> IsSubclassable<T> for PadClass { fn override_vfuncs(&mut self) { <glib::ObjectClass as IsSubclassable<T>>::override_vfuncs(self); unsafe { let klass = &mut *(self as *mut Self as *mut gst_sys::GstPadClass); klass.linked = Some(pad_linked::<T>); klass.unlinked = Some(pad_unlinked::<T>); } } } unsafe extern "C" fn pad_linked<T: PadImpl>(ptr: *mut gst_sys::GstPad, peer: *mut gst_sys::GstPad) { let instance = &*(ptr as *mut T::Instance); let imp = instance.get_impl(); let wrap: Borrowed<Pad> = from_glib_borrow(ptr); imp.linked(&wrap, &from_glib_borrow(peer)) } unsafe extern "C" fn pad_unlinked<T: PadImpl>( ptr: *mut gst_sys::GstPad, peer: *mut gst_sys::GstPad, ) { let instance = &*(ptr as *mut T::Instance); let imp = instance.get_impl(); let wrap: Borrowed<Pad> = from_glib_borrow(ptr); imp.unlinked(&wrap, &from_glib_borrow(peer)) } #[cfg(test)] mod tests { use super::*; use crate::prelude::*; use glib; use glib::subclass; use std::sync::atomic; struct 
TestPad { linked: atomic::AtomicBool, unlinked: atomic::AtomicBool, } impl ObjectSubclass for TestPad { const NAME: &'static str = "TestPad"; type ParentType = ::Pad; type Instance = subclass::simple::InstanceStruct<Self>; type Class = subclass::simple::ClassStruct<Self>; glib_object_subclass!(); fn new() -> Self { Self { linked: atomic::AtomicBool::new(false), unlinked: atomic::AtomicBool::new(false), } } } impl ObjectImpl for TestPad {} impl PadImpl for TestPad { fn linked(&self, pad: &Pad, peer: &Pad) { self.linked.store(true, atomic::Ordering::SeqCst); self.parent_linked(pad, peer) } fn unlinked(&self, pad: &Pad, peer: &Pad) { self.unlinked.store(true, atomic::Ordering::SeqCst); self.parent_unlinked(pad, peer) } } #[test] fn test_pad_subclass() { ::init().unwrap(); let pad = glib::Object::new( TestPad::get_type(), &[("name", &"test"), ("direction", &::PadDirection::Src)], ) .unwrap() .downcast::<::Pad>() .unwrap(); assert_eq!(pad.get_name(), "test"); let otherpad = ::Pad::new(Some("other-test"), ::PadDirection::Sink); pad.link(&otherpad).unwrap(); pad.unlink(&otherpad).unwrap(); let imp = TestPad::from_instance(&pad); assert!(imp.linked.load(atomic::Ordering::SeqCst)); assert!(imp.unlinked.load(atomic::Ordering::SeqCst)); } }
use gst_sys; use glib; use glib::translate::*; use glib::subclass::prelude::*; use Pad; use PadClass; pub trait PadImpl: PadImplExt + ObjectImpl + Send + Sync { fn linked(&self, pad: &Pad, peer: &Pad) { self.parent_linked(pad, peer) } fn unlinked(&self, pad: &Pad, peer: &Pad) { self.parent_unlinked(pad, peer) } } pub trait PadImplExt { fn parent_linked(&self, pad: &Pad, peer: &Pad); fn parent_unlinked(&self, pad: &Pad, peer: &Pad); } impl<T: PadImpl> PadImplExt for T { fn parent_linked(&self, pad: &Pad, peer: &Pad) { unsafe { let data = T::type_data(); let parent_class = data.as_ref().get_parent_class() as *mut gst_sys::GstPadClass; (*parent_class) .linked .map(|f| f(pad.to_glib_none().0, peer.to_glib_none().0)) .unwrap_or(()) } } fn parent_unlinked(&self, pad: &Pad, peer: &Pad) { unsafe { let data = T::type_data(); let paren
.unwrap_or(()) } } } unsafe impl<T: PadImpl> IsSubclassable<T> for PadClass { fn override_vfuncs(&mut self) { <glib::ObjectClass as IsSubclassable<T>>::override_vfuncs(self); unsafe { let klass = &mut *(self as *mut Self as *mut gst_sys::GstPadClass); klass.linked = Some(pad_linked::<T>); klass.unlinked = Some(pad_unlinked::<T>); } } } unsafe extern "C" fn pad_linked<T: PadImpl>(ptr: *mut gst_sys::GstPad, peer: *mut gst_sys::GstPad) { let instance = &*(ptr as *mut T::Instance); let imp = instance.get_impl(); let wrap: Borrowed<Pad> = from_glib_borrow(ptr); imp.linked(&wrap, &from_glib_borrow(peer)) } unsafe extern "C" fn pad_unlinked<T: PadImpl>( ptr: *mut gst_sys::GstPad, peer: *mut gst_sys::GstPad, ) { let instance = &*(ptr as *mut T::Instance); let imp = instance.get_impl(); let wrap: Borrowed<Pad> = from_glib_borrow(ptr); imp.unlinked(&wrap, &from_glib_borrow(peer)) } #[cfg(test)] mod tests { use super::*; use crate::prelude::*; use glib; use glib::subclass; use std::sync::atomic; struct TestPad { linked: atomic::AtomicBool, unlinked: atomic::AtomicBool, } impl ObjectSubclass for TestPad { const NAME: &'static str = "TestPad"; type ParentType = ::Pad; type Instance = subclass::simple::InstanceStruct<Self>; type Class = subclass::simple::ClassStruct<Self>; glib_object_subclass!(); fn new() -> Self { Self { linked: atomic::AtomicBool::new(false), unlinked: atomic::AtomicBool::new(false), } } } impl ObjectImpl for TestPad {} impl PadImpl for TestPad { fn linked(&self, pad: &Pad, peer: &Pad) { self.linked.store(true, atomic::Ordering::SeqCst); self.parent_linked(pad, peer) } fn unlinked(&self, pad: &Pad, peer: &Pad) { self.unlinked.store(true, atomic::Ordering::SeqCst); self.parent_unlinked(pad, peer) } } #[test] fn test_pad_subclass() { ::init().unwrap(); let pad = glib::Object::new( TestPad::get_type(), &[("name", &"test"), ("direction", &::PadDirection::Src)], ) .unwrap() .downcast::<::Pad>() .unwrap(); assert_eq!(pad.get_name(), "test"); let otherpad = 
::Pad::new(Some("other-test"), ::PadDirection::Sink); pad.link(&otherpad).unwrap(); pad.unlink(&otherpad).unwrap(); let imp = TestPad::from_instance(&pad); assert!(imp.linked.load(atomic::Ordering::SeqCst)); assert!(imp.unlinked.load(atomic::Ordering::SeqCst)); } }
t_class = data.as_ref().get_parent_class() as *mut gst_sys::GstPadClass; (*parent_class) .unlinked .map(|f| f(pad.to_glib_none().0, peer.to_glib_none().0))
function_block-random_span
[ { "content": "pub trait ChildProxyImpl: ObjectImpl + Send + Sync {\n\n fn get_child_by_name(&self, object: &ChildProxy, name: &str) -> Option<glib::Object> {\n\n unsafe {\n\n let type_ = gst_sys::gst_child_proxy_get_type();\n\n let iface = gobject_sys::g_type_default_interface_ref(type_)\n\n as *mut gst_sys::GstChildProxyInterface;\n\n assert!(!iface.is_null());\n\n\n\n let ret = ((*iface).get_child_by_name.as_ref().unwrap())(\n\n object.to_glib_none().0,\n\n name.to_glib_none().0,\n\n );\n\n\n\n gobject_sys::g_type_default_interface_unref(iface as glib_sys::gpointer);\n\n\n\n from_glib_full(ret)\n\n }\n\n }\n\n\n\n fn get_child_by_index(&self, object: &ChildProxy, index: u32) -> Option<glib::Object>;\n", "file_path": "gstreamer/src/subclass/child_proxy.rs", "rank": 1, "score": 322296.15011709207 }, { "content": "pub trait ClockImpl: ClockImplExt + ObjectImpl + Send + Sync {\n\n fn change_resolution(\n\n &self,\n\n clock: &Clock,\n\n old_resolution: ClockTime,\n\n new_resolution: ClockTime,\n\n ) -> ClockTime {\n\n self.parent_change_resolution(clock, old_resolution, new_resolution)\n\n }\n\n\n\n fn get_resolution(&self, clock: &Clock) -> ClockTime {\n\n self.parent_get_resolution(clock)\n\n }\n\n\n\n fn get_internal_time(&self, clock: &Clock) -> ClockTime {\n\n self.parent_get_internal_time(clock)\n\n }\n\n\n\n fn wait(\n\n &self,\n", "file_path": "gstreamer/src/subclass/clock.rs", "rank": 2, "score": 310055.91430903727 }, { "content": "pub trait ElementImpl: ElementImplExt + ObjectImpl + Send + Sync {\n\n fn change_state(\n\n &self,\n\n element: &::Element,\n\n transition: StateChange,\n\n ) -> Result<StateChangeSuccess, StateChangeError> {\n\n self.parent_change_state(element, transition)\n\n }\n\n\n\n fn request_new_pad(\n\n &self,\n\n element: &::Element,\n\n templ: &::PadTemplate,\n\n name: Option<String>,\n\n caps: Option<&::Caps>,\n\n ) -> Option<::Pad> {\n\n self.parent_request_new_pad(element, templ, name, caps)\n\n }\n\n\n\n fn release_pad(&self, 
element: &::Element, pad: &::Pad) {\n", "file_path": "gstreamer/src/subclass/element.rs", "rank": 3, "score": 310055.91430903727 }, { "content": "pub trait DeviceImpl: DeviceImplExt + ObjectImpl + Send + Sync {\n\n fn create_element(\n\n &self,\n\n device: &Device,\n\n name: Option<&str>,\n\n ) -> Result<Element, LoggableError> {\n\n self.parent_create_element(device, name)\n\n }\n\n\n\n fn reconfigure_element(&self, device: &Device, element: &Element) -> Result<(), LoggableError> {\n\n self.parent_reconfigure_element(device, element)\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/subclass/device.rs", "rank": 4, "score": 310055.91430903727 }, { "content": "pub trait DeviceProviderImpl: DeviceProviderImplExt + ObjectImpl + Send + Sync {\n\n fn probe(&self, device_provider: &DeviceProvider) -> Vec<Device> {\n\n self.parent_probe(device_provider)\n\n }\n\n\n\n fn start(&self, device_provider: &DeviceProvider) -> Result<(), LoggableError> {\n\n self.parent_start(device_provider)\n\n }\n\n\n\n fn stop(&self, device_provider: &DeviceProvider) {\n\n self.parent_stop(device_provider)\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/subclass/device_provider.rs", "rank": 5, "score": 299110.8684221497 }, { "content": "pub trait RTSPServerImpl: RTSPServerImplExt + ObjectImpl + Send + Sync {\n\n fn create_client(&self, server: &RTSPServer) -> Option<::RTSPClient> {\n\n self.parent_create_client(server)\n\n }\n\n\n\n fn client_connected(&self, server: &RTSPServer, client: &::RTSPClient) {\n\n self.parent_client_connected(server, client);\n\n }\n\n}\n\n\n", "file_path": "gstreamer-rtsp-server/src/subclass/rtsp_server.rs", "rank": 6, "score": 292536.54200043506 }, { "content": "pub trait RTSPClientImpl: RTSPClientImplExt + ObjectImpl + Send + Sync {\n\n fn create_sdp(&self, client: &RTSPClient, media: &::RTSPMedia) -> Option<gst_sdp::SDPMessage> {\n\n self.parent_create_sdp(client, media)\n\n }\n\n\n\n fn configure_client_media(\n\n &self,\n\n client: &RTSPClient,\n\n media: 
&::RTSPMedia,\n\n stream: &::RTSPStream,\n\n ctx: &::RTSPContext,\n\n ) -> Result<(), gst::LoggableError> {\n\n self.parent_configure_client_media(client, media, stream, ctx)\n\n }\n\n\n\n // TODO: configure_client_transport\n\n\n\n fn params_set(&self, client: &RTSPClient, ctx: &::RTSPContext) -> gst_rtsp::RTSPResult {\n\n self.parent_params_set(client, ctx)\n\n }\n", "file_path": "gstreamer-rtsp-server/src/subclass/rtsp_client.rs", "rank": 7, "score": 292536.54200043506 }, { "content": "pub trait RTSPMediaImpl: RTSPMediaImplExt + ObjectImpl + Send + Sync {\n\n fn handle_message(&self, media: &RTSPMedia, message: &gst::MessageRef) -> bool {\n\n self.parent_handle_message(media, message)\n\n }\n\n\n\n fn prepare(&self, media: &RTSPMedia, thread: &RTSPThread) -> Result<(), gst::LoggableError> {\n\n self.parent_prepare(media, thread)\n\n }\n\n\n\n fn unprepare(&self, media: &RTSPMedia) -> Result<(), gst::LoggableError> {\n\n self.parent_unprepare(media)\n\n }\n\n\n\n fn suspend(&self, media: &RTSPMedia) -> Result<(), gst::LoggableError> {\n\n self.parent_suspend(media)\n\n }\n\n\n\n fn unsuspend(&self, media: &RTSPMedia) -> Result<(), gst::LoggableError> {\n\n self.parent_unsuspend(media)\n\n }\n", "file_path": "gstreamer-rtsp-server/src/subclass/rtsp_media.rs", "rank": 8, "score": 292536.54200043506 }, { "content": "pub trait RTSPMediaFactoryImpl: RTSPMediaFactoryImplExt + ObjectImpl + Send + Sync {\n\n fn gen_key(\n\n &self,\n\n factory: &RTSPMediaFactory,\n\n url: &gst_rtsp::RTSPUrl,\n\n ) -> Option<glib::GString> {\n\n self.parent_gen_key(factory, url)\n\n }\n\n\n\n fn create_element(\n\n &self,\n\n factory: &RTSPMediaFactory,\n\n url: &gst_rtsp::RTSPUrl,\n\n ) -> Option<gst::Element> {\n\n self.parent_create_element(factory, url)\n\n }\n\n\n\n fn construct(\n\n &self,\n\n factory: &RTSPMediaFactory,\n", "file_path": "gstreamer-rtsp-server/src/subclass/rtsp_media_factory.rs", "rank": 9, "score": 283596.59727708215 }, { "content": "pub trait GhostPadImpl: PadImpl 
{}\n\n\n\nunsafe impl<T: GhostPadImpl> IsSubclassable<T> for GhostPadClass {\n\n fn override_vfuncs(&mut self) {\n\n <::PadClass as IsSubclassable<T>>::override_vfuncs(self);\n\n unsafe {\n\n let _klass = &mut *(self as *mut Self as *mut gst_sys::GstGhostPadClass);\n\n // Nothing to do here\n\n }\n\n }\n\n}\n", "file_path": "gstreamer/src/subclass/ghost_pad.rs", "rank": 11, "score": 256990.4652416393 }, { "content": "fn into_raw_pad_task<F: FnMut() + Send + 'static>(func: F) -> gpointer {\n\n #[allow(clippy::type_complexity)]\n\n let func: Box<RefCell<F>> = Box::new(RefCell::new(func));\n\n Box::into_raw(func) as gpointer\n\n}\n\n\n\nunsafe extern \"C\" fn destroy_closure_pad_task<F>(ptr: gpointer) {\n\n Box::<RefCell<F>>::from_raw(ptr as *mut _);\n\n}\n\n\n\nimpl Pad {\n\n pub fn new(name: Option<&str>, direction: ::PadDirection) -> Self {\n\n skip_assert_initialized!();\n\n Self::builder(name, direction).build()\n\n }\n\n\n\n pub fn builder(name: Option<&str>, direction: ::PadDirection) -> PadBuilder<Self> {\n\n skip_assert_initialized!();\n\n PadBuilder::new(name, direction)\n\n }\n", "file_path": "gstreamer/src/pad.rs", "rank": 12, "score": 255064.61014723347 }, { "content": "fn into_raw_sync<F: Fn(&Bus, &Message) -> BusSyncReply + Send + Sync + 'static>(\n\n func: F,\n\n) -> gpointer {\n\n let func: Box<F> = Box::new(func);\n\n Box::into_raw(func) as gpointer\n\n}\n\n\n\nimpl Bus {\n\n pub fn add_signal_watch_full(&self, priority: Priority) {\n\n unsafe {\n\n gst_sys::gst_bus_add_signal_watch_full(self.to_glib_none().0, priority.to_glib());\n\n }\n\n }\n\n\n\n pub fn create_watch<F>(&self, name: Option<&str>, priority: Priority, func: F) -> glib::Source\n\n where\n\n F: FnMut(&Bus, &Message) -> Continue + Send + 'static,\n\n {\n\n skip_assert_initialized!();\n\n unsafe {\n", "file_path": "gstreamer/src/bus.rs", "rank": 13, "score": 247049.80362560856 }, { "content": "pub trait AggregatorPadImpl: AggregatorPadImplExt + PadImpl {\n\n fn flush(\n\n &self,\n\n 
aggregator_pad: &AggregatorPad,\n\n aggregator: &Aggregator,\n\n ) -> Result<gst::FlowSuccess, gst::FlowError> {\n\n self.parent_flush(aggregator_pad, aggregator)\n\n }\n\n\n\n fn skip_buffer(\n\n &self,\n\n aggregator_pad: &AggregatorPad,\n\n aggregator: &Aggregator,\n\n buffer: &gst::Buffer,\n\n ) -> bool {\n\n self.parent_skip_buffer(aggregator_pad, aggregator, buffer)\n\n }\n\n}\n\n\n", "file_path": "gstreamer-base/src/subclass/aggregator_pad.rs", "rank": 14, "score": 241779.5464434229 }, { "content": "pub fn init() -> Result<(), glib::Error> {\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n if from_glib(gst_sys::gst_init_check(\n\n ptr::null_mut(),\n\n ptr::null_mut(),\n\n &mut error,\n\n )) {\n\n Ok(())\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n\n/// Deinitialize GStreamer\n\n///\n\n/// # Safety\n\n///\n\n/// This must only be called once during the lifetime of the process, once no GStreamer threads\n", "file_path": "gstreamer/src/lib.rs", "rank": 15, "score": 235607.90681235067 }, { "content": "pub trait PadExt: 'static {\n\n fn activate_mode(&self, mode: PadMode, active: bool) -> Result<(), glib::error::BoolError>;\n\n\n\n fn can_link<P: IsA<Pad>>(&self, sinkpad: &P) -> bool;\n\n\n\n fn check_reconfigure(&self) -> bool;\n\n\n\n fn create_stream_id<P: IsA<Element>>(\n\n &self,\n\n parent: &P,\n\n stream_id: Option<&str>,\n\n ) -> Option<GString>;\n\n\n\n //fn create_stream_id_printf<P: IsA<Element>>(&self, parent: &P, stream_id: Option<&str>, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) -> Option<GString>;\n\n\n\n //fn create_stream_id_printf_valist<P: IsA<Element>>(&self, parent: &P, stream_id: Option<&str>, var_args: /*Unknown conversion*//*Unimplemented*/Unsupported) -> Option<GString>;\n\n\n\n fn forward<P: FnMut(&Pad) -> bool>(&self, forward: P) -> bool;\n\n\n\n fn get_allowed_caps(&self) -> Option<Caps>;\n", "file_path": "gstreamer/src/auto/pad.rs", "rank": 16, "score": 226217.3492323556 }, { "content": 
"pub trait PadExtManual: 'static {\n\n fn add_probe<F>(&self, mask: PadProbeType, func: F) -> Option<PadProbeId>\n\n where\n\n F: Fn(&Self, &mut PadProbeInfo) -> PadProbeReturn + Send + Sync + 'static;\n\n fn remove_probe(&self, id: PadProbeId);\n\n\n\n fn chain(&self, buffer: Buffer) -> Result<FlowSuccess, FlowError>;\n\n fn push(&self, buffer: Buffer) -> Result<FlowSuccess, FlowError>;\n\n\n\n fn chain_list(&self, list: BufferList) -> Result<FlowSuccess, FlowError>;\n\n fn push_list(&self, list: BufferList) -> Result<FlowSuccess, FlowError>;\n\n\n\n fn pull_range(&self, offset: u64, size: u32) -> Result<Buffer, FlowError>;\n\n fn pull_range_fill(\n\n &self,\n\n offset: u64,\n\n buffer: &mut ::BufferRef,\n\n size: u32,\n\n ) -> Result<(), FlowError>;\n\n fn get_range(&self, offset: u64, size: u32) -> Result<Buffer, FlowError>;\n", "file_path": "gstreamer/src/pad.rs", "rank": 17, "score": 226217.3492323556 }, { "content": "pub fn init() -> Result<(), glib::BoolError> {\n\n if gst::init().is_err() {\n\n return Err(glib_bool_error!(\"Could not initialize GStreamer.\"));\n\n }\n\n\n\n unsafe {\n\n if from_glib(ges_sys::ges_init()) {\n\n Ok(())\n\n } else {\n\n Err(glib_bool_error!(\"Could not initialize GES.\"))\n\n }\n\n }\n\n}\n\n\n\npub unsafe fn deinit() {\n\n ges_sys::ges_deinit();\n\n}\n\n\n\nmacro_rules! 
assert_initialized_main_thread {\n\n () => {\n", "file_path": "gstreamer-editing-services/src/lib.rs", "rank": 18, "score": 224821.32812342432 }, { "content": "pub trait MessageErrorDomain: glib::error::ErrorDomain {}\n\n\n\nimpl MessageErrorDomain for ::CoreError {}\n\nimpl MessageErrorDomain for ::ResourceError {}\n\nimpl MessageErrorDomain for ::StreamError {}\n\nimpl MessageErrorDomain for ::LibraryError {}\n\n\n\npub struct ErrorBuilder<'a, T> {\n\n builder: MessageBuilder<'a>,\n\n error: T,\n\n message: &'a str,\n\n debug: Option<&'a str>,\n\n #[allow(unused)]\n\n details: Option<Structure>,\n\n}\n\n\n\nimpl<'a, T: MessageErrorDomain> ErrorBuilder<'a, T> {\n\n fn new(error: T, message: &'a str) -> Self {\n\n skip_assert_initialized!();\n\n Self {\n", "file_path": "gstreamer/src/message.rs", "rank": 19, "score": 224042.57113508251 }, { "content": "pub trait IteratorImpl<T>: Clone + Send + 'static\n\nwhere\n\n for<'a> T: FromValueOptional<'a> + StaticType + ToValue + Send + 'static,\n\n{\n\n fn next(&mut self) -> Option<Result<T, IteratorError>>;\n\n fn resync(&mut self);\n\n}\n\n\n\nunsafe extern \"C\" fn rs_iterator_copy<T, I: IteratorImpl<T>>(\n\n it: *const gst_sys::GstIterator,\n\n copy: *mut gst_sys::GstIterator,\n\n) where\n\n for<'a> T: FromValueOptional<'a> + StaticType + ToValue + Send + 'static,\n\n{\n\n let it = it as *const RsIterator<T, I>;\n\n let copy = copy as *mut RsIterator<T, I>;\n\n\n\n ptr::write(&mut (*copy).imp, (*it).imp.clone());\n\n}\n\n\n", "file_path": "gstreamer/src/iterator.rs", "rank": 20, "score": 222623.32928423013 }, { "content": "pub trait AggregatorPadImplExt {\n\n fn parent_flush(\n\n &self,\n\n aggregator_pad: &AggregatorPad,\n\n aggregator: &Aggregator,\n\n ) -> Result<gst::FlowSuccess, gst::FlowError>;\n\n\n\n fn parent_skip_buffer(\n\n &self,\n\n aggregator_pad: &AggregatorPad,\n\n aggregator: &Aggregator,\n\n buffer: &gst::Buffer,\n\n ) -> bool;\n\n}\n\n\n\nimpl<T: AggregatorPadImpl> AggregatorPadImplExt for T {\n\n fn 
parent_flush(\n\n &self,\n\n aggregator_pad: &AggregatorPad,\n\n aggregator: &Aggregator,\n", "file_path": "gstreamer-base/src/subclass/aggregator_pad.rs", "rank": 21, "score": 220627.69062689273 }, { "content": "pub fn tag_get_type(name: &str) -> glib::Type {\n\n skip_assert_initialized!();\n\n unsafe { from_glib(gst_sys::gst_tag_get_type(name.to_glib_none().0)) }\n\n}\n\n\n", "file_path": "gstreamer/src/tags.rs", "rank": 22, "score": 219981.86565724236 }, { "content": "pub trait GhostPadExt: 'static {\n\n fn get_target(&self) -> Option<Pad>;\n\n\n\n fn set_target<P: IsA<Pad>>(&self, newtarget: Option<&P>) -> Result<(), glib::error::BoolError>;\n\n}\n\n\n\nimpl<O: IsA<GhostPad>> GhostPadExt for O {\n\n fn get_target(&self) -> Option<Pad> {\n\n unsafe {\n\n from_glib_full(gst_sys::gst_ghost_pad_get_target(\n\n self.as_ref().to_glib_none().0,\n\n ))\n\n }\n\n }\n\n\n\n fn set_target<P: IsA<Pad>>(&self, newtarget: Option<&P>) -> Result<(), glib::error::BoolError> {\n\n unsafe {\n\n glib_result_from_gboolean!(\n\n gst_sys::gst_ghost_pad_set_target(\n\n self.as_ref().to_glib_none().0,\n\n newtarget.map(|p| p.as_ref()).to_glib_none().0\n\n ),\n\n \"Failed to set target\"\n\n )\n\n }\n\n }\n\n}\n", "file_path": "gstreamer/src/auto/ghost_pad.rs", "rank": 23, "score": 219301.0386985367 }, { "content": "pub trait ProxyPadExt: 'static {\n\n fn get_internal(&self) -> Option<ProxyPad>;\n\n}\n\n\n\nimpl<O: IsA<ProxyPad>> ProxyPadExt for O {\n\n fn get_internal(&self) -> Option<ProxyPad> {\n\n unsafe {\n\n from_glib_full(gst_sys::gst_proxy_pad_get_internal(\n\n self.as_ref().to_glib_none().0,\n\n ))\n\n }\n\n }\n\n}\n", "file_path": "gstreamer/src/auto/proxy_pad.rs", "rank": 24, "score": 219301.0386985367 }, { "content": "pub fn update_registry() -> Result<(), glib::error::BoolError> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n glib_result_from_gboolean!(\n\n gst_sys::gst_update_registry(),\n\n \"Failed to update the registry\"\n\n )\n\n }\n\n}\n\n\n", "file_path": 
"gstreamer/src/auto/functions.rs", "rank": 25, "score": 216716.92298239376 }, { "content": "pub fn type_find_helper<P: IsA<gst::Pad>>(\n\n src: &P,\n\n size: u64,\n\n) -> Result<gst::Caps, glib::BoolError> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n Option::<_>::from_glib_full(gst_base_sys::gst_type_find_helper(\n\n src.as_ref().to_glib_none().0,\n\n size,\n\n ))\n\n .ok_or_else(|| glib_bool_error!(\"Could not find type\"))\n\n }\n\n}\n\n\n", "file_path": "gstreamer-base/src/auto/functions.rs", "rank": 26, "score": 216710.81945973056 }, { "content": "pub trait AggregatorPadExt: 'static {\n\n #[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\n fn drop_buffer(&self) -> bool;\n\n\n\n #[cfg(any(feature = \"v1_14_1\", feature = \"dox\"))]\n\n fn has_buffer(&self) -> bool;\n\n\n\n #[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\n fn is_eos(&self) -> bool;\n\n\n\n #[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\n fn peek_buffer(&self) -> Option<gst::Buffer>;\n\n\n\n #[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\n fn pop_buffer(&self) -> Option<gst::Buffer>;\n\n\n\n #[cfg(any(feature = \"v1_16\", feature = \"dox\"))]\n\n fn get_property_emit_signals(&self) -> bool;\n\n\n\n #[cfg(any(feature = \"v1_16\", feature = \"dox\"))]\n", "file_path": "gstreamer-base/src/auto/aggregator_pad.rs", "rank": 27, "score": 216062.2183577739 }, { "content": "pub trait AggregatorPadExtManual: 'static {\n\n fn get_segment(&self) -> gst::Segment;\n\n}\n\n\n\nimpl<O: IsA<AggregatorPad>> AggregatorPadExtManual for O {\n\n fn get_segment(&self) -> gst::Segment {\n\n unsafe {\n\n let ptr: &gst_base_sys::GstAggregatorPad = &*(self.as_ptr() as *const _);\n\n let _guard = ::utils::MutexGuard::lock(&ptr.parent.object.lock);\n\n from_glib_none(&ptr.segment as *const gst_sys::GstSegment)\n\n }\n\n }\n\n}\n", "file_path": "gstreamer-base/src/aggregator_pad.rs", "rank": 28, "score": 216062.21835777385 }, { "content": "#[cfg(any(feature = \"v1_14\", feature = 
\"dox\"))]\n\npub fn get_main_executable_path() -> Result<GString, glib::BoolError> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n Option::<_>::from_glib_none(gst_sys::gst_get_main_executable_path())\n\n .ok_or_else(|| glib_bool_error!(\"Failed to get main executable path\"))\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 29, "score": 207764.56652901002 }, { "content": "pub fn parse_launchv(argv: &[&str]) -> Result<Element, glib::Error> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let ret = gst_sys::gst_parse_launchv(argv.to_glib_none().0, &mut error);\n\n if error.is_null() {\n\n Ok(from_glib_none(ret))\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n\n//#[cfg(any(feature = \"v1_18\", feature = \"dox\"))]\n\n//pub fn tracing_get_active_tracers() -> /*Ignored*/Vec<Tracer> {\n\n// unsafe { TODO: call gst_sys:gst_tracing_get_active_tracers() }\n\n//}\n\n\n\n//pub fn tracing_register_hook<P: FnOnce() + Send + Sync + 'static>(tracer: /*Ignored*/&Tracer, detail: &str, func: P) {\n\n// unsafe { TODO: call gst_sys:gst_tracing_register_hook() }\n\n//}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 30, "score": 205439.62397397478 }, { "content": "#[cfg(target_os = \"macos\")]\n\npub fn run<T, F: FnOnce() -> T + Send + 'static>(main: F) -> T\n\nwhere\n\n T: Send + 'static,\n\n{\n\n use std::thread;\n\n\n\n let l = runloop::CFRunLoop::get_main();\n\n let t = thread::spawn(move || {\n\n let res = main();\n\n l.stop();\n\n res\n\n });\n\n\n\n runloop::CFRunLoop::run();\n\n\n\n t.join().unwrap()\n\n}\n", "file_path": "tutorials/src/tutorials-common.rs", "rank": 31, "score": 204988.30691293642 }, { "content": "#[cfg(target_os = \"macos\")]\n\npub fn run<T, F: FnOnce() -> T + Send + 'static>(main: F) -> T\n\nwhere\n\n T: Send + 'static,\n\n{\n\n use std::thread;\n\n\n\n let l = runloop::CFRunLoop::get_main();\n\n let t = thread::spawn(move || {\n\n let res = 
main();\n\n l.stop();\n\n res\n\n });\n\n\n\n runloop::CFRunLoop::run();\n\n\n\n t.join().unwrap()\n\n}\n", "file_path": "examples/src/examples-common.rs", "rank": 32, "score": 204988.30691293642 }, { "content": "pub fn parse_launch(pipeline_description: &str) -> Result<Element, glib::Error> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let ret = gst_sys::gst_parse_launch(pipeline_description.to_glib_none().0, &mut error);\n\n if error.is_null() {\n\n Ok(from_glib_none(ret))\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 33, "score": 202458.45211623708 }, { "content": "#[cfg(any(feature = \"v1_18\", feature = \"dox\"))]\n\npub fn type_is_plugin_api(type_: glib::types::Type) -> Option<::PluginAPIFlags> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n use std::mem;\n\n\n\n let mut flags = mem::MaybeUninit::uninit();\n\n let ret = from_glib(gst_sys::gst_type_is_plugin_api(\n\n type_.to_glib(),\n\n flags.as_mut_ptr(),\n\n ));\n\n let flags = flags.assume_init();\n\n if ret {\n\n Some(from_glib(flags))\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "gstreamer/src/functions.rs", "rank": 34, "score": 199605.48835631376 }, { "content": "pub fn merge_use_first(src: &Value) -> Value {\n\n skip_assert_initialized!();\n\n assert_eq!(src.type_(), ::List::static_type());\n\n\n\n unsafe {\n\n use glib::translate::Uninitialized;\n\n\n\n let mut res = Value::uninitialized();\n\n gst_sys::gst_tag_merge_use_first(res.to_glib_none_mut().0, src.to_glib_none().0);\n\n res\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/tags.rs", "rank": 35, "score": 198619.83900496905 }, { "content": "#[cfg(any(feature = \"v1_18\", feature = \"dox\"))]\n\npub fn type_mark_as_plugin_api(type_: glib::types::Type, flags: PluginAPIFlags) {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n 
gst_sys::gst_type_mark_as_plugin_api(type_.to_glib(), flags.to_glib());\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 36, "score": 194252.4669418853 }, { "content": "pub fn pb_utils_get_source_description(protocol: &str) -> Result<GString, glib::BoolError> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n Option::<_>::from_glib_full(gst_pbutils_sys::gst_pb_utils_get_source_description(\n\n protocol.to_glib_none().0,\n\n ))\n\n .ok_or_else(|| glib_bool_error!(\"Failed to get source description\"))\n\n }\n\n}\n\n\n", "file_path": "gstreamer-pbutils/src/auto/functions.rs", "rank": 37, "score": 189323.40353680888 }, { "content": "#[cfg(any(feature = \"v1_12\", feature = \"dox\"))]\n\npub fn debug_get_stack_trace(flags: StackTraceFlags) -> Result<GString, glib::BoolError> {\n\n skip_assert_initialized!();\n\n unsafe {\n\n Option::<_>::from_glib_full(gst_sys::gst_debug_get_stack_trace(flags.to_glib()))\n\n .ok_or_else(|| glib_bool_error!(\"Failed to get stack trace\"))\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 38, "score": 189323.40353680888 }, { "content": "pub fn pb_utils_get_sink_description(protocol: &str) -> Result<GString, glib::BoolError> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n Option::<_>::from_glib_full(gst_pbutils_sys::gst_pb_utils_get_sink_description(\n\n protocol.to_glib_none().0,\n\n ))\n\n .ok_or_else(|| glib_bool_error!(\"Failed to get sink description\"))\n\n }\n\n}\n\n\n", "file_path": "gstreamer-pbutils/src/auto/functions.rs", "rank": 39, "score": 189323.40353680888 }, { "content": "pub fn pb_utils_get_element_description(factory_name: &str) -> Result<GString, glib::BoolError> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n Option::<_>::from_glib_full(gst_pbutils_sys::gst_pb_utils_get_element_description(\n\n factory_name.to_glib_none().0,\n\n ))\n\n .ok_or_else(|| glib_bool_error!(\"Failed to get element description\"))\n\n }\n\n}\n\n\n", "file_path": 
"gstreamer-pbutils/src/auto/functions.rs", "rank": 40, "score": 187002.42228285794 }, { "content": "fn into_raw_watch<F: FnMut(&RTSPSessionPool) -> Continue + Send + 'static>(func: F) -> gpointer {\n\n #[allow(clippy::type_complexity)]\n\n let func: Box<RefCell<F>> = Box::new(RefCell::new(func));\n\n Box::into_raw(func) as gpointer\n\n}\n\n\n", "file_path": "gstreamer-rtsp-server/src/rtsp_session_pool.rs", "rank": 41, "score": 174199.5353496098 }, { "content": "pub fn type_find_helper_for_data<P: IsA<gst::Object>, R: AsRef<[u8]>>(\n\n obj: Option<&P>,\n\n data: R,\n\n) -> Result<(gst::Caps, gst::TypeFindProbability), glib::error::BoolError> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n let mut prob = mem::MaybeUninit::uninit();\n\n let data = data.as_ref();\n\n let (ptr, len) = (data.as_ptr(), data.len());\n\n let ret = gst_base_sys::gst_type_find_helper_for_data(\n\n obj.map(|p| p.as_ref()).to_glib_none().0,\n\n mut_override(ptr),\n\n len,\n\n prob.as_mut_ptr(),\n\n );\n\n if ret.is_null() {\n\n Err(glib_bool_error!(\"No type could be found\"))\n\n } else {\n\n Ok((from_glib_full(ret), from_glib(prob.assume_init())))\n\n }\n\n }\n\n}\n\n\n", "file_path": "gstreamer-base/src/functions.rs", "rank": 42, "score": 172380.5107814873 }, { "content": "pub trait Tag<'a> {\n\n type TagType: FromValueOptional<'a> + SetValue + Send;\n\n fn tag_name<'b>() -> &'b str;\n\n}\n\n\n\nmacro_rules! 
impl_tag(\n\n ($name:ident, $t:ty, $rust_tag:ident, $gst_tag:ident) => {\n\n pub enum $name {}\n\n impl<'a> Tag<'a> for $name {\n\n type TagType = $t;\n\n fn tag_name<'b>() -> &'b str {\n\n *$rust_tag\n\n }\n\n }\n\n\n\n pub(crate) static $rust_tag: Lazy<&'static str> = Lazy::new(||\n\n unsafe { CStr::from_ptr(gst_sys::$gst_tag).to_str().unwrap() });\n\n };\n\n);\n\n\n", "file_path": "gstreamer/src/tags.rs", "rank": 43, "score": 169927.35788427643 }, { "content": "pub fn parse_launch_full(\n\n pipeline_description: &str,\n\n mut context: Option<&mut ParseContext>,\n\n flags: ParseFlags,\n\n) -> Result<Element, glib::Error> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let ret = gst_sys::gst_parse_launch_full(\n\n pipeline_description.to_glib_none().0,\n\n context.to_glib_none_mut().0,\n\n flags.to_glib(),\n\n &mut error,\n\n );\n\n if error.is_null() {\n\n Ok(from_glib_none(ret))\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/functions.rs", "rank": 44, "score": 169726.98775253235 }, { "content": "pub fn parse_launchv_full(\n\n argv: &[&str],\n\n mut context: Option<&mut ParseContext>,\n\n flags: ParseFlags,\n\n) -> Result<Element, glib::Error> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let ret = gst_sys::gst_parse_launchv_full(\n\n argv.to_glib_none().0,\n\n context.to_glib_none_mut().0,\n\n flags.to_glib(),\n\n &mut error,\n\n );\n\n if error.is_null() {\n\n Ok(from_glib_none(ret))\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/functions.rs", "rank": 45, "score": 169726.98775253235 }, { "content": "pub fn convert_sample(\n\n sample: &gst::Sample,\n\n caps: &gst::Caps,\n\n timeout: gst::ClockTime,\n\n) -> Result<gst::Sample, glib::Error> {\n\n skip_assert_initialized!();\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let ret = 
gst_video_sys::gst_video_convert_sample(\n\n sample.to_glib_none().0,\n\n caps.to_glib_none().0,\n\n timeout.to_glib(),\n\n &mut error,\n\n );\n\n\n\n if error.is_null() {\n\n Ok(from_glib_full(ret))\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n", "file_path": "gstreamer-video/src/functions.rs", "rank": 46, "score": 169726.98775253235 }, { "content": "#[cfg(any(feature = \"v1_12\", feature = \"dox\"))]\n\npub fn calculate_linear_regression(\n\n xy: &[(u64, u64)],\n\n temp: Option<&mut [(u64, u64)]>,\n\n) -> Option<(u64, u64, u64, u64, f64)> {\n\n skip_assert_initialized!();\n\n use std::mem;\n\n\n\n unsafe {\n\n assert_eq!(mem::size_of::<u64>() * 2, mem::size_of::<(u64, u64)>());\n\n assert_eq!(mem::align_of::<u64>(), mem::align_of::<(u64, u64)>());\n\n assert!(\n\n temp.as_ref()\n\n .map(|temp| temp.len())\n\n .unwrap_or_else(|| xy.len())\n\n >= xy.len()\n\n );\n\n\n\n let mut m_num = mem::MaybeUninit::uninit();\n\n let mut m_denom = mem::MaybeUninit::uninit();\n\n let mut b = mem::MaybeUninit::uninit();\n", "file_path": "gstreamer/src/functions.rs", "rank": 47, "score": 169726.98775253235 }, { "content": "#[cfg(any(feature = \"v1_16\", feature = \"dox\"))]\n\npub fn type_find_helper_for_data_with_extension<P: IsA<gst::Object>, R: AsRef<[u8]>>(\n\n obj: Option<&P>,\n\n data: R,\n\n extension: Option<&str>,\n\n) -> Result<(gst::Caps, gst::TypeFindProbability), glib::error::BoolError> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n let mut prob = mem::MaybeUninit::uninit();\n\n let data = data.as_ref();\n\n let (ptr, len) = (data.as_ptr(), data.len());\n\n let ret = gst_base_sys::gst_type_find_helper_for_data_with_extension(\n\n obj.map(|p| p.as_ref()).to_glib_none().0,\n\n mut_override(ptr),\n\n len,\n\n extension.to_glib_none().0,\n\n prob.as_mut_ptr(),\n\n );\n\n if ret.is_null() {\n\n Err(glib_bool_error!(\"No type could be found\"))\n\n } else {\n\n Ok((from_glib_full(ret), from_glib(prob.assume_init())))\n\n }\n\n }\n\n}\n\n\n", 
"file_path": "gstreamer-base/src/functions.rs", "rank": 48, "score": 169647.98603530083 }, { "content": "pub trait TypeFindImpl {\n\n fn peek(&mut self, offset: i64, size: u32) -> Option<&[u8]>;\n\n fn suggest(&mut self, probability: TypeFindProbability, caps: &Caps);\n\n fn get_length(&self) -> Option<u64> {\n\n None\n\n }\n\n}\n\n\n\nimpl<'a> TypeFind<'a> {\n\n pub fn register<F>(\n\n plugin: Option<&Plugin>,\n\n name: &str,\n\n rank: Rank,\n\n extensions: Option<&str>,\n\n possible_caps: Option<&Caps>,\n\n func: F,\n\n ) -> Result<(), glib::error::BoolError>\n\n where\n\n F: Fn(&mut TypeFind) + Send + Sync + 'static,\n\n {\n", "file_path": "gstreamer/src/typefind.rs", "rank": 49, "score": 168774.3025145115 }, { "content": "pub trait PanicPoison {\n\n fn panicked(&self) -> &AtomicBool;\n\n}\n\n\n\nimpl<T: ObjectSubclass> PanicPoison for ElementInstanceStruct<T> {\n\n fn panicked(&self) -> &AtomicBool {\n\n &self.panicked\n\n }\n\n}\n", "file_path": "gstreamer/src/subclass/mod.rs", "rank": 50, "score": 168774.3025145115 }, { "content": "pub trait HasStreamLock {\n\n fn get_stream_lock(&self) -> *mut glib_sys::GRecMutex;\n\n fn get_element_as_ptr(&self) -> *const gst_sys::GstElement;\n\n}\n", "file_path": "gstreamer-video/src/utils.rs", "rank": 51, "score": 168774.3025145115 }, { "content": "pub fn parse_bin_from_description(\n\n bin_description: &str,\n\n ghost_unlinked_pads: bool,\n\n) -> Result<Bin, glib::Error> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let ret = gst_sys::gst_parse_bin_from_description(\n\n bin_description.to_glib_none().0,\n\n ghost_unlinked_pads.to_glib(),\n\n &mut error,\n\n );\n\n if error.is_null() {\n\n Ok(from_glib_none(ret))\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 52, "score": 167083.8838091908 }, { "content": "#[cfg(any(feature = \"v1_16\", feature = \"dox\"))]\n\npub fn audio_buffer_truncate(\n\n 
buffer: gst::Buffer,\n\n bpf: u32,\n\n trim: usize,\n\n samples: Option<usize>,\n\n) -> gst::Buffer {\n\n skip_assert_initialized!();\n\n\n\n unsafe {\n\n from_glib_full(gst_audio_sys::gst_audio_buffer_truncate(\n\n buffer.into_ptr(),\n\n bpf as i32,\n\n trim,\n\n samples.unwrap_or(std::usize::MAX),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "gstreamer-audio/src/functions.rs", "rank": 53, "score": 167083.8838091908 }, { "content": "pub fn parse_bin_from_description_full(\n\n bin_description: &str,\n\n ghost_unlinked_pads: bool,\n\n mut context: Option<&mut ParseContext>,\n\n flags: ParseFlags,\n\n) -> Result<Element, glib::Error> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n let mut error = ptr::null_mut();\n\n let ret = gst_sys::gst_parse_bin_from_description_full(\n\n bin_description.to_glib_none().0,\n\n ghost_unlinked_pads.to_glib(),\n\n context.to_glib_none_mut().0,\n\n flags.to_glib(),\n\n &mut error,\n\n );\n\n if error.is_null() {\n\n Ok(from_glib_none(ret))\n\n } else {\n\n Err(from_glib_full(error))\n\n }\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/functions.rs", "rank": 54, "score": 167083.8838091908 }, { "content": "pub fn convert_sample_future(\n\n sample: &gst::Sample,\n\n caps: &gst::Caps,\n\n timeout: gst::ClockTime,\n\n) -> std::pin::Pin<Box<dyn std::future::Future<Output = Result<gst::Sample, glib::Error>> + 'static>>\n\n{\n\n skip_assert_initialized!();\n\n\n\n use futures_channel::oneshot;\n\n use futures_util::future::lazy;\n\n use futures_util::future::FutureExt;\n\n\n\n let (sender, receiver) = oneshot::channel();\n\n\n\n let sample = sample.clone();\n\n let caps = caps.clone();\n\n let future = lazy(move |_| {\n\n assert!(\n\n glib::MainContext::ref_thread_default().is_owner(),\n\n \"Spawning futures only allowed if the thread is owning the MainContext\"\n", "file_path": "gstreamer-video/src/functions.rs", "rank": 55, "score": 167083.8838091908 }, { "content": "pub fn audio_buffer_clip(\n\n buffer: gst::Buffer,\n\n segment: 
&gst::Segment,\n\n rate: u32,\n\n bpf: u32,\n\n) -> Option<gst::Buffer> {\n\n skip_assert_initialized!();\n\n\n\n unsafe {\n\n from_glib_full(gst_audio_sys::gst_audio_buffer_clip(\n\n buffer.into_ptr(),\n\n segment.to_glib_none().0,\n\n rate as i32,\n\n bpf as i32,\n\n ))\n\n }\n\n}\n\n\n", "file_path": "gstreamer-audio/src/functions.rs", "rank": 56, "score": 167083.8838091908 }, { "content": "pub fn parse_bin_from_description_with_name(\n\n bin_description: &str,\n\n ghost_unlinked_pads: bool,\n\n bin_name: &str,\n\n) -> Result<Bin, glib::Error> {\n\n assert_initialized_main_thread!();\n\n let bin = parse_bin_from_description(bin_description, ghost_unlinked_pads)?;\n\n if !bin_name.is_empty() {\n\n let obj = bin.clone().upcast::<Object>();\n\n unsafe {\n\n gst_sys::gst_object_set_name(obj.to_glib_none().0, bin_name.to_glib_none().0);\n\n }\n\n }\n\n Ok(bin)\n\n}\n\n\n", "file_path": "gstreamer/src/functions.rs", "rank": 57, "score": 167083.8838091908 }, { "content": "pub fn calculate_display_ratio(\n\n video_width: u32,\n\n video_height: u32,\n\n video_par: gst::Fraction,\n\n display_par: gst::Fraction,\n\n) -> Option<gst::Fraction> {\n\n skip_assert_initialized!();\n\n\n\n unsafe {\n\n let mut dar_n = mem::MaybeUninit::uninit();\n\n let mut dar_d = mem::MaybeUninit::uninit();\n\n\n\n let res: bool = from_glib(gst_video_sys::gst_video_calculate_display_ratio(\n\n dar_n.as_mut_ptr(),\n\n dar_d.as_mut_ptr(),\n\n video_width,\n\n video_height,\n\n *video_par.numer() as u32,\n\n *video_par.denom() as u32,\n\n *display_par.numer() as u32,\n", "file_path": "gstreamer-video/src/functions.rs", "rank": 58, "score": 167083.8838091908 }, { "content": "pub trait ElementImplExt {\n\n fn parent_change_state(\n\n &self,\n\n element: &::Element,\n\n transition: StateChange,\n\n ) -> Result<StateChangeSuccess, StateChangeError>;\n\n\n\n fn parent_request_new_pad(\n\n &self,\n\n element: &::Element,\n\n templ: &::PadTemplate,\n\n name: Option<String>,\n\n caps: Option<&::Caps>,\n\n 
) -> Option<::Pad>;\n\n\n\n fn parent_release_pad(&self, element: &::Element, pad: &::Pad);\n\n\n\n fn parent_send_event(&self, element: &::Element, event: Event) -> bool;\n\n\n\n fn parent_query(&self, element: &::Element, query: &mut QueryRef) -> bool;\n", "file_path": "gstreamer/src/subclass/element.rs", "rank": 59, "score": 166132.16936741004 }, { "content": "pub trait ClockImplExt {\n\n fn parent_change_resolution(\n\n &self,\n\n clock: &Clock,\n\n old_resolution: ClockTime,\n\n new_resolution: ClockTime,\n\n ) -> ClockTime;\n\n\n\n fn parent_get_resolution(&self, clock: &Clock) -> ClockTime;\n\n\n\n fn parent_get_internal_time(&self, clock: &Clock) -> ClockTime;\n\n\n\n fn parent_wait(\n\n &self,\n\n clock: &Clock,\n\n id: &ClockId,\n\n ) -> (Result<ClockSuccess, ClockError>, ClockTimeDiff);\n\n\n\n fn parent_wait_async(&self, clock: &Clock, id: &ClockId) -> Result<ClockSuccess, ClockError>;\n\n\n", "file_path": "gstreamer/src/subclass/clock.rs", "rank": 60, "score": 166132.16936741004 }, { "content": "pub trait BinImplExt {\n\n fn parent_add_element(&self, bin: &Bin, element: &Element) -> Result<(), LoggableError>;\n\n\n\n fn parent_remove_element(&self, bin: &Bin, element: &Element) -> Result<(), LoggableError>;\n\n\n\n fn parent_handle_message(&self, bin: &Bin, message: Message);\n\n}\n\n\n\nimpl<T: BinImpl> BinImplExt for T {\n\n fn parent_add_element(&self, bin: &Bin, element: &Element) -> Result<(), LoggableError> {\n\n unsafe {\n\n let data = T::type_data();\n\n let parent_class = data.as_ref().get_parent_class() as *mut gst_sys::GstBinClass;\n\n let f = (*parent_class).add_element.ok_or_else(|| {\n\n gst_loggable_error!(::CAT_RUST, \"Parent function `add_element` is not defined\")\n\n })?;\n\n gst_result_from_gboolean!(\n\n f(bin.to_glib_none().0, element.to_glib_none().0),\n\n ::CAT_RUST,\n\n \"Failed to add the element using the parent function\"\n", "file_path": "gstreamer/src/subclass/bin.rs", "rank": 61, "score": 166132.16936741004 }, { 
"content": "pub trait DeviceImplExt {\n\n fn parent_create_element(\n\n &self,\n\n device: &Device,\n\n name: Option<&str>,\n\n ) -> Result<Element, LoggableError>;\n\n\n\n fn parent_reconfigure_element(\n\n &self,\n\n device: &Device,\n\n element: &Element,\n\n ) -> Result<(), LoggableError>;\n\n}\n\n\n\nimpl<T: DeviceImpl> DeviceImplExt for T {\n\n fn parent_create_element(\n\n &self,\n\n device: &Device,\n\n name: Option<&str>,\n\n ) -> Result<Element, LoggableError> {\n", "file_path": "gstreamer/src/subclass/device.rs", "rank": 62, "score": 166132.16936741004 }, { "content": "pub trait ContextGLExt {\n\n fn get_gl_display(&self) -> Option<GLDisplay>;\n\n fn set_gl_display<T: IsA<GLDisplay>>(&self, display: &T);\n\n}\n\n\n\nimpl ContextGLExt for ContextRef {\n\n fn get_gl_display(&self) -> Option<GLDisplay> {\n\n unsafe {\n\n let mut display = ptr::null_mut();\n\n if from_glib(gst_gl_sys::gst_context_get_gl_display(\n\n self.as_mut_ptr(),\n\n &mut display,\n\n )) {\n\n Some(from_glib_full(display))\n\n } else {\n\n None\n\n }\n\n }\n\n }\n\n\n\n fn set_gl_display<T: IsA<GLDisplay>>(&self, display: &T) {\n\n unsafe {\n\n gst_gl_sys::gst_context_set_gl_display(\n\n self.as_mut_ptr(),\n\n display.as_ref().to_glib_none().0,\n\n );\n\n }\n\n }\n\n}\n", "file_path": "gstreamer-gl/src/context.rs", "rank": 63, "score": 166132.16936741004 }, { "content": "pub fn reorder_channels(\n\n data: &mut [u8],\n\n format: ::AudioFormat,\n\n channels: u32,\n\n from: &[AudioChannelPosition],\n\n to: &[AudioChannelPosition],\n\n) -> Result<(), glib::BoolError> {\n\n assert_initialized_main_thread!();\n\n\n\n if from.len() != to.len() || from.len() > 64 {\n\n return Err(glib_bool_error!(\"Invalid number of channels\"));\n\n }\n\n\n\n let from_len = from.len();\n\n let to_len = to.len();\n\n\n\n let from_raw: [gst_audio_sys::GstAudioChannelPosition; 64] = array_init::array_init(|i| {\n\n if i >= from_len as usize {\n\n gst_audio_sys::GST_AUDIO_CHANNEL_POSITION_INVALID\n\n } else {\n", 
"file_path": "gstreamer-audio/src/audio_channel_position.rs", "rank": 64, "score": 164571.286748743 }, { "content": "pub fn center_video_rectangle(\n\n src: &VideoRectangle,\n\n dst: &VideoRectangle,\n\n scale: bool,\n\n) -> VideoRectangle {\n\n skip_assert_initialized!();\n\n let mut result = gst_video_sys::GstVideoRectangle {\n\n x: 0,\n\n y: 0,\n\n w: 0,\n\n h: 0,\n\n };\n\n let src_rect = gst_video_sys::GstVideoRectangle {\n\n x: src.x,\n\n y: src.y,\n\n w: src.w,\n\n h: src.h,\n\n };\n\n let dst_rect = gst_video_sys::GstVideoRectangle {\n\n x: dst.x,\n\n y: dst.y,\n\n w: dst.w,\n\n h: dst.h,\n\n };\n\n unsafe {\n\n gst_video_sys::gst_video_sink_center_rect(src_rect, dst_rect, &mut result, scale.to_glib());\n\n }\n\n VideoRectangle::new(result.x, result.y, result.w, result.h)\n\n}\n", "file_path": "gstreamer-video/src/video_rectangle.rs", "rank": 65, "score": 164571.286748743 }, { "content": "pub fn audio_make_raw_caps(\n\n formats: &[::AudioFormat],\n\n layout: ::AudioLayout,\n\n) -> gst::caps::Builder<gst::caps::NoFeature> {\n\n assert_initialized_main_thread!();\n\n\n\n let formats: Vec<glib::SendValue> = formats\n\n .iter()\n\n .map(|f| match f {\n\n ::AudioFormat::Encoded => panic!(\"Invalid encoded format\"),\n\n ::AudioFormat::Unknown => panic!(\"Invalid unknown format\"),\n\n _ => f.to_string().to_send_value(),\n\n })\n\n .collect();\n\n\n\n let builder = gst::caps::Caps::builder(\"audio/x-raw\")\n\n .field(\"format\", &gst::List::from_owned(formats))\n\n .field(\"rate\", &gst::IntRange::<i32>::new(1, i32::MAX))\n\n .field(\"channels\", &gst::IntRange::<i32>::new(1, i32::MAX));\n\n\n", "file_path": "gstreamer-audio/src/functions.rs", "rank": 66, "score": 164571.286748743 }, { "content": "pub fn parse_bin_from_description_with_name_full(\n\n bin_description: &str,\n\n ghost_unlinked_pads: bool,\n\n bin_name: &str,\n\n context: Option<&mut ParseContext>,\n\n flags: ParseFlags,\n\n) -> Result<Element, glib::Error> {\n\n 
assert_initialized_main_thread!();\n\n let bin =\n\n parse_bin_from_description_full(bin_description, ghost_unlinked_pads, context, flags)?;\n\n if !bin_name.is_empty() {\n\n let obj = bin.clone().upcast::<Object>();\n\n unsafe {\n\n gst_sys::gst_object_set_name(obj.to_glib_none().0, bin_name.to_glib_none().0);\n\n }\n\n }\n\n Ok(bin)\n\n}\n\n\n", "file_path": "gstreamer/src/functions.rs", "rank": 67, "score": 164571.286748743 }, { "content": "pub fn debug_remove_default_log_function() {\n\n skip_assert_initialized!();\n\n unsafe {\n\n gst_sys::gst_debug_remove_log_function(None);\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/log.rs", "rank": 68, "score": 164571.286748743 }, { "content": "pub fn debug_print_stack_trace() {\n\n skip_assert_initialized!();\n\n unsafe {\n\n gst_sys::gst_debug_print_stack_trace();\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 69, "score": 164571.286748743 }, { "content": "pub trait GstParamSpecExt {\n\n #[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\n fn array(\n\n name: &str,\n\n nick: &str,\n\n blurb: &str,\n\n element_spec: &glib::ParamSpec,\n\n flags: glib::ParamFlags,\n\n ) -> Self;\n\n\n\n fn fraction(\n\n name: &str,\n\n nick: &str,\n\n blurb: &str,\n\n min: ::Fraction,\n\n max: ::Fraction,\n\n default: ::Fraction,\n\n flags: glib::ParamFlags,\n\n ) -> Self;\n\n}\n", "file_path": "gstreamer/src/param_spec.rs", "rank": 70, "score": 163620.4951688063 }, { "content": "pub trait RTPBufferExt {\n\n fn new_rtp_with_sizes(\n\n payload_len: u32,\n\n pad_len: u8,\n\n csrc_count: u8,\n\n ) -> Result<gst::Buffer, glib::BoolError>;\n\n}\n\n\n\nimpl RTPBufferExt for gst::Buffer {\n\n fn new_rtp_with_sizes(\n\n payload_len: u32,\n\n pad_len: u8,\n\n csrc_count: u8,\n\n ) -> Result<gst::Buffer, glib::BoolError> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n Option::<_>::from_glib_full(gst_rtp_sys::gst_rtp_buffer_new_allocate(\n\n payload_len,\n\n pad_len,\n\n csrc_count,\n\n ))\n\n .ok_or_else(|| 
glib_bool_error!(\"Failed to allocate new RTP buffer\"))\n\n }\n\n }\n\n}\n\n\n", "file_path": "gstreamer-rtp/src/rtp_buffer.rs", "rank": 71, "score": 163620.4951688063 }, { "content": "pub trait AggregatorImplExt {\n\n fn parent_flush(&self, aggregator: &Aggregator) -> Result<gst::FlowSuccess, gst::FlowError>;\n\n\n\n fn parent_clip(\n\n &self,\n\n aggregator: &Aggregator,\n\n aggregator_pad: &AggregatorPad,\n\n buffer: gst::Buffer,\n\n ) -> Option<gst::Buffer>;\n\n\n\n fn parent_finish_buffer(\n\n &self,\n\n aggregator: &Aggregator,\n\n buffer: gst::Buffer,\n\n ) -> Result<gst::FlowSuccess, gst::FlowError>;\n\n\n\n #[cfg(any(feature = \"v1_18\", feature = \"dox\"))]\n\n fn parent_finish_buffer_list(\n\n &self,\n\n aggregator: &Aggregator,\n", "file_path": "gstreamer-base/src/subclass/aggregator.rs", "rank": 72, "score": 163620.4951688063 }, { "content": "pub trait VideoBufferExt {\n\n fn get_video_flags(&self) -> ::VideoBufferFlags;\n\n fn set_video_flags(&mut self, flags: ::VideoBufferFlags);\n\n fn unset_video_flags(&mut self, flags: ::VideoBufferFlags);\n\n}\n\n\n\nimpl VideoBufferExt for gst::BufferRef {\n\n fn get_video_flags(&self) -> ::VideoBufferFlags {\n\n unsafe {\n\n let ptr = self.as_mut_ptr();\n\n ::VideoBufferFlags::from_bits_truncate((*ptr).mini_object.flags)\n\n }\n\n }\n\n\n\n fn set_video_flags(&mut self, flags: ::VideoBufferFlags) {\n\n unsafe {\n\n let ptr = self.as_mut_ptr();\n\n (*ptr).mini_object.flags |= flags.bits();\n\n }\n\n }\n", "file_path": "gstreamer-video/src/video_frame.rs", "rank": 73, "score": 163620.4951688063 }, { "content": "pub fn debug_is_colored() -> bool {\n\n skip_assert_initialized!();\n\n unsafe { from_glib(gst_sys::gst_debug_is_colored()) }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 74, "score": 162453.58847272221 }, { "content": "pub fn debug_is_active() -> bool {\n\n skip_assert_initialized!();\n\n unsafe { from_glib(gst_sys::gst_debug_is_active()) }\n\n}\n\n\n", "file_path": 
"gstreamer/src/auto/functions.rs", "rank": 75, "score": 162453.58847272221 }, { "content": "pub fn pb_utils_get_codec_description(\n\n caps: &gst::CapsRef,\n\n) -> Result<glib::GString, glib::error::BoolError> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n match from_glib_full(gst_pbutils_sys::gst_pb_utils_get_codec_description(\n\n caps.as_ptr(),\n\n )) {\n\n Some(s) => Ok(s),\n\n None => Err(glib_bool_error!(\"Failed to get codec description\")),\n\n }\n\n }\n\n}\n", "file_path": "gstreamer-pbutils/src/functions.rs", "rank": 76, "score": 162179.763521523 }, { "content": "pub fn pb_utils_get_decoder_description(\n\n caps: &gst::CapsRef,\n\n) -> Result<glib::GString, glib::error::BoolError> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n match from_glib_full(gst_pbutils_sys::gst_pb_utils_get_decoder_description(\n\n caps.as_ptr(),\n\n )) {\n\n Some(s) => Ok(s),\n\n None => Err(glib_bool_error!(\"Failed to get decoder description\")),\n\n }\n\n }\n\n}\n\n\n", "file_path": "gstreamer-pbutils/src/functions.rs", "rank": 77, "score": 162179.763521523 }, { "content": "#[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\npub fn debug_remove_ring_buffer_logger() {\n\n skip_assert_initialized!();\n\n unsafe {\n\n gst_sys::gst_debug_remove_ring_buffer_logger();\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 78, "score": 162179.763521523 }, { "content": "pub fn pb_utils_get_encoder_description(\n\n caps: &gst::CapsRef,\n\n) -> Result<glib::GString, glib::error::BoolError> {\n\n assert_initialized_main_thread!();\n\n unsafe {\n\n match from_glib_full(gst_pbutils_sys::gst_pb_utils_get_encoder_description(\n\n caps.as_ptr(),\n\n )) {\n\n Some(s) => Ok(s),\n\n None => Err(glib_bool_error!(\"Failed to get encoder description\")),\n\n }\n\n }\n\n}\n\n\n", "file_path": "gstreamer-pbutils/src/functions.rs", "rank": 79, "score": 162179.763521523 }, { "content": "pub fn buffer_reorder_channels(\n\n buffer: &mut gst::BufferRef,\n\n 
format: ::AudioFormat,\n\n channels: u32,\n\n from: &[AudioChannelPosition],\n\n to: &[AudioChannelPosition],\n\n) -> Result<(), glib::BoolError> {\n\n assert_initialized_main_thread!();\n\n\n\n if from.len() != to.len() || from.len() > 64 {\n\n return Err(glib_bool_error!(\"Invalid number of channels\"));\n\n }\n\n\n\n let from_len = from.len();\n\n let to_len = to.len();\n\n\n\n let from_raw: [gst_audio_sys::GstAudioChannelPosition; 64] = array_init::array_init(|i| {\n\n if i >= from_len as usize {\n\n gst_audio_sys::GST_AUDIO_CHANNEL_POSITION_INVALID\n\n } else {\n", "file_path": "gstreamer-audio/src/audio_channel_position.rs", "rank": 80, "score": 162179.763521523 }, { "content": "pub fn debug_remove_log_function(log_fn: DebugLogFunction) {\n\n skip_assert_initialized!();\n\n unsafe {\n\n let removed = gst_sys::gst_debug_remove_log_function_by_data(log_fn.0.as_ptr());\n\n assert_eq!(removed, 1);\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::sync::mpsc;\n\n use std::sync::{Arc, Mutex};\n\n\n\n #[test]\n\n fn get_existing() {\n\n ::init().unwrap();\n\n\n\n let perf_cat = DebugCategory::get(\"GST_PERFORMANCE\")\n\n .expect(\"Unable to find `DebugCategory` with name \\\"GST_PERFORMANCE\\\"\");\n", "file_path": "gstreamer/src/log.rs", "rank": 81, "score": 161677.20653303684 }, { "content": "pub trait GstValueExt: Sized {\n\n fn can_compare(&self, other: &Self) -> bool;\n\n fn compare(&self, other: &Self) -> Option<cmp::Ordering>;\n\n fn eq(&self, other: &Self) -> bool;\n\n fn can_intersect(&self, other: &Self) -> bool;\n\n fn intersect(&self, other: &Self) -> Option<Self>;\n\n fn can_subtract(&self, other: &Self) -> bool;\n\n fn subtract(&self, other: &Self) -> Option<Self>;\n\n fn can_union(&self, other: &Self) -> bool;\n\n fn union(&self, other: &Self) -> Option<Self>;\n\n fn fixate(&self) -> Option<Self>;\n\n fn is_fixed(&self) -> bool;\n\n fn is_subset(&self, superset: &Self) -> bool;\n\n fn serialize(&self) -> 
Result<glib::GString, glib::BoolError>;\n\n fn deserialize<'a, T: Into<&'a str>>(s: T) -> Result<glib::Value, glib::BoolError>;\n\n}\n\n\n\nimpl GstValueExt for glib::Value {\n\n fn can_compare(&self, other: &Self) -> bool {\n\n unsafe {\n", "file_path": "gstreamer/src/value.rs", "rank": 82, "score": 161566.6970982912 }, { "content": "pub trait ClockExtManual: 'static {\n\n fn new_periodic_id(\n\n &self,\n\n start_time: ClockTime,\n\n interval: ClockTime,\n\n ) -> Result<ClockId, glib::BoolError>;\n\n\n\n fn periodic_id_reinit(\n\n &self,\n\n id: &ClockId,\n\n start_time: ClockTime,\n\n interval: ClockTime,\n\n ) -> Result<(), glib::BoolError>;\n\n\n\n fn new_single_shot_id(&self, time: ClockTime) -> Result<ClockId, glib::BoolError>;\n\n\n\n fn single_shot_id_reinit(&self, id: &ClockId, time: ClockTime) -> Result<(), glib::BoolError>;\n\n\n\n fn set_clock_flags(&self, flags: ClockFlags);\n\n\n", "file_path": "gstreamer/src/clock.rs", "rank": 83, "score": 161566.6970982912 }, { "content": "pub trait PipelineExt: 'static {\n\n fn auto_clock(&self);\n\n\n\n fn get_auto_flush_bus(&self) -> bool;\n\n\n\n fn get_delay(&self) -> ClockTime;\n\n\n\n fn get_latency(&self) -> ClockTime;\n\n\n\n fn get_pipeline_clock(&self) -> Option<Clock>;\n\n\n\n fn set_auto_flush_bus(&self, auto_flush: bool);\n\n\n\n fn set_delay(&self, delay: ClockTime);\n\n\n\n fn set_latency(&self, latency: ClockTime);\n\n\n\n fn use_clock<P: IsA<Clock>>(&self, clock: Option<&P>);\n\n\n\n fn connect_property_auto_flush_bus_notify<F: Fn(&Self) + Send + Sync + 'static>(\n", "file_path": "gstreamer/src/auto/pipeline.rs", "rank": 84, "score": 161566.6970982912 }, { "content": "pub trait AllocatorExt: 'static {\n\n //fn alloc(&self, size: usize, params: /*Ignored*/Option<&mut AllocationParams>) -> /*Ignored*/Option<Memory>;\n\n\n\n //fn free(&self, memory: /*Ignored*/&Memory);\n\n\n\n fn set_default(&self);\n\n}\n\n\n\nimpl<O: IsA<Allocator>> AllocatorExt for O {\n\n //fn alloc(&self, size: usize, params: 
/*Ignored*/Option<&mut AllocationParams>) -> /*Ignored*/Option<Memory> {\n\n // unsafe { TODO: call gst_sys:gst_allocator_alloc() }\n\n //}\n\n\n\n //fn free(&self, memory: /*Ignored*/&Memory) {\n\n // unsafe { TODO: call gst_sys:gst_allocator_free() }\n\n //}\n\n\n\n fn set_default(&self) {\n\n unsafe {\n\n gst_sys::gst_allocator_set_default(self.as_ref().to_glib_full());\n\n }\n\n }\n\n}\n", "file_path": "gstreamer/src/auto/allocator.rs", "rank": 85, "score": 161566.6970982912 }, { "content": "pub trait PresetExt: 'static {\n\n fn delete_preset(&self, name: &str) -> Result<(), glib::error::BoolError>;\n\n\n\n fn get_meta(&self, name: &str, tag: &str) -> Option<GString>;\n\n\n\n fn get_preset_names(&self) -> Vec<GString>;\n\n\n\n fn get_property_names(&self) -> Vec<GString>;\n\n\n\n fn is_editable(&self) -> bool;\n\n\n\n fn load_preset(&self, name: &str) -> Result<(), glib::error::BoolError>;\n\n\n\n fn rename_preset(&self, old_name: &str, new_name: &str) -> Result<(), glib::error::BoolError>;\n\n\n\n fn save_preset(&self, name: &str) -> Result<(), glib::error::BoolError>;\n\n\n\n fn set_meta(\n\n &self,\n\n name: &str,\n", "file_path": "gstreamer/src/auto/preset.rs", "rank": 86, "score": 161566.6970982912 }, { "content": "pub trait ElementExt: 'static {\n\n fn abort_state(&self);\n\n\n\n fn add_pad<P: IsA<Pad>>(&self, pad: &P) -> Result<(), glib::error::BoolError>;\n\n\n\n fn create_all_pads(&self);\n\n\n\n #[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\n fn foreach_pad<P: FnMut(&Element, &Pad) -> bool>(&self, func: P) -> bool;\n\n\n\n #[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\n fn foreach_sink_pad<P: FnMut(&Element, &Pad) -> bool>(&self, func: P) -> bool;\n\n\n\n #[cfg(any(feature = \"v1_14\", feature = \"dox\"))]\n\n fn foreach_src_pad<P: FnMut(&Element, &Pad) -> bool>(&self, func: P) -> bool;\n\n\n\n fn get_base_time(&self) -> ClockTime;\n\n\n\n fn get_bus(&self) -> Option<Bus>;\n\n\n", "file_path": "gstreamer/src/auto/element.rs", "rank": 
87, "score": 161566.6970982912 }, { "content": "pub trait ElementExtManual: 'static {\n\n fn get_element_class(&self) -> &ElementClass;\n\n\n\n fn change_state(&self, transition: StateChange)\n\n -> Result<StateChangeSuccess, StateChangeError>;\n\n fn continue_state(\n\n &self,\n\n ret: StateChangeReturn,\n\n ) -> Result<StateChangeSuccess, StateChangeError>;\n\n\n\n fn get_state(\n\n &self,\n\n timeout: ClockTime,\n\n ) -> (Result<StateChangeSuccess, StateChangeError>, State, State);\n\n fn set_state(&self, state: State) -> Result<StateChangeSuccess, StateChangeError>;\n\n\n\n fn query(&self, query: &mut QueryRef) -> bool;\n\n\n\n fn send_event(&self, event: Event) -> bool;\n\n\n", "file_path": "gstreamer/src/element.rs", "rank": 88, "score": 161566.6970982912 }, { "content": "pub trait DeviceExt: 'static {\n\n fn create_element(&self, name: Option<&str>) -> Result<Element, glib::BoolError>;\n\n\n\n fn get_caps(&self) -> Option<Caps>;\n\n\n\n fn get_device_class(&self) -> GString;\n\n\n\n fn get_display_name(&self) -> GString;\n\n\n\n fn get_properties(&self) -> Option<Structure>;\n\n\n\n fn has_classes(&self, classes: &str) -> bool;\n\n\n\n fn has_classesv(&self, classes: &[&str]) -> bool;\n\n\n\n fn reconfigure_element<P: IsA<Element>>(\n\n &self,\n\n element: &P,\n\n ) -> Result<(), glib::error::BoolError>;\n\n\n", "file_path": "gstreamer/src/auto/device.rs", "rank": 89, "score": 161566.6970982912 }, { "content": "pub trait ClockExt: 'static {\n\n fn add_observation(&self, slave: ClockTime, master: ClockTime) -> Option<f64>;\n\n\n\n fn add_observation_unapplied(\n\n &self,\n\n slave: ClockTime,\n\n master: ClockTime,\n\n ) -> Option<(f64, ClockTime, ClockTime, ClockTime, ClockTime)>;\n\n\n\n fn adjust_unlocked(&self, internal: ClockTime) -> ClockTime;\n\n\n\n fn get_calibration(&self) -> (ClockTime, ClockTime, ClockTime, ClockTime);\n\n\n\n fn get_internal_time(&self) -> ClockTime;\n\n\n\n fn get_master(&self) -> Option<Clock>;\n\n\n\n fn get_resolution(&self) 
-> ClockTime;\n\n\n\n fn get_time(&self) -> ClockTime;\n", "file_path": "gstreamer/src/auto/clock.rs", "rank": 90, "score": 161566.6970982912 }, { "content": "pub trait AllocatorExtManual: 'static {\n\n fn alloc(\n\n &self,\n\n size: usize,\n\n params: Option<&AllocationParams>,\n\n ) -> Result<Memory, glib::BoolError>;\n\n}\n\n\n\nimpl<O: IsA<Allocator>> AllocatorExtManual for O {\n\n fn alloc(\n\n &self,\n\n size: usize,\n\n params: Option<&AllocationParams>,\n\n ) -> Result<Memory, glib::BoolError> {\n\n unsafe {\n\n let ret = gst_sys::gst_allocator_alloc(\n\n self.as_ptr() as *mut _,\n\n size,\n\n match params {\n\n Some(val) => val.as_ptr() as *mut _,\n", "file_path": "gstreamer/src/allocator.rs", "rank": 91, "score": 161566.6970982912 }, { "content": "pub trait AudioFormatIteratorExt {\n\n fn into_audio_caps(\n\n self,\n\n layout: ::AudioLayout,\n\n ) -> Option<gst::caps::Builder<gst::caps::NoFeature>>;\n\n}\n\n\n\nimpl<T> AudioFormatIteratorExt for T\n\nwhere\n\n T: Iterator<Item = ::AudioFormat>,\n\n{\n\n fn into_audio_caps(\n\n self,\n\n layout: ::AudioLayout,\n\n ) -> Option<gst::caps::Builder<gst::caps::NoFeature>> {\n\n let formats: Vec<::AudioFormat> = self.collect();\n\n if !formats.is_empty() {\n\n Some(::functions::audio_make_raw_caps(&formats, layout))\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n", "file_path": "gstreamer-audio/src/audio_format.rs", "rank": 92, "score": 161229.8503337369 }, { "content": "pub trait DeviceProviderImplExt {\n\n fn parent_probe(&self, device_provider: &DeviceProvider) -> Vec<Device>;\n\n\n\n fn parent_start(&self, device_provider: &DeviceProvider) -> Result<(), LoggableError>;\n\n\n\n fn parent_stop(&self, device_provider: &DeviceProvider);\n\n}\n\n\n\nimpl<T: DeviceProviderImpl> DeviceProviderImplExt for T {\n\n fn parent_probe(&self, device_provider: &DeviceProvider) -> Vec<Device> {\n\n unsafe {\n\n let data = T::type_data();\n\n let parent_class =\n\n data.as_ref().get_parent_class() as *mut 
gst_sys::GstDeviceProviderClass;\n\n if let Some(f) = (*parent_class).probe {\n\n FromGlibPtrContainer::from_glib_full(f(device_provider.to_glib_none().0))\n\n } else {\n\n Vec::new()\n\n }\n\n }\n", "file_path": "gstreamer/src/subclass/device_provider.rs", "rank": 93, "score": 161229.8503337369 }, { "content": "pub trait VideoFormatIteratorExt {\n\n fn into_video_caps(self) -> Option<gst::caps::Builder<gst::caps::NoFeature>>;\n\n}\n\n\n\nimpl<T> VideoFormatIteratorExt for T\n\nwhere\n\n T: Iterator<Item = ::VideoFormat>,\n\n{\n\n fn into_video_caps(self) -> Option<gst::caps::Builder<gst::caps::NoFeature>> {\n\n let formats: Vec<::VideoFormat> = self.collect();\n\n if !formats.is_empty() {\n\n Some(::functions::video_make_raw_caps(&formats))\n\n } else {\n\n None\n\n }\n\n }\n\n}\n\n\n", "file_path": "gstreamer-video/src/video_format.rs", "rank": 94, "score": 161229.8503337369 }, { "content": "pub trait EncodingProfileHasRestrictionGetter {\n\n fn get_restriction(&self) -> Option<gst::Caps>;\n\n}\n\n\n\nmacro_rules! 
declare_encoding_profile_has_restriction(\n\n ($name:ident) => {\n\n impl EncodingProfileHasRestrictionSetter for $name {\n\n fn set_restriction(&self, restriction: Option<&gst::Caps>) {\n\n let profile: &EncodingProfile = glib::object::Cast::upcast_ref(self);\n\n\n\n unsafe {\n\n let restriction = match restriction {\n\n Some(restriction) => restriction.to_glib_full(),\n\n None => gst_sys::gst_caps_new_any(),\n\n };\n\n\n\n gst_pbutils_sys::gst_encoding_profile_set_restriction(\n\n profile.to_glib_none().0,\n\n restriction,\n\n );\n", "file_path": "gstreamer-pbutils/src/encoding_profile.rs", "rank": 95, "score": 161229.8503337369 }, { "content": "pub fn debug_bin_to_dot_data<P: IsA<Bin>>(bin: &P, details: DebugGraphDetails) -> GString {\n\n skip_assert_initialized!();\n\n unsafe {\n\n from_glib_full(gst_sys::gst_debug_bin_to_dot_data(\n\n bin.as_ref().to_glib_none().0,\n\n details.to_glib(),\n\n ))\n\n }\n\n}\n\n\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 96, "score": 160082.54186157993 }, { "content": "pub fn version_string() -> GString {\n\n skip_assert_initialized!();\n\n unsafe { from_glib_full(gst_sys::gst_version_string()) }\n\n}\n", "file_path": "gstreamer/src/auto/functions.rs", "rank": 97, "score": 159940.9914122744 }, { "content": " }\n\n\n\n fn get_property_caps(&self) -> Option<Caps> {\n\n unsafe {\n\n let mut value = Value::from_type(<Caps as StaticType>::static_type());\n\n gobject_sys::g_object_get_property(\n\n self.to_glib_none().0 as *mut gobject_sys::GObject,\n\n b\"caps\\0\".as_ptr() as *const _,\n\n value.to_glib_none_mut().0,\n\n );\n\n value\n\n .get()\n\n .expect(\"Return Value for property `caps` getter\")\n\n }\n\n }\n\n\n\n fn connect_linked<F: Fn(&Self, &Pad) + Send + Sync + 'static>(&self, f: F) -> SignalHandlerId {\n\n unsafe extern \"C\" fn linked_trampoline<P, F: Fn(&P, &Pad) + Send + Sync + 'static>(\n\n this: *mut gst_sys::GstPad,\n\n peer: *mut gst_sys::GstPad,\n", "file_path": "gstreamer/src/auto/pad.rs", 
"rank": 98, "score": 51.050812992006875 }, { "content": " }\n\n }\n\n\n\n fn connect_unlinked<F: Fn(&Self, &Pad) + Send + Sync + 'static>(\n\n &self,\n\n f: F,\n\n ) -> SignalHandlerId {\n\n unsafe extern \"C\" fn unlinked_trampoline<P, F: Fn(&P, &Pad) + Send + Sync + 'static>(\n\n this: *mut gst_sys::GstPad,\n\n peer: *mut gst_sys::GstPad,\n\n f: glib_sys::gpointer,\n\n ) where\n\n P: IsA<Pad>,\n\n {\n\n let f: &F = &*(f as *const F);\n\n f(\n\n &Pad::from_glib_borrow(this).unsafe_cast_ref(),\n\n &from_glib_borrow(peer),\n\n )\n\n }\n", "file_path": "gstreamer/src/auto/pad.rs", "rank": 99, "score": 50.10235056549591 } ]
Rust
src/tests/conformance/invalid_message/reject.rs
ljedrz/ziggurat
718b5f090c0c2642dcebd0636de7fc7b3c8b844d
use std::{io, time::Duration}; use crate::{ protocol::{ message::Message, payload::{block::Block, reject::CCode, FilterAdd, FilterLoad, Inv, Version}, }, setup::node::{Action, Node}, tools::synthetic_node::{PingPongError, SyntheticNode}, }; #[tokio::test] async fn version_post_handshake() { let version = Message::Version(Version::new( "0.0.0.0:0".parse().unwrap(), "0.0.0.0:0".parse().unwrap(), )); run_test_case(version, CCode::Duplicate).await.unwrap(); } #[tokio::test] async fn verack_post_handshake() { run_test_case(Message::Verack, CCode::Duplicate) .await .unwrap(); } #[tokio::test] async fn mixed_inventory() { let genesis_block = Block::testnet_genesis(); let mixed_inv = vec![genesis_block.inv_hash(), genesis_block.txs[0].inv_hash()]; run_test_case(Message::Inv(Inv::new(mixed_inv)), CCode::Invalid) .await .unwrap(); } #[tokio::test] async fn multi_block_inventory() { let multi_block_inv = vec![ Block::testnet_genesis().inv_hash(), Block::testnet_1().inv_hash(), Block::testnet_2().inv_hash(), ]; run_test_case(Message::Inv(Inv::new(multi_block_inv)), CCode::Invalid) .await .unwrap(); } #[tokio::test] async fn bloom_filter_add() { run_test_case(Message::FilterAdd(FilterAdd::default()), CCode::Obsolete) .await .unwrap(); } #[tokio::test] async fn bloom_filter_load() { run_test_case(Message::FilterLoad(FilterLoad::default()), CCode::Obsolete) .await .unwrap(); } #[tokio::test] async fn bloom_filter_clear() { run_test_case(Message::FilterClear, CCode::Obsolete) .await .unwrap(); } async fn run_test_case(message: Message, expected_code: CCode) -> io::Result<()> { const RECV_TIMEOUT: Duration = Duration::from_secs(1); let mut node = Node::new()?; node.initial_action(Action::WaitForConnection) .start() .await?; let mut synthetic_node = SyntheticNode::builder() .with_full_handshake() .with_all_auto_reply() .build() .await?; synthetic_node.connect(node.addr()).await?; synthetic_node.send_direct_message(node.addr(), message)?; let result = match synthetic_node 
.ping_pong_timeout(node.addr(), RECV_TIMEOUT) .await { Ok(_) => Err(io::Error::new(io::ErrorKind::Other, "Message was ignored")), Err(PingPongError::Unexpected(msg)) => match *msg { Message::Reject(reject) if reject.ccode == expected_code => Ok(()), Message::Reject(reject) => { return Err(io::Error::new( io::ErrorKind::Other, format!( "Incorrect rejection ccode: {:?} instead of {:?}", reject.ccode, expected_code ), )) } unexpected => { return Err(io::Error::new( io::ErrorKind::Other, format!("Unexpected message received: {:?}", unexpected), )) } }, Err(err) => Err(err.into()), }; synthetic_node.shut_down(); node.stop()?; result }
use std::{io, time::Duration}; use crate::{ protocol::{ message::Message, payload::{block::Block, reject::CCode, FilterAdd, FilterLoad, Inv, Version}, }, setup::node::{Action, Node}, tools::synthetic_node::{PingPongError, SyntheticNode}, }; #[tokio::test] async fn version_post_handshake() { let version = Message::Version(Version::new( "0.0.0.0:0".parse().unwrap(), "0.0.0.0
T) .await { Ok(_) => Err(io::Error::new(io::ErrorKind::Other, "Message was ignored")), Err(PingPongError::Unexpected(msg)) => match *msg { Message::Reject(reject) if reject.ccode == expected_code => Ok(()), Message::Reject(reject) => { return Err(io::Error::new( io::ErrorKind::Other, format!( "Incorrect rejection ccode: {:?} instead of {:?}", reject.ccode, expected_code ), )) } unexpected => { return Err(io::Error::new( io::ErrorKind::Other, format!("Unexpected message received: {:?}", unexpected), )) } }, Err(err) => Err(err.into()), }; synthetic_node.shut_down(); node.stop()?; result }
:0".parse().unwrap(), )); run_test_case(version, CCode::Duplicate).await.unwrap(); } #[tokio::test] async fn verack_post_handshake() { run_test_case(Message::Verack, CCode::Duplicate) .await .unwrap(); } #[tokio::test] async fn mixed_inventory() { let genesis_block = Block::testnet_genesis(); let mixed_inv = vec![genesis_block.inv_hash(), genesis_block.txs[0].inv_hash()]; run_test_case(Message::Inv(Inv::new(mixed_inv)), CCode::Invalid) .await .unwrap(); } #[tokio::test] async fn multi_block_inventory() { let multi_block_inv = vec![ Block::testnet_genesis().inv_hash(), Block::testnet_1().inv_hash(), Block::testnet_2().inv_hash(), ]; run_test_case(Message::Inv(Inv::new(multi_block_inv)), CCode::Invalid) .await .unwrap(); } #[tokio::test] async fn bloom_filter_add() { run_test_case(Message::FilterAdd(FilterAdd::default()), CCode::Obsolete) .await .unwrap(); } #[tokio::test] async fn bloom_filter_load() { run_test_case(Message::FilterLoad(FilterLoad::default()), CCode::Obsolete) .await .unwrap(); } #[tokio::test] async fn bloom_filter_clear() { run_test_case(Message::FilterClear, CCode::Obsolete) .await .unwrap(); } async fn run_test_case(message: Message, expected_code: CCode) -> io::Result<()> { const RECV_TIMEOUT: Duration = Duration::from_secs(1); let mut node = Node::new()?; node.initial_action(Action::WaitForConnection) .start() .await?; let mut synthetic_node = SyntheticNode::builder() .with_full_handshake() .with_all_auto_reply() .build() .await?; synthetic_node.connect(node.addr()).await?; synthetic_node.send_direct_message(node.addr(), message)?; let result = match synthetic_node .ping_pong_timeout(node.addr(), RECV_TIMEOU
random
[ { "content": "/// Enables tracing for all [`SyntheticNode`] instances (usually scoped by test).\n\npub fn enable_tracing() {\n\n use tracing_subscriber::{fmt, EnvFilter};\n\n\n\n fmt()\n\n .with_test_writer()\n\n .with_env_filter(EnvFilter::from_default_env())\n\n .init();\n\n}\n\n\n\n/// Describes the handshake to be performed by a [`SyntheticNode`].\n\n#[derive(Debug, Clone, Copy, PartialEq)]\n\npub enum Handshake {\n\n /// [`Version`] and [`Verack`] in both directions.\n\n ///\n\n /// [`Version`]: enum@crate::protocol::message::Message::Version\n\n /// [`Verack`]: enum@crate::protocol::message::Message::Verack\n\n Full,\n\n /// Only [`Version`] messages are exchanged.\n\n ///\n\n /// [`Version`]: enum@crate::protocol::message::Message::Version\n", "file_path": "src/tools/synthetic_node.rs", "rank": 0, "score": 88674.83253912914 }, { "content": "fn checksum(bytes: &[u8]) -> u32 {\n\n let sha2 = Sha256::digest(bytes);\n\n let sha2d = Sha256::digest(&sha2);\n\n\n\n let mut checksum = [0u8; 4];\n\n checksum[0..4].copy_from_slice(&sha2d[0..4]);\n\n\n\n u32::from_le_bytes(checksum)\n\n}\n", "file_path": "src/protocol/message/mod.rs", "rank": 1, "score": 82634.77232783013 }, { "content": "//! 
Inventory vector types.\n\n\n\nuse crate::protocol::payload::{codec::Codec, read_n_bytes, Hash};\n\n\n\nuse std::io::{self, Cursor, Write};\n\n\n\n/// An inventory vector.\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct Inv {\n\n pub inventory: Vec<InvHash>,\n\n}\n\n\n\nimpl Inv {\n\n /// Returns a new inventory vector from the supplied hashes.\n\n pub fn new(inventory: Vec<InvHash>) -> Self {\n\n Self { inventory }\n\n }\n\n\n\n // Returns a new empty inventory vector.\n\n pub fn empty() -> Self {\n", "file_path": "src/protocol/payload/inv.rs", "rank": 2, "score": 76909.65764770178 }, { "content": " kind: ObjectKind,\n\n /// The hash of the object.\n\n hash: Hash,\n\n}\n\n\n\nimpl InvHash {\n\n /// Returns a new `InvHash` instance.\n\n pub fn new(kind: ObjectKind, hash: Hash) -> Self {\n\n Self { kind, hash }\n\n }\n\n}\n\n\n\nimpl Codec for InvHash {\n\n fn encode(&self, buffer: &mut Vec<u8>) -> io::Result<()> {\n\n self.kind.encode(buffer)?;\n\n self.hash.encode(buffer)?;\n\n\n\n Ok(())\n\n }\n\n\n", "file_path": "src/protocol/payload/inv.rs", "rank": 3, "score": 76899.1699758537 }, { "content": " Self::new(Vec::new())\n\n }\n\n}\n\n\n\nimpl Codec for Inv {\n\n fn encode(&self, buffer: &mut Vec<u8>) -> io::Result<()> {\n\n self.inventory.encode(buffer)\n\n }\n\n\n\n fn decode(bytes: &mut Cursor<&[u8]>) -> io::Result<Self> {\n\n Ok(Self {\n\n inventory: Vec::decode(bytes)?,\n\n })\n\n }\n\n}\n\n\n\n/// An inventory hash.\n\n#[derive(Debug, PartialEq, Copy, Clone)]\n\npub struct InvHash {\n\n /// The object type linked to this inventory.\n", "file_path": "src/protocol/payload/inv.rs", "rank": 4, "score": 76898.68700952955 }, { "content": "\n\nimpl Codec for ObjectKind {\n\n fn encode(&self, buffer: &mut Vec<u8>) -> io::Result<()> {\n\n let value: u32 = match self {\n\n Self::Error => 0,\n\n Self::Tx => 1,\n\n Self::Block => 2,\n\n Self::FilteredBlock => 3,\n\n };\n\n\n\n buffer.write_all(&value.to_le_bytes())?;\n\n\n\n Ok(())\n\n }\n\n\n\n fn decode(bytes: 
&mut Cursor<&[u8]>) -> io::Result<Self> {\n\n let value = u32::from_le_bytes(read_n_bytes(bytes)?);\n\n\n\n let kind = match value {\n\n 0 => Self::Error,\n", "file_path": "src/protocol/payload/inv.rs", "rank": 5, "score": 76894.36974516333 }, { "content": " 1 => Self::Tx,\n\n 2 => Self::Block,\n\n 3 => Self::FilteredBlock,\n\n _ => {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n \"ObjectKind is not known\",\n\n ))\n\n }\n\n };\n\n\n\n Ok(kind)\n\n }\n\n}\n", "file_path": "src/protocol/payload/inv.rs", "rank": 6, "score": 76894.36974516333 }, { "content": " fn decode(bytes: &mut Cursor<&[u8]>) -> io::Result<Self> {\n\n let kind = ObjectKind::decode(bytes)?;\n\n let hash = Hash::decode(bytes)?;\n\n\n\n Ok(Self { kind, hash })\n\n }\n\n}\n\n\n\n/// The inventory object kind.\n\n#[derive(Debug, PartialEq, Clone, Copy)]\n\npub enum ObjectKind {\n\n /// Any data of this kind may be ignored.\n\n Error,\n\n /// The hash is that of a transaction.\n\n Tx,\n\n /// The hash is that of a block.\n\n Block,\n\n /// The hash is that of a block header.\n\n FilteredBlock,\n\n}\n", "file_path": "src/protocol/payload/inv.rs", "rank": 7, "score": 76894.36974516333 }, { "content": "//! 
Version payload types.\n\n\n\nuse crate::protocol::payload::{\n\n addr::NetworkAddr, codec::Codec, read_n_bytes, read_timestamp, Nonce, ProtocolVersion, VarStr,\n\n};\n\n\n\nuse chrono::{DateTime, Utc};\n\n\n\nuse std::{\n\n io::{self, Cursor, Write},\n\n net::SocketAddr,\n\n};\n\n\n\n/// A version payload.\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct Version {\n\n /// The protocol version of the sender.\n\n pub version: ProtocolVersion,\n\n /// The services supported by the sender.\n\n pub services: u64,\n", "file_path": "src/protocol/payload/version.rs", "rank": 8, "score": 76631.57645320489 }, { "content": "\n\n /// Sets the protocol version.\n\n pub fn with_version(mut self, version: u32) -> Self {\n\n self.version = ProtocolVersion(version);\n\n self\n\n }\n\n}\n\n\n\nimpl Codec for Version {\n\n fn encode(&self, buffer: &mut Vec<u8>) -> io::Result<()> {\n\n self.version.encode(buffer)?;\n\n buffer.write_all(&self.services.to_le_bytes())?;\n\n buffer.write_all(&self.timestamp.timestamp().to_le_bytes())?;\n\n\n\n self.addr_recv.encode_without_timestamp(buffer)?;\n\n self.addr_from.encode_without_timestamp(buffer)?;\n\n\n\n self.nonce.encode(buffer)?;\n\n self.user_agent.encode(buffer)?;\n\n buffer.write_all(&self.start_height.to_le_bytes())?;\n", "file_path": "src/protocol/payload/version.rs", "rank": 9, "score": 76622.98226275518 }, { "content": " Self {\n\n version: ProtocolVersion::current(),\n\n services: 1,\n\n timestamp: Utc::now(),\n\n addr_recv: NetworkAddr {\n\n last_seen: None,\n\n services: 1,\n\n addr: addr_recv,\n\n },\n\n addr_from: NetworkAddr {\n\n last_seen: None,\n\n services: 1,\n\n addr: addr_from,\n\n },\n\n nonce: Nonce::default(),\n\n user_agent: VarStr(String::from(\"\")),\n\n start_height: 0,\n\n relay: false,\n\n }\n\n }\n", "file_path": "src/protocol/payload/version.rs", "rank": 10, "score": 76621.33337235262 }, { "content": " /// The timestamp of the message.\n\n pub timestamp: DateTime<Utc>,\n\n /// The receiving address of 
the message.\n\n pub addr_recv: NetworkAddr,\n\n /// The sender of the message.\n\n pub addr_from: NetworkAddr,\n\n /// The nonce associated with this message.\n\n pub nonce: Nonce,\n\n /// The user agent of the sender.\n\n pub user_agent: VarStr,\n\n /// The start last block received by the sender.\n\n pub start_height: u32,\n\n /// Specifies if the receiver should relay transactions.\n\n pub relay: bool,\n\n}\n\n\n\nimpl Version {\n\n /// Constructs a `Version`, where `addr_recv` is the remote `zcashd`/`zebra` node address and\n\n /// `addr_from` is our local node address.\n\n pub fn new(addr_recv: SocketAddr, addr_from: SocketAddr) -> Self {\n", "file_path": "src/protocol/payload/version.rs", "rank": 11, "score": 76620.85366267445 }, { "content": " buffer.write_all(&[self.relay as u8])?;\n\n\n\n Ok(())\n\n }\n\n\n\n fn decode(bytes: &mut Cursor<&[u8]>) -> io::Result<Self> {\n\n let version = ProtocolVersion::decode(bytes)?;\n\n let services = u64::from_le_bytes(read_n_bytes(bytes)?);\n\n let timestamp = read_timestamp(bytes)?;\n\n\n\n let addr_recv = NetworkAddr::decode_without_timestamp(bytes)?;\n\n let addr_from = NetworkAddr::decode_without_timestamp(bytes)?;\n\n\n\n let nonce = Nonce::decode(bytes)?;\n\n let user_agent = VarStr::decode(bytes)?;\n\n\n\n let start_height = u32::from_le_bytes(read_n_bytes(bytes)?);\n\n let relay = u8::from_le_bytes(read_n_bytes(bytes)?) 
!= 0;\n\n\n\n Ok(Self {\n", "file_path": "src/protocol/payload/version.rs", "rank": 12, "score": 76620.26353651697 }, { "content": " version,\n\n services,\n\n timestamp,\n\n addr_recv,\n\n addr_from,\n\n nonce,\n\n user_agent,\n\n start_height,\n\n relay,\n\n })\n\n }\n\n}\n", "file_path": "src/protocol/payload/version.rs", "rank": 13, "score": 76617.76490791196 }, { "content": "/// Reads a timestamp from the bytes.\n\npub fn read_timestamp(bytes: &mut Cursor<&[u8]>) -> io::Result<DateTime<Utc>> {\n\n let timestamp_i64 = i64::from_le_bytes(read_n_bytes(bytes)?);\n\n let timestamp = NaiveDateTime::from_timestamp_opt(timestamp_i64, 0)\n\n .ok_or_else(|| io::Error::new(io::ErrorKind::InvalidData, \"Bad UTC timestamp\"))?;\n\n Ok(DateTime::<Utc>::from_utc(timestamp, Utc))\n\n}\n", "file_path": "src/protocol/payload/mod.rs", "rank": 14, "score": 65524.55485209823 }, { "content": "/// Reads `n` bytes from the bytes.\n\npub fn read_n_bytes<const N: usize>(bytes: &mut Cursor<&[u8]>) -> io::Result<[u8; N]> {\n\n let mut buffer = [0u8; N];\n\n bytes.read_exact(&mut buffer)?;\n\n\n\n Ok(buffer)\n\n}\n\n\n", "file_path": "src/protocol/payload/mod.rs", "rank": 15, "score": 61743.52774038093 }, { "content": "/// Removes all previously registered metrics.\n\npub fn clear() {\n\n SIMPLE_RECORDER.counters.lock().clear();\n\n SIMPLE_RECORDER.gauges.lock().clear();\n\n SIMPLE_RECORDER.histograms.lock().clear();\n\n}\n", "file_path": "src/tools/metrics/recorder.rs", "rank": 16, "score": 54149.62999035849 }, { "content": "/// Corrupts `n` messages from the supplied set by replacing a random number of bytes with random bytes.\n\npub fn encode_slightly_corrupted_messages(\n\n rng: &mut ChaCha8Rng,\n\n n: usize,\n\n messages: &[Message],\n\n) -> Vec<Vec<u8>> {\n\n (0..n)\n\n .map(|_| {\n\n let message = messages.choose(rng).unwrap();\n\n corrupt_message(rng, message)\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/tools/fuzzing.rs", "rank": 17, "score": 52138.020398127774 }, { 
"content": "/// Picks `n` random messages from `message_pool`, encodes them and corrupts the checksum bytes.\n\npub fn encode_messages_with_corrupt_checksum(\n\n rng: &mut ChaCha8Rng,\n\n n: usize,\n\n message_pool: &[Message],\n\n) -> Vec<Vec<u8>> {\n\n (0..n)\n\n .map(|_| {\n\n let message = message_pool.choose(rng).unwrap();\n\n\n\n encode_message_with_corrupt_checksum(rng, message)\n\n })\n\n .collect()\n\n}\n", "file_path": "src/tools/fuzzing.rs", "rank": 18, "score": 52138.020398127774 }, { "content": "/// Returns a message with a valid header and payload of random bytes.\n\npub fn metadata_compliant_random_bytes(\n\n rng: &mut ChaCha8Rng,\n\n n: usize,\n\n commands: &[[u8; 12]],\n\n) -> Vec<Vec<u8>> {\n\n (0..n)\n\n .map(|_| {\n\n let random_len: usize = rng.gen_range(1..(64 * 1024));\n\n let mut random_payload: Vec<u8> = rng.sample_iter(Standard).take(random_len).collect();\n\n\n\n let command = commands.choose(rng).unwrap();\n\n let header = MessageHeader::new(*command, &random_payload);\n\n\n\n let mut buffer = Vec::with_capacity(HEADER_LEN + random_payload.len());\n\n header.encode(&mut buffer).unwrap();\n\n buffer.append(&mut random_payload);\n\n\n\n buffer\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/tools/fuzzing.rs", "rank": 19, "score": 52138.020398127774 }, { "content": "/// Picks `n` random messages from `message_pool`, encodes them and corrupts the body length bytes.\n\npub fn encode_messages_with_corrupt_body_length(\n\n rng: &mut ChaCha8Rng,\n\n n: usize,\n\n message_pool: &[Message],\n\n) -> Vec<Vec<u8>> {\n\n (0..n)\n\n .map(|_| {\n\n let message = message_pool.choose(rng).unwrap();\n\n\n\n encode_message_with_corrupt_body_length(rng, message)\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/tools/fuzzing.rs", "rank": 20, "score": 51212.47169086785 }, { "content": "/// Returns a randomly seeded `ChaCha8Rng` instance, useful for making tests reproducible.\n\npub fn seeded_rng() -> ChaCha8Rng {\n\n let mut seed: <ChaCha8Rng as 
SeedableRng>::Seed = Default::default();\n\n thread_rng().fill(&mut seed);\n\n\n\n // We print the seed for reproducibility.\n\n println!(\"Seed for RNG: {:?}\", seed);\n\n\n\n // Isn't cryptographically secure but adequate enough as a general source of seeded randomness.\n\n ChaCha8Rng::from_seed(seed)\n\n}\n\n\n", "file_path": "src/tools/fuzzing.rs", "rank": 21, "score": 49600.932016930696 }, { "content": "/// Returns the set of messages used for fuzz-testing.\n\n/// This notably excludes [`Message::Version`] because it is\n\n/// usually tested separately.\n\npub fn default_fuzz_messages() -> Vec<Message> {\n\n vec![\n\n Message::Version(Version::new(\n\n SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), 0),\n\n SocketAddr::new(IpAddr::V4(Ipv4Addr::LOCALHOST), 0),\n\n )),\n\n Message::MemPool,\n\n Message::Verack,\n\n Message::Ping(Nonce::default()),\n\n Message::Pong(Nonce::default()),\n\n Message::GetAddr,\n\n Message::Addr(Addr::empty()),\n\n Message::Headers(Headers::empty()),\n\n Message::GetHeaders(LocatorHashes::empty()),\n\n Message::GetBlocks(LocatorHashes::empty()),\n\n Message::GetData(Inv::empty()),\n\n Message::Inv(Inv::empty()),\n\n Message::NotFound(Inv::empty()),\n\n ]\n\n}\n\n\n", "file_path": "src/tools/fuzzing.rs", "rank": 22, "score": 48264.695422560624 }, { "content": "// A list of valid queries and their expected responses\n\n//\n\n// This list is intentionally kept small - only simple and working\n\n// query / response pairs are implemented.\n\nfn valid_queries_responses() -> Vec<(Message, Message)> {\n\n let nonce = Nonce::default();\n\n\n\n let block_1 = Block::testnet_1();\n\n let block_2 = Block::testnet_2();\n\n\n\n vec![\n\n (Message::Ping(nonce), Message::Pong(nonce)),\n\n (\n\n Message::GetHeaders(LocatorHashes::new(\n\n vec![block_1.double_sha256().unwrap()],\n\n Hash::zeroed(),\n\n )),\n\n Message::Headers(Headers::new(vec![block_2.header.clone()])),\n\n ),\n\n (\n\n Message::GetBlocks(LocatorHashes::new(\n\n 
vec![block_1.double_sha256().unwrap()],\n\n Hash::zeroed(),\n\n )),\n\n Message::Inv(Inv::new(vec![block_2.inv_hash()])),\n\n ),\n\n (\n\n Message::GetData(Inv::new(vec![block_1.inv_hash()])),\n\n Message::Block(Box::new(block_1)),\n\n ),\n\n ]\n\n}\n\n\n", "file_path": "src/tests/resistance/stress_test.rs", "rank": 23, "score": 46545.537337340014 }, { "content": "/// Formats `f64` with 2 decimal points.\n\npub fn table_float_display(x: &f64) -> String {\n\n format!(\"{0:.2}\", x)\n\n}\n\n\n", "file_path": "src/tools/metrics/tables.rs", "rank": 24, "score": 46249.0359243533 }, { "content": "/// Formats a table with our custom style.\n\n///\n\n/// Modifications:\n\n/// - [pseudo style](https://docs.rs/tabled/0.2.1/tabled/style/struct.Style.html#method.pseudo)\n\n/// - centered headers\n\n/// - right aligned data\n\npub fn fmt_table(table: Table) -> String {\n\n // table with pseudo style, right aligned data and center aligned headers\n\n table\n\n .with(Style::pseudo())\n\n .with(Modify::new(tabled::Full).with(Alignment::right()))\n\n .with(Modify::new(tabled::Head).with(Alignment::center_horizontal()))\n\n .to_string()\n\n}\n", "file_path": "src/tools/metrics/tables.rs", "rank": 25, "score": 46249.0359243533 }, { "content": "/// Returns the duration converted to milliseconds.\n\npub fn duration_as_ms(duration: Duration) -> f64 {\n\n duration.as_millis() as f64\n\n}\n\n\n", "file_path": "src/tools/metrics/tables.rs", "rank": 26, "score": 46249.0359243533 }, { "content": "#[derive(Clone)]\n\nstruct InnerNode {\n\n node: Node,\n\n handshake: Option<Handshake>,\n\n inbound_tx: Sender<(SocketAddr, Message)>,\n\n message_filter: MessageFilter,\n\n}\n\n\n\nimpl InnerNode {\n\n fn new(\n\n node: Node,\n\n tx: Sender<(SocketAddr, Message)>,\n\n message_filter: MessageFilter,\n\n handshake: Option<Handshake>,\n\n ) -> Self {\n\n let node = Self {\n\n node,\n\n inbound_tx: tx,\n\n message_filter,\n\n handshake,\n\n };\n", "file_path": "src/tools/synthetic_node.rs", "rank": 
27, "score": 45976.89926849906 }, { "content": "/// Enables the [`SimpleRecorder`] as the\n\n/// [`metrics::Recorder`](https://docs.rs/metrics/0.16.0/metrics/trait.Recorder.html) sink.\n\npub fn enable_simple_recorder() -> Result<(), SetRecorderError> {\n\n // FIXME: This is a work-around while we don't have a test-runner\n\n // which can set this globally. Currently we are calling this\n\n // from every test which requires metrics. This will cause an\n\n // error when called multiple times.\n\n //\n\n // The correct implementation will pass on the result of metric::set_recorder\n\n // instead of masking it.\n\n let _ = metrics::set_recorder(&*SIMPLE_RECORDER);\n\n Ok(())\n\n}\n\n\n", "file_path": "src/tools/metrics/recorder.rs", "rank": 28, "score": 45753.323718124186 }, { "content": "/// Map of all gauges recorded.\n\npub fn gauges() -> Arc<Mutex<HashMap<Key, Gauge>>> {\n\n SIMPLE_RECORDER.gauges.clone()\n\n}\n\n\n", "file_path": "src/tools/metrics/recorder.rs", "rank": 29, "score": 42824.165839059 }, { "content": "/// Map of all histograms recorded.\n\npub fn histograms() -> Arc<Mutex<HashMap<Key, Histogram>>> {\n\n SIMPLE_RECORDER.histograms.clone()\n\n}\n\n\n", "file_path": "src/tools/metrics/recorder.rs", "rank": 30, "score": 42824.165839059 }, { "content": "/// Map of all counters recorded.\n\npub fn counters() -> Arc<Mutex<HashMap<Key, Counter>>> {\n\n SIMPLE_RECORDER.counters.clone()\n\n}\n\n\n", "file_path": "src/tools/metrics/recorder.rs", "rank": 31, "score": 42824.165839059 }, { "content": "/// Returns a random u32 which isn't the supplied value.\n\nfn random_non_valid_u32(rng: &mut ChaCha8Rng, value: u32) -> u32 {\n\n // Make sure the generated value isn't the same.\n\n let random_value = rng.gen();\n\n if value != random_value {\n\n random_value\n\n } else {\n\n random_value + 1\n\n }\n\n}\n\n\n", "file_path": "src/tools/fuzzing.rs", "rank": 32, "score": 40665.57879711718 }, { "content": "fn corrupt_message(rng: &mut ChaCha8Rng, message: &Message) -> 
Vec<u8> {\n\n let mut message_buffer = vec![];\n\n let header = message.encode(&mut message_buffer).unwrap();\n\n let mut header_buffer = vec![];\n\n header.encode(&mut header_buffer).unwrap();\n\n\n\n let mut corrupted_header = corrupt_bytes(rng, &header_buffer);\n\n let mut corrupted_message = corrupt_bytes(rng, &message_buffer);\n\n\n\n corrupted_header.append(&mut corrupted_message);\n\n\n\n // Contains header + message.\n\n corrupted_header\n\n}\n\n\n", "file_path": "src/tools/fuzzing.rs", "rank": 33, "score": 40623.199209210725 }, { "content": "fn corrupt_bytes(rng: &mut ChaCha8Rng, serialized: &[u8]) -> Vec<u8> {\n\n serialized\n\n .iter()\n\n .map(|byte| {\n\n if rng.gen_bool(CORRUPTION_PROBABILITY) {\n\n rng.gen()\n\n } else {\n\n *byte\n\n }\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/tools/fuzzing.rs", "rank": 34, "score": 40623.199209210725 }, { "content": "//! High level APIs and types for node setup and teardown.\n\n\n\nuse tracing::error;\n\n\n\nuse crate::{\n\n protocol::payload::{\n\n block::{Block, Headers},\n\n Hash, Inv,\n\n },\n\n setup::config::{NodeConfig, NodeKind, NodeMetaData, ZcashdConfigFile, ZebraConfigFile},\n\n tools::{\n\n message_filter::{Filter, MessageFilter},\n\n synthetic_node::SyntheticNode,\n\n },\n\n wait_until,\n\n};\n\n\n\nuse std::process::{Child, Command};\n\n\n\nuse std::{fs, io, net::SocketAddr, process::Stdio, time::Duration};\n", "file_path": "src/setup/node.rs", "rank": 35, "score": 40078.111238943704 }, { "content": " }\n\n\n\n Ok(())\n\n }\n\n\n\n async fn perform_initial_action(&self, mut synthetic_node: SyntheticNode) -> io::Result<()> {\n\n const TIMEOUT: Duration = Duration::from_secs(10);\n\n\n\n match self.config.initial_action {\n\n Action::None => {}\n\n Action::WaitForConnection => {\n\n // The synthetic node will accept the connection and handshake by itself.\n\n wait_until!(TIMEOUT, synthetic_node.num_connected() == 1);\n\n }\n\n Action::SeedWithTestnetBlocks(_) if self.meta.kind == 
NodeKind::Zebra => {\n\n unimplemented!(\"zebra doesn't support block seeding\");\n\n }\n\n Action::SeedWithTestnetBlocks(block_count) => {\n\n use crate::protocol::message::Message;\n\n\n", "file_path": "src/setup/node.rs", "rank": 36, "score": 40072.71966662495 }, { "content": " self.generate_config_file()?;\n\n\n\n let (stdout, stderr) = match self.config.log_to_stdout {\n\n true => (Stdio::inherit(), Stdio::inherit()),\n\n false => (Stdio::null(), Stdio::null()),\n\n };\n\n\n\n let process = Command::new(&self.meta.start_command)\n\n .current_dir(&self.meta.path)\n\n .args(&self.meta.start_args)\n\n .stdin(Stdio::null())\n\n .stdout(stdout)\n\n .stderr(stderr)\n\n .spawn()\n\n .expect(\"node failed to start\");\n\n\n\n self.process = Some(process);\n\n\n\n if let Some(synthetic_node) = synthetic_node {\n\n self.perform_initial_action(synthetic_node).await?;\n", "file_path": "src/setup/node.rs", "rank": 37, "score": 40066.01906016581 }, { "content": " let expected = Inv::new(inv_hashes);\n\n assert_eq!(inv, expected);\n\n\n\n // Send the blocks\n\n for block in blocks {\n\n synthetic_node\n\n .send_direct_message(source, Message::Block(Box::new(block)))?;\n\n }\n\n }\n\n\n\n (_, msg) => {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\"Expected GetData but got: {:?}\", msg),\n\n ))\n\n }\n\n }\n\n\n\n // Check that the node has received and processed all previous messages.\n\n synthetic_node.ping_pong_timeout(source, TIMEOUT).await?;\n", "file_path": "src/setup/node.rs", "rank": 38, "score": 40065.18701924449 }, { "content": " let headers = blocks.iter().map(|block| block.header.clone()).collect();\n\n synthetic_node\n\n .send_direct_message(source, Message::Headers(Headers::new(headers)))?;\n\n\n\n source\n\n }\n\n\n\n (_, msg) => {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\"Expected GetHeaders but got: {:?}\", msg),\n\n ));\n\n }\n\n };\n\n\n\n // respond to GetData(inv) for the initial blocks\n\n 
match synthetic_node.recv_message_timeout(TIMEOUT).await? {\n\n (source, Message::GetData(inv)) => {\n\n // The request must be for the initial blocks\n\n let inv_hashes = blocks.iter().map(|block| block.inv_hash()).collect();\n", "file_path": "src/setup/node.rs", "rank": 39, "score": 40064.60343917083 }, { "content": "\n\n/// Represents an instance of a node, its configuration and setup/teardown intricacies.\n\npub struct Node {\n\n /// Configuration definable in tests and written to the node's configuration file on start.\n\n config: NodeConfig,\n\n /// Type, path to binary, various commands for starting, stopping, cleanup, network\n\n /// configuration.\n\n meta: NodeMetaData,\n\n /// Process of the running node.\n\n process: Option<Child>,\n\n}\n\n\n\nimpl Node {\n\n /// Creates a new [`Node`] instance.\n\n ///\n\n /// Once created, it can be configured with calls to [`initial_peers`], [`max_peers`] and [`log_to_stdout`].\n\n ///\n\n /// [`Node`]: struct@Node\n\n /// [`NodeMetaData`]: struct@crate::setup::config::NodeMetaData\n\n /// [`initial_peers`]: method@Node::initial_peers\n", "file_path": "src/setup/node.rs", "rank": 40, "score": 40064.26787519264 }, { "content": "\n\n/// Actions to prepare node state on start.\n\npub enum Action {\n\n /// Performs no action\n\n None,\n\n /// Waits for the node to connect to a local listener, connection is then terminated.\n\n /// This is useful for indicating that the node has started and is available for\n\n /// other connections.\n\n WaitForConnection,\n\n /// Seeds the node with `n` blocks from the testnet chain, by connecting from a local socket\n\n /// and sending the appropriate data. After this, the connection is terminated.\n\n ///\n\n /// **Warning**: this currently only works for zcashd type nodes, for zebra the behaviour\n\n /// is equivalent to WaitForConnection.\n\n SeedWithTestnetBlocks(\n\n /// The number of initial testnet blocks to seed. 
Note that this is capped by the number of blocks available\n\n /// from [Block::initial_testnet_blocks].\n\n usize,\n\n ),\n\n}\n", "file_path": "src/setup/node.rs", "rank": 41, "score": 40063.78285099955 }, { "content": "\n\n /// Sets the initial action to undertake once the node has started. See [`Action`] for more\n\n /// information on what the actions pertain.\n\n pub fn initial_action(&mut self, action: Action) -> &mut Self {\n\n self.config.initial_action = action;\n\n self\n\n }\n\n\n\n /// Starts the node instance.\n\n ///\n\n /// This function will write the appropriate configuration file and run the start command\n\n /// provided in `config.toml`.\n\n pub async fn start(&mut self) -> io::Result<()> {\n\n // cleanup any previous runs (node.stop won't always be reached e.g. test panics, or SIGINT)\n\n self.cleanup()?;\n\n\n\n // Setup the listener if there is some initial action required\n\n let synthetic_node = match self.config.initial_action {\n\n Action::None => None,\n\n Action::WaitForConnection | Action::SeedWithTestnetBlocks(_) => {\n", "file_path": "src/setup/node.rs", "rank": 42, "score": 40063.2397310248 }, { "content": " ///\n\n /// The ip used to construct the addresses can be optionally set in the configuration file and\n\n /// otherwise defaults to localhost.\n\n pub fn initial_peers(&mut self, peers: Vec<SocketAddr>) -> &mut Self {\n\n self.config.initial_peers = peers.iter().map(|addr| format!(\"{}\", addr)).collect();\n\n\n\n self\n\n }\n\n\n\n /// Sets the maximum connection value for the node.\n\n pub fn max_peers(&mut self, max_peers: usize) -> &mut Self {\n\n self.config.max_peers = max_peers;\n\n self\n\n }\n\n\n\n /// Sets whether to log the node's output to Ziggurat's output stream.\n\n pub fn log_to_stdout(&mut self, log_to_stdout: bool) -> &mut Self {\n\n self.config.log_to_stdout = log_to_stdout;\n\n self\n\n }\n", "file_path": "src/setup/node.rs", "rank": 43, "score": 40062.59034706937 }, { "content": " /// [`max_peers`]: 
method@Node::max_peers\n\n /// [`log_to_stdout`]: method@Node::log_to_stdout\n\n pub fn new() -> io::Result<Self> {\n\n // Config (to be written to node configuration file).\n\n let config = NodeConfig::new()?;\n\n let meta = NodeMetaData::new(config.path.clone())?;\n\n\n\n Ok(Self {\n\n config,\n\n meta,\n\n process: None,\n\n })\n\n }\n\n\n\n /// Returns the (external) address of the node.\n\n pub fn addr(&self) -> SocketAddr {\n\n self.config.local_addr\n\n }\n\n\n\n /// Sets the initial peers (ports only) for the node.\n", "file_path": "src/setup/node.rs", "rank": 44, "score": 40061.21667733004 }, { "content": " // Start a synthetic node to perform the initial actions.\n\n let synthetic_node = SyntheticNode::builder()\n\n .with_full_handshake()\n\n .with_message_filter(\n\n MessageFilter::with_all_auto_reply()\n\n .with_getheaders_filter(Filter::Disabled)\n\n .with_getdata_filter(Filter::Disabled),\n\n )\n\n .build()\n\n .await?;\n\n\n\n self.config\n\n .initial_peers\n\n .insert(synthetic_node.listening_addr().to_string());\n\n\n\n Some(synthetic_node)\n\n }\n\n };\n\n\n\n // Generate config files for Zebra or Zcashd node.\n", "file_path": "src/setup/node.rs", "rank": 45, "score": 40061.20310650398 }, { "content": " }\n\n }\n\n\n\n // Setup is complete, we no longer require this synthetic node.\n\n synthetic_node.shut_down();\n\n\n\n Ok(())\n\n }\n\n\n\n /// Stops the node instance.\n\n ///\n\n /// The stop command will only be run if provided in the `config.toml` file as it may not be\n\n /// necessary to shutdown a node (killing the process is sometimes sufficient).\n\n pub fn stop(&mut self) -> io::Result<()> {\n\n if let Some(mut child) = self.process.take() {\n\n // Stop node process, and check for crash\n\n // (needs to happen before cleanup)\n\n let crashed = match child.try_wait()? 
{\n\n None => {\n\n child.kill()?;\n", "file_path": "src/setup/node.rs", "rank": 46, "score": 40061.014323105985 }, { "content": "\n\nimpl Drop for Node {\n\n fn drop(&mut self) {\n\n // We should not panic in Drop\n\n if let Err(err) = self.stop() {\n\n error!(\"Failed to stop node: {}\", err);\n\n }\n\n }\n\n}\n", "file_path": "src/setup/node.rs", "rank": 47, "score": 40060.81244469304 }, { "content": "\n\n fn generate_config_file(&self) -> io::Result<()> {\n\n let config_file_path = self.meta.kind.config_filepath(&self.config.path);\n\n let content = match self.meta.kind {\n\n NodeKind::Zebra => ZebraConfigFile::generate(&self.config)\n\n .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?,\n\n NodeKind::Zcashd => ZcashdConfigFile::generate(&self.config),\n\n };\n\n\n\n fs::write(config_file_path, content)\n\n }\n\n\n\n fn cleanup(&self) -> io::Result<()> {\n\n self.cleanup_config_file()?;\n\n self.cleanup_cache()\n\n }\n\n\n\n fn cleanup_config_file(&self) -> io::Result<()> {\n\n let path = self.meta.kind.config_filepath(&self.config.path);\n\n match fs::remove_file(path) {\n", "file_path": "src/setup/node.rs", "rank": 48, "score": 40059.78472976149 }, { "content": " None\n\n }\n\n Some(exit_code) if exit_code.success() => {\n\n Some(\"but exited successfully somehow\".to_string())\n\n }\n\n Some(exit_code) => Some(format!(\"crashed with {}\", exit_code)),\n\n };\n\n\n\n self.cleanup()?;\n\n\n\n if let Some(crash_msg) = crashed {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidData,\n\n format!(\"Node exited early, {}\", crash_msg),\n\n ));\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n", "file_path": "src/setup/node.rs", "rank": 49, "score": 40059.51638402484 }, { "content": " // File may not exist, so we suppress the error.\n\n Err(e) if e.kind() != std::io::ErrorKind::NotFound => Err(e),\n\n _ => Ok(()),\n\n }\n\n }\n\n\n\n fn cleanup_cache(&self) -> io::Result<()> {\n\n // Zebra doesn't currently use a cache as it's configured in ephemeral mode.\n\n 
if let Some(path) = self.meta.kind.cache_path(&self.config.path) {\n\n if let Err(e) = fs::remove_dir_all(path) {\n\n // Directory may not exist, so we let that error through\n\n if e.kind() != std::io::ErrorKind::NotFound {\n\n return Err(e);\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/setup/node.rs", "rank": 50, "score": 40059.44184838574 }, { "content": " let genesis_block = Block::testnet_genesis();\n\n // initial blocks, skipping genesis as it doesn't get sent\n\n let blocks = Block::initial_testnet_blocks()\n\n .into_iter()\n\n .take(block_count)\n\n .skip(1)\n\n .collect::<Vec<_>>();\n\n\n\n // respond to GetHeaders(Block[0])\n\n let source = match synthetic_node.recv_message_timeout(TIMEOUT).await? {\n\n (source, Message::GetHeaders(locations)) => {\n\n // The request should be from the genesis hash onwards,\n\n // i.e. locator_hash = [genesis.hash], stop_hash = [0]\n\n assert_eq!(\n\n locations.block_locator_hashes,\n\n vec![genesis_block.double_sha256().unwrap()]\n\n );\n\n assert_eq!(locations.hash_stop, Hash::zeroed());\n\n\n\n // Reply with headers for the initial block headers\n", "file_path": "src/setup/node.rs", "rank": 51, "score": 40058.80766346723 }, { "content": "fn is_valid_message_bytes(bytes: &mut std::io::Cursor<&[u8]>) -> bool {\n\n let header = match MessageHeader::decode(bytes) {\n\n Ok(header) => header,\n\n Err(_) => return false,\n\n };\n\n\n\n // check magic\n\n if header.magic != MAGIC {\n\n return false;\n\n }\n\n\n\n Message::decode(header.command, bytes).is_ok()\n\n}\n\n\n\nasync fn simulate_peer(\n\n node_addr: SocketAddr,\n\n message_pairs: Vec<(Message, Message)>,\n\n corrupt_message: Vec<u8>,\n\n) {\n\n const READ_TIMEOUT: Duration = Duration::from_secs(2);\n", "file_path": "src/tests/resistance/stress_test.rs", "rank": 52, "score": 39937.45302014273 }, { "content": "/// Returns `n` random length sets of zeroes.\n\npub fn zeroes(rng: &mut ChaCha8Rng, n: usize) -> Vec<Vec<u8>> {\n\n (0..n)\n\n .map(|_| 
{\n\n let random_len: usize = rng.gen_range(1..(MAX_MESSAGE_LEN * 2));\n\n vec![0u8; random_len]\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/tools/fuzzing.rs", "rank": 53, "score": 39413.329302660844 }, { "content": "//! A lightweight node implementation to be used as peers in tests.\n\n\n\nuse crate::{\n\n protocol::{\n\n message::{constants::HEADER_LEN, Message, MessageHeader},\n\n payload::{codec::Codec, Nonce, Version},\n\n },\n\n tools::message_filter::{Filter, MessageFilter},\n\n};\n\n\n\nuse assert_matches::assert_matches;\n\nuse pea2pea::{\n\n connections::ConnectionSide,\n\n protocols::{Handshaking, Reading, Writing},\n\n Connection, KnownPeers, Node, NodeConfig, Pea2Pea,\n\n};\n\nuse tokio::{\n\n sync::mpsc::{self, Receiver, Sender},\n\n time::timeout,\n\n};\n", "file_path": "src/tools/synthetic_node.rs", "rank": 54, "score": 38847.00828289354 }, { "content": " /// - the connection breaks\n\n /// - an [`io::Error`] occurs\n\n ///\n\n /// Is useful for checking a node's response to a prior query.\n\n /// - if it was ignored, this call will succeed with `Ok(())`\n\n /// - if there was a reply, it will be contained in [`Unexpected`](PingPongError::Unexpected)\n\n /// - and [`ConnectionAborted`](PingPongError::ConnectionAborted) if the connection was terminated -\n\n ///\n\n /// [`Ping`]: enum@crate::protocol::message::Message::Ping\n\n /// [`Pong`]: enum@crate::protocol::message::Message::Pong\n\n /// [`Nonce`]: struct@crate::protocol::payload::Nonce\n\n pub async fn ping_pong_timeout(\n\n &mut self,\n\n target: SocketAddr,\n\n duration: Duration,\n\n ) -> Result<(), PingPongError> {\n\n const SLEEP: Duration = Duration::from_millis(10);\n\n\n\n let now = std::time::Instant::now();\n\n let ping_nonce = Nonce::default();\n", "file_path": "src/tools/synthetic_node.rs", "rank": 55, "score": 38844.023517848094 }, { "content": " SyntheticNodeBuilder::default()\n\n }\n\n\n\n /// Returns the listening address of the node.\n\n pub fn listening_addr(&self) 
-> SocketAddr {\n\n self.inner_node.node().listening_addr()\n\n }\n\n\n\n /// Connects to the target address.\n\n ///\n\n /// If the handshake protocol is enabled it will be executed as well.\n\n pub async fn connect(&self, target: SocketAddr) -> io::Result<()> {\n\n self.inner_node.node().connect(target).await?;\n\n\n\n Ok(())\n\n }\n\n\n\n /// Indicates if the `addr` is registered as a connected peer.\n\n pub fn is_connected(&self, addr: SocketAddr) -> bool {\n\n self.inner_node.node().is_connected(addr)\n", "file_path": "src/tools/synthetic_node.rs", "rank": 56, "score": 38838.498592198 }, { "content": " message_filter: MessageFilter::with_all_disabled(),\n\n }\n\n }\n\n}\n\n\n\nimpl SyntheticNodeBuilder {\n\n /// Creates a [`SyntheticNode`] with the current configuration\n\n pub async fn build(&self) -> io::Result<SyntheticNode> {\n\n // Create the pea2pea node from the config.\n\n let node = Node::new(self.network_config.clone()).await?;\n\n\n\n // Inbound channel size of 100 messages.\n\n let (tx, rx) = mpsc::channel(100);\n\n let inner_node = InnerNode::new(node, tx, self.message_filter.clone(), self.handshake);\n\n\n\n // Enable the read and write protocols\n\n inner_node.enable_reading();\n\n inner_node.enable_writing();\n\n\n\n Ok(SyntheticNode {\n", "file_path": "src/tools/synthetic_node.rs", "rank": 57, "score": 38838.37533470324 }, { "content": " fn write_message(\n\n &self,\n\n _target: SocketAddr,\n\n payload: &[u8],\n\n buffer: &mut [u8],\n\n ) -> io::Result<usize> {\n\n buffer[..payload.len()].copy_from_slice(payload);\n\n Ok(payload.len())\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl Handshaking for InnerNode {\n\n async fn perform_handshake(&self, mut conn: Connection) -> io::Result<Connection> {\n\n match (self.handshake, !conn.side) {\n\n (Some(Handshake::Full), ConnectionSide::Initiator) => {\n\n // Possible bug: running zebra node results in internal pea2pea panics:\n\n // \"thread 'tokio-runtime-worker' panicked at 'internal error: 
entered unreachable code'\"\n\n // which gets \"fixed\" by reversing the parameters in Version::new -- no current insight into\n\n // why this is the case. The panic is triggered by the following code in pea2pea:\n", "file_path": "src/tools/synthetic_node.rs", "rank": 58, "score": 38837.69693610124 }, { "content": " }\n\n (Some(Handshake::VersionOnly), ConnectionSide::Initiator) => {\n\n Message::Version(Version::new(conn.addr, self.node().listening_addr()))\n\n .write_to_stream(conn.writer())\n\n .await?;\n\n\n\n let version = Message::read_from_stream(conn.reader()).await?;\n\n assert_matches!(version, Message::Version(..));\n\n }\n\n (Some(Handshake::VersionOnly), ConnectionSide::Responder) => {\n\n // Receive and send Version.\n\n let version = Message::read_from_stream(conn.reader()).await?;\n\n let node_addr = match version {\n\n Message::Version(version) => version.addr_from.addr,\n\n other => {\n\n let span = self.node().span().clone();\n\n error!(\n\n parent: span,\n\n \"received non-version message during handshake: {:?}\", other\n\n );\n", "file_path": "src/tools/synthetic_node.rs", "rank": 59, "score": 38837.1449112525 }, { "content": " panic!(\"Expected Version, got {:?}\", other);\n\n }\n\n };\n\n\n\n Message::Version(Version::new(node_addr, self.node().listening_addr()))\n\n .write_to_stream(conn.writer())\n\n .await?;\n\n }\n\n (None, _) => {}\n\n }\n\n\n\n Ok(conn)\n\n }\n\n}\n", "file_path": "src/tools/synthetic_node.rs", "rank": 60, "score": 38836.96323683957 }, { "content": " Message::Version(version) => version.addr_from.addr,\n\n other => {\n\n let span = self.node().span().clone();\n\n error!(\n\n parent: span,\n\n \"received non-version message during handshake: {:?}\", other\n\n );\n\n panic!(\"Expected Version, got {:?}\", other);\n\n }\n\n };\n\n\n\n Message::Version(Version::new(node_addr, self.node().listening_addr()))\n\n .write_to_stream(conn.writer())\n\n .await?;\n\n\n\n // Receive and send Verack.\n\n let verack = 
Message::read_from_stream(conn.reader()).await?;\n\n assert_matches!(verack, Message::Verack);\n\n\n\n Message::Verack.write_to_stream(conn.writer()).await?;\n", "file_path": "src/tools/synthetic_node.rs", "rank": 61, "score": 38836.961783372804 }, { "content": " // https://docs.rs/pea2pea/0.20.3/src/pea2pea/node.rs.html#201\n\n\n\n // Send and receive Version.\n\n Message::Version(Version::new(conn.addr, self.node().listening_addr()))\n\n .write_to_stream(conn.writer())\n\n .await?;\n\n\n\n let version = Message::read_from_stream(conn.reader()).await?;\n\n assert_matches!(version, Message::Version(..));\n\n\n\n // Send and receive Verack.\n\n Message::Verack.write_to_stream(conn.writer()).await?;\n\n\n\n let verack = Message::read_from_stream(conn.reader()).await?;\n\n assert_matches!(verack, Message::Verack);\n\n }\n\n (Some(Handshake::Full), ConnectionSide::Responder) => {\n\n // Receive and send Version.\n\n let version = Message::read_from_stream(conn.reader()).await?;\n\n let node_addr = match version {\n", "file_path": "src/tools/synthetic_node.rs", "rank": 62, "score": 38836.726264992896 }, { "content": " }\n\n\n\n fn send_direct_bytes(&self, target: SocketAddr, data: Vec<u8>) -> io::Result<()> {\n\n self.node.send_direct_message(target, data.into())\n\n }\n\n}\n\n\n\nimpl Pea2Pea for InnerNode {\n\n fn node(&self) -> &Node {\n\n &self.node\n\n }\n\n}\n\n\n\n#[async_trait::async_trait]\n\nimpl Reading for InnerNode {\n\n type Message = Message;\n\n\n\n fn read_message(\n\n &self,\n\n _source: SocketAddr,\n", "file_path": "src/tools/synthetic_node.rs", "rank": 63, "score": 38836.69453417161 }, { "content": " return Err(PingPongError::Unexpected(message.into()));\n\n }\n\n }\n\n }\n\n\n\n Err(PingPongError::Timeout(duration))\n\n }\n\n\n\n /// Waits for the target to disconnect by sending a [`Ping`] request. 
Errors if\n\n /// the target responds or doesn't disconnect within the timeout.\n\n ///\n\n /// [`Ping`]: enum@crate::protocol::message::Message::Ping\n\n pub async fn wait_for_disconnect(\n\n &mut self,\n\n target: SocketAddr,\n\n duration: Duration,\n\n ) -> io::Result<()> {\n\n match self.ping_pong_timeout(target, duration).await {\n\n Ok(_) => Err(Error::new(ErrorKind::Other, \"connection still active\")),\n\n Err(PingPongError::ConnectionAborted) => Ok(()),\n", "file_path": "src/tools/synthetic_node.rs", "rank": 64, "score": 38836.143417455816 }, { "content": " self.inner_node.send_direct_bytes(target, data)?;\n\n\n\n Ok(())\n\n }\n\n\n\n /// Reads a message from the inbound (internal) queue of the node.\n\n ///\n\n /// Messages are sent to the queue when unfiltered by the message filter.\n\n pub async fn recv_message(&mut self) -> (SocketAddr, Message) {\n\n match self.inbound_rx.recv().await {\n\n Some(message) => message,\n\n None => panic!(\"all senders dropped!\"),\n\n }\n\n }\n\n\n\n // Attempts to read a message from the inbound (internal) queue of the node before the timeout\n\n // duration has elapsed (seconds).\n\n // FIXME: logging?\n\n pub async fn recv_message_timeout(\n\n &mut self,\n", "file_path": "src/tools/synthetic_node.rs", "rank": 65, "score": 38835.626821870974 }, { "content": " inner_node,\n\n inbound_rx: rx,\n\n })\n\n }\n\n\n\n /// Creates `n` [`SyntheticNode`]'s with the current configuration, and also returns their listening address.\n\n pub async fn build_n(&self, n: usize) -> io::Result<(Vec<SyntheticNode>, Vec<SocketAddr>)> {\n\n let mut nodes = Vec::with_capacity(n);\n\n let mut addrs = Vec::with_capacity(n);\n\n for _ in 0..n {\n\n let node = self.build().await?;\n\n addrs.push(node.listening_addr());\n\n nodes.push(node);\n\n }\n\n\n\n Ok((nodes, addrs))\n\n }\n\n\n\n /// Sets the node's [`MessageFilter`] to [`Filter::AutoReply`].\n\n pub fn with_all_auto_reply(mut self) -> Self {\n", "file_path": "src/tools/synthetic_node.rs", 
"rank": 66, "score": 38835.514270667634 }, { "content": " }\n\n\n\n /// Returns the number of connected peers.\n\n pub fn num_connected(&self) -> usize {\n\n self.inner_node.node().num_connected()\n\n }\n\n\n\n /// Returns a reference to the node's known peers.\n\n pub fn known_peers(&self) -> &KnownPeers {\n\n self.inner_node.node().known_peers()\n\n }\n\n\n\n /// Returns the list of active connections for this node. Should be preferred over [`known_peers`] when querying active connections.\n\n pub fn connected_peers(&self) -> Vec<SocketAddr> {\n\n self.inner_node.node.connected_addrs()\n\n }\n\n\n\n /// Waits until the node has at least one connection, and\n\n /// returns its SocketAddr\n\n pub async fn wait_for_connection(&self) -> SocketAddr {\n", "file_path": "src/tools/synthetic_node.rs", "rank": 67, "score": 38835.369174557985 }, { "content": " VersionOnly,\n\n}\n\n\n\n/// A builder for [`SyntheticNode`].\n\n#[derive(Debug, Clone)]\n\npub struct SyntheticNodeBuilder {\n\n network_config: Option<NodeConfig>,\n\n handshake: Option<Handshake>,\n\n message_filter: MessageFilter,\n\n}\n\n\n\nimpl Default for SyntheticNodeBuilder {\n\n fn default() -> Self {\n\n Self {\n\n network_config: Some(NodeConfig {\n\n // Set localhost as the default IP.\n\n listener_ip: IpAddr::V4(Ipv4Addr::LOCALHOST),\n\n ..Default::default()\n\n }),\n\n handshake: None,\n", "file_path": "src/tools/synthetic_node.rs", "rank": 68, "score": 38835.368371582874 }, { "content": " self.message_filter = MessageFilter::with_all_auto_reply();\n\n self\n\n }\n\n\n\n /// Enables handshaking with [`Handshake::Full`].\n\n pub fn with_full_handshake(mut self) -> Self {\n\n self.handshake = Some(Handshake::Full);\n\n self\n\n }\n\n\n\n /// Enables handshaking with [`Handshake::VersionOnly`].\n\n pub fn with_version_exchange_handshake(mut self) -> Self {\n\n self.handshake = Some(Handshake::VersionOnly);\n\n self\n\n }\n\n\n\n /// Sets the node's [`MessageFilter`].\n\n pub fn with_message_filter(mut 
self, filter: MessageFilter) -> Self {\n\n self.message_filter = filter;\n\n self\n", "file_path": "src/tools/synthetic_node.rs", "rank": 69, "score": 38835.18582785359 }, { "content": " // Read the position from the cursor.\n\n Ok(Some((message, HEADER_LEN + bytes.position() as usize)))\n\n }\n\n\n\n async fn process_message(&self, source: SocketAddr, message: Self::Message) -> io::Result<()> {\n\n let span = self.node().span().clone();\n\n\n\n debug!(parent: span.clone(), \"processing {:?}\", message);\n\n match self.message_filter.message_filter_type(&message) {\n\n Filter::AutoReply => {\n\n // Autoreply with the appropriate response.\n\n let response = self.message_filter.reply_message(&message);\n\n\n\n debug!(parent: span, \"auto replying with {:?}\", response);\n\n self.send_direct_message(source, response)?;\n\n }\n\n\n\n Filter::Disabled => {\n\n // Send the message to the node's inbound queue.\n\n debug!(\n", "file_path": "src/tools/synthetic_node.rs", "rank": 70, "score": 38833.8253489659 }, { "content": "use tracing::*;\n\n\n\nuse std::{\n\n io::{self, Cursor, Error, ErrorKind},\n\n net::{IpAddr, Ipv4Addr, SocketAddr},\n\n time::Duration,\n\n};\n\n\n\n/// An [`Error`](std::error::Error) type for [`SyntheticNode::ping_pong_timeout`]\n\npub enum PingPongError {\n\n /// The connection was aborted during the [`Ping`](Message::Ping)-[`Pong`](Message::Pong) exchange.\n\n ConnectionAborted,\n\n /// An [io::Error] occurred during the [`Ping`](Message::Ping)-[`Pong`](Message::Pong) exchange.\n\n IoErr(io::Error),\n\n /// Timeout was exceeded before a [`Pong`](Message::Pong) was received.\n\n Timeout(Duration),\n\n /// A message was received which was not [`Pong`](Message::Pong), or the [Pong's nonce](Nonce) did not match.\n\n Unexpected(Box<Message>),\n\n}\n\n\n", "file_path": "src/tools/synthetic_node.rs", "rank": 71, "score": 38833.75430980153 }, { "content": " // FIXME: Consider specialising the longer debug strings, e.g.\n\n // IoErr and Unexpected.\n\n 
f.write_str(&format!(\"{:?}\", self))\n\n }\n\n}\n\n\n\nimpl std::error::Error for PingPongError {}\n\n\n\nimpl From<PingPongError> for io::Error {\n\n fn from(original: PingPongError) -> Self {\n\n use PingPongError::*;\n\n match original {\n\n ConnectionAborted => Error::new(ErrorKind::ConnectionAborted, \"Connection aborted\"),\n\n IoErr(err) => err,\n\n Timeout(duration) => Error::new(\n\n ErrorKind::TimedOut,\n\n format!(\"Timeout after {0:.3}s\", duration.as_secs_f64()),\n\n ),\n\n Unexpected(msg) => Error::new(\n\n ErrorKind::Other,\n\n format!(\"Expected Pong, received {:?}\", msg),\n\n ),\n\n }\n\n }\n\n}\n\n\n\n/// Enables tracing for all [`SyntheticNode`] instances (usually scoped by test).\n", "file_path": "src/tools/synthetic_node.rs", "rank": 72, "score": 38832.58103175585 }, { "content": " }\n\n\n\n /// Sets the node's write buffer size.\n\n pub fn with_max_write_buffer_size(mut self, size: usize) -> Self {\n\n let mut config = self.network_config.unwrap_or_default();\n\n config.conn_write_buffer_size = size;\n\n self.network_config = Some(config);\n\n self\n\n }\n\n}\n\n\n\n/// Convenient abstraction over a `pea2pea` node.\n\npub struct SyntheticNode {\n\n inner_node: InnerNode,\n\n inbound_rx: Receiver<(SocketAddr, Message)>,\n\n}\n\n\n\nimpl SyntheticNode {\n\n // FIXME: remove in favour of calling `SyntheticNodeBuilder::default()` or `new` directly?\n\n pub fn builder() -> SyntheticNodeBuilder {\n", "file_path": "src/tools/synthetic_node.rs", "rank": 73, "score": 38832.41975966333 }, { "content": " Err(err) => Err(err.into()),\n\n }\n\n }\n\n\n\n /// Gracefully shuts down the node.\n\n pub fn shut_down(&self) {\n\n self.inner_node.node().shut_down()\n\n }\n\n}\n\n\n\nimpl Drop for SyntheticNode {\n\n fn drop(&mut self) {\n\n self.shut_down();\n\n }\n\n}\n\n\n", "file_path": "src/tools/synthetic_node.rs", "rank": 74, "score": 38832.40065329857 }, { "content": "\n\n if handshake.is_some() {\n\n node.enable_handshaking();\n\n }\n\n\n\n node\n\n 
}\n\n\n\n fn send_direct_message(&self, target: SocketAddr, message: Message) -> io::Result<()> {\n\n let mut payload = vec![];\n\n let header = message.encode(&mut payload)?;\n\n\n\n // Encode the header and append the message to it.\n\n let mut buffer = Vec::with_capacity(HEADER_LEN + header.body_length as usize);\n\n header.encode(&mut buffer)?;\n\n buffer.append(&mut payload);\n\n\n\n self.node().send_direct_message(target, buffer.into())?;\n\n\n\n Ok(())\n", "file_path": "src/tools/synthetic_node.rs", "rank": 75, "score": 38831.753811548544 }, { "content": " parent: span,\n\n \"sending the message to the node's inbound queue\"\n\n );\n\n self.inbound_tx\n\n .send((source, message))\n\n .await\n\n .expect(\"receiver dropped!\");\n\n }\n\n\n\n Filter::Enabled => {\n\n // Ignore the message.\n\n debug!(parent: span, \"message was ignored by the filter\");\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl Writing for InnerNode {\n", "file_path": "src/tools/synthetic_node.rs", "rank": 76, "score": 38831.66369777302 }, { "content": " duration: Duration,\n\n ) -> io::Result<(SocketAddr, Message)> {\n\n match timeout(duration, self.recv_message()).await {\n\n Ok(message) => Ok(message),\n\n Err(_e) => Err(Error::new(\n\n ErrorKind::TimedOut,\n\n format!(\n\n \"could not read message after {0:.3}s\",\n\n duration.as_secs_f64()\n\n ),\n\n )),\n\n }\n\n }\n\n\n\n /// Sends [`Ping`], and expects [`Pong`] with a matching [`Nonce`] in reply.\n\n ///\n\n /// Uses polling to check that connection is still alive. 
Returns a [`PingPongError`] if:\n\n /// - a non-[`Pong`] message is received\n\n /// - a [`Pong`] with a non-matching [`Nonce`] is receives\n\n /// - the timeout expires\n", "file_path": "src/tools/synthetic_node.rs", "rank": 77, "score": 38830.46797444177 }, { "content": " const SLEEP: Duration = Duration::from_millis(10);\n\n loop {\n\n // Mutating the collection is alright since this is a copy of the connections and not the actually list.\n\n if let Some(addr) = self.connected_peers().pop() {\n\n return addr;\n\n }\n\n\n\n tokio::time::sleep(SLEEP).await;\n\n }\n\n }\n\n\n\n /// Sends a direct message to the target address.\n\n pub fn send_direct_message(&self, target: SocketAddr, message: Message) -> io::Result<()> {\n\n self.inner_node.send_direct_message(target, message)?;\n\n\n\n Ok(())\n\n }\n\n\n\n /// Sends bytes directly to the target address.\n\n pub fn send_direct_bytes(&self, target: SocketAddr, data: Vec<u8>) -> io::Result<()> {\n", "file_path": "src/tools/synthetic_node.rs", "rank": 78, "score": 38830.165301699504 }, { "content": " buffer: &[u8],\n\n ) -> io::Result<Option<(Self::Message, usize)>> {\n\n // Check buffer contains a full header.\n\n if buffer.len() < HEADER_LEN {\n\n return Ok(None);\n\n }\n\n\n\n // Decode header.\n\n let header_bytes = &buffer[..HEADER_LEN];\n\n let header = MessageHeader::decode(&mut Cursor::new(header_bytes))?;\n\n\n\n // Check buffer contains the announced message length.\n\n if buffer.len() < HEADER_LEN + header.body_length as usize {\n\n return Err(ErrorKind::InvalidData.into());\n\n }\n\n\n\n // Decode message.\n\n let mut bytes = Cursor::new(&buffer[HEADER_LEN..][..header.body_length as usize]);\n\n let message = Message::decode(header.command, &mut bytes)?;\n\n\n", "file_path": "src/tools/synthetic_node.rs", "rank": 79, "score": 38827.43781865108 }, { "content": "impl std::fmt::Debug for PingPongError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n let str = match self {\n\n 
PingPongError::ConnectionAborted => \"Connection aborted\".to_string(),\n\n PingPongError::IoErr(err) => format!(\"{:?}\", err),\n\n PingPongError::Timeout(duration) => {\n\n format!(\"Timeout after {0:.3}s\", duration.as_secs_f32())\n\n }\n\n PingPongError::Unexpected(msg) => match &**msg {\n\n Message::Pong(_) => \"Pong nonce did not match\".to_string(),\n\n non_pong => format!(\"Expected a matching Pong, but got {:?}\", non_pong),\n\n },\n\n };\n\n\n\n f.write_str(&str)\n\n }\n\n}\n\n\n\nimpl std::fmt::Display for PingPongError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n", "file_path": "src/tools/synthetic_node.rs", "rank": 80, "score": 38827.43781865108 }, { "content": " if let Err(err) = self.send_direct_message(target, Message::Ping(ping_nonce)) {\n\n if !self.is_connected(target) {\n\n return Err(PingPongError::ConnectionAborted);\n\n } else {\n\n return Err(PingPongError::IoErr(err));\n\n }\n\n }\n\n\n\n while now.elapsed() < duration {\n\n match self.recv_message_timeout(SLEEP).await {\n\n Err(_timeout) => {\n\n // Check that connection is still alive, so that we can exit sooner\n\n if !self.is_connected(target) {\n\n return Err(PingPongError::ConnectionAborted);\n\n }\n\n }\n\n Ok((_, Message::Pong(nonce))) if nonce == ping_nonce => {\n\n return Ok(());\n\n }\n\n Ok((_, message)) => {\n", "file_path": "src/tools/synthetic_node.rs", "rank": 81, "score": 38827.43781865108 }, { "content": "//! 
An implementation of the Zcash network protocol types and messages.\n\n\n\npub mod message;\n\npub mod payload;\n", "file_path": "src/protocol/mod.rs", "rank": 82, "score": 38750.741132457966 }, { "content": "/// Returns `n` random length sets of random bytes.\n\npub fn random_bytes(rng: &mut ChaCha8Rng, n: usize) -> Vec<Vec<u8>> {\n\n (0..n)\n\n .map(|_| {\n\n let random_len: usize = rng.gen_range(1..(64 * 1024));\n\n let random_payload: Vec<u8> = rng.sample_iter(Standard).take(random_len).collect();\n\n\n\n random_payload\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "src/tools/fuzzing.rs", "rank": 83, "score": 38727.58311359285 }, { "content": "fn generate_corrupt_messages(rng: &mut ChaCha8Rng, n: usize) -> Vec<Vec<u8>> {\n\n let message_pool = default_fuzz_messages();\n\n // generate a variety of corrupt messages and select n of them at random\n\n let mut possible_payloads = Vec::with_capacity(n * 6);\n\n possible_payloads.append(&mut zeroes(rng, n));\n\n possible_payloads.append(&mut encode_slightly_corrupted_messages(\n\n rng,\n\n n,\n\n &message_pool,\n\n ));\n\n possible_payloads.append(&mut encode_messages_with_corrupt_checksum(\n\n rng,\n\n n,\n\n &message_pool,\n\n ));\n\n possible_payloads.append(&mut encode_messages_with_corrupt_body_length(\n\n rng,\n\n n,\n\n &message_pool,\n\n ));\n", "file_path": "src/tests/resistance/stress_test.rs", "rank": 84, "score": 38015.10562559311 }, { "content": "/// Encodes a message and corrupts the checksum bytes.\n\npub fn encode_message_with_corrupt_checksum(rng: &mut ChaCha8Rng, message: &Message) -> Vec<u8> {\n\n let mut body_buffer = Vec::new();\n\n let mut header = message.encode(&mut body_buffer).unwrap();\n\n\n\n let mut buffer = Vec::with_capacity(body_buffer.len() + HEADER_LEN);\n\n header.checksum = random_non_valid_u32(rng, header.checksum);\n\n header.encode(&mut buffer).unwrap();\n\n buffer.append(&mut body_buffer);\n\n\n\n buffer\n\n}\n\n\n", "file_path": "src/tools/fuzzing.rs", "rank": 85, "score": 
38015.10562559311 }, { "content": "//! Block-related types.\n\n\n\nuse crate::protocol::payload::{\n\n codec::Codec,\n\n inv::{InvHash, ObjectKind},\n\n read_n_bytes, Hash, ProtocolVersion, Tx, VarInt,\n\n};\n\n\n\nuse std::{\n\n convert::TryInto,\n\n io::{self, Cursor, Write},\n\n};\n\n\n\nuse sha2::Digest;\n\n\n\n/// The locator hash object, used to communicate chain state.\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct LocatorHashes {\n\n /// The protocol version.\n\n pub version: ProtocolVersion,\n", "file_path": "src/protocol/payload/block.rs", "rank": 86, "score": 37580.075262113576 }, { "content": "//! High level APIs and types for network messages.\n\n\n\npub mod constants;\n\n#[doc(hidden)]\n\npub mod stream_io;\n\n\n\nuse crate::protocol::{\n\n message::constants::*,\n\n payload::{\n\n block::{Block, Headers, LocatorHashes},\n\n codec::Codec,\n\n read_n_bytes, Addr, FilterAdd, FilterLoad, Inv, Nonce, Reject, Tx, Version,\n\n },\n\n};\n\n\n\nuse sha2::{Digest, Sha256};\n\n\n\nuse std::io::{self, Cursor, Write};\n\n\n\n/// The header of a network message.\n", "file_path": "src/protocol/message/mod.rs", "rank": 87, "score": 37575.32786525079 }, { "content": "//! Transaction-related types.\n\n\n\nuse sha2::Digest;\n\n\n\nuse crate::protocol::payload::{codec::Codec, read_n_bytes, Hash, VarInt};\n\n\n\nuse std::{\n\n convert::TryInto,\n\n io::{self, Cursor, Read, Write},\n\n};\n\n\n\nuse crate::protocol::payload::inv::{InvHash, ObjectKind};\n\n\n\n/// A Zcash transaction ([spec](https://zips.z.cash/protocol/canopy.pdf#txnencodingandconsensus)).\n\n///\n\n/// Supports V1-V4, V5 isn't yet stable.\n\n#[derive(Debug, PartialEq, Clone)]\n\npub enum Tx {\n\n V1(TxV1),\n\n V2(TxV2),\n", "file_path": "src/protocol/payload/tx.rs", "rank": 88, "score": 37575.17103107818 }, { "content": "//! 
Network message payload types.\n\n\n\nuse chrono::{DateTime, NaiveDateTime, Utc};\n\nuse rand::{thread_rng, Rng};\n\n\n\nuse std::io::{self, Cursor, Read, Write};\n\n\n\npub mod addr;\n\npub use addr::Addr;\n\n\n\npub mod block;\n\n\n\npub mod inv;\n\npub use inv::Inv;\n\n\n\npub mod tx;\n\npub use tx::Tx;\n\n\n\npub mod version;\n\npub use version::Version;\n", "file_path": "src/protocol/payload/mod.rs", "rank": 89, "score": 37572.40660008345 }, { "content": "//! Bloom filtering types, see [BIP 37](https://github.com/bitcoin/bips/blob/master/bip-0037.mediawiki).\n\n\n\nuse std::io::{self, Cursor, ErrorKind, Read, Write};\n\n\n\nuse crate::protocol::payload::{codec::Codec, read_n_bytes};\n\n\n\n/// A modification to an existing filter.\n\n#[derive(Debug, PartialEq, Default, Clone)]\n\npub struct FilterAdd {\n\n /// The data element to add to the current filter.\n\n pub data: Vec<u8>,\n\n}\n\n\n\n/// A new filter on the connection.\n\n#[derive(Debug, PartialEq, Default, Clone)]\n\npub struct FilterLoad {\n\n /// The filter itself.\n\n pub filter: Vec<u8>,\n\n /// The number of hash functions to use in this filter.\n\n pub hash_fn_count: u32,\n", "file_path": "src/protocol/payload/filter.rs", "rank": 90, "score": 37570.446989038246 }, { "content": " pub version: ProtocolVersion,\n\n /// The hash of the previous block.\n\n pub prev_block: Hash,\n\n /// The hash of the merkle root.\n\n pub merkle_root: Hash,\n\n /// Field usage varies depending on version, see spec.\n\n pub light_client_root: Hash,\n\n /// The block timestamp.\n\n pub timestamp: u32,\n\n /// An encoded version of the target threshold.\n\n pub bits: u32,\n\n /// The nonce used in the version messages, `Nonce(u64)`, is NOT the same as the nonce the\n\n /// block was generated with as it uses a `u32`.\n\n pub nonce: [u8; 32],\n\n /// The size of the Equihash solution in bytes (always `1344`).\n\n pub solution_size: VarInt,\n\n /// The Equihash solution.\n\n pub solution: [u8; 1344],\n\n}\n\n\n", 
"file_path": "src/protocol/payload/block.rs", "rank": 91, "score": 37569.40853103219 }, { "content": "//! Network address types.\n\n\n\nuse crate::protocol::payload::{codec::Codec, read_n_bytes, read_timestamp};\n\n\n\nuse std::convert::TryInto;\n\n\n\nuse chrono::{DateTime, Utc};\n\n\n\nuse std::{\n\n io::{self, Cursor, Read, Write},\n\n net::{IpAddr::*, Ipv6Addr, SocketAddr},\n\n};\n\n\n\n/// A list of network addresses, used for peering.\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct Addr {\n\n pub addrs: Vec<NetworkAddr>,\n\n}\n\n\n\nimpl Addr {\n", "file_path": "src/protocol/payload/addr.rs", "rank": 92, "score": 37569.2642643041 }, { "content": "\n\npub mod reject;\n\npub use reject::Reject;\n\n\n\nuse crate::protocol::message::constants::MAX_MESSAGE_LEN;\n\n\n\nuse self::codec::Codec;\n\n\n\npub mod codec;\n\n\n\npub mod filter;\n\npub use filter::{FilterAdd, FilterLoad};\n\n\n\n/// A `u64`-backed nonce.\n\n#[derive(Debug, PartialEq, Copy, Clone)]\n\npub struct Nonce(u64);\n\n\n\nimpl Default for Nonce {\n\n fn default() -> Self {\n\n Self(thread_rng().gen())\n", "file_path": "src/protocol/payload/mod.rs", "rank": 93, "score": 37569.12215949475 }, { "content": "\n\nimpl ProtocolVersion {\n\n /// The current protocol version.\n\n pub fn current() -> Self {\n\n Self(170_013)\n\n }\n\n}\n\n\n\nimpl Codec for ProtocolVersion {\n\n fn encode(&self, buffer: &mut Vec<u8>) -> io::Result<()> {\n\n buffer.write_all(&self.0.to_le_bytes())?;\n\n\n\n Ok(())\n\n }\n\n\n\n fn decode(bytes: &mut Cursor<&[u8]>) -> io::Result<Self> {\n\n let version = u32::from_le_bytes(read_n_bytes(bytes)?);\n\n\n\n Ok(Self(version))\n\n }\n", "file_path": "src/protocol/payload/mod.rs", "rank": 94, "score": 37567.80607189928 }, { "content": " /// for use by [Block].\n\n fn decode_without_tx_count(bytes: &mut Cursor<&[u8]>) -> io::Result<Self> {\n\n let version = ProtocolVersion::decode(bytes)?;\n\n let prev_block = Hash::decode(bytes)?;\n\n let merkle_root = Hash::decode(bytes)?;\n\n 
let light_client_root = Hash::decode(bytes)?;\n\n\n\n let timestamp = u32::from_le_bytes(read_n_bytes(bytes)?);\n\n\n\n let bits = u32::from_le_bytes(read_n_bytes(bytes)?);\n\n let nonce = read_n_bytes(bytes)?;\n\n\n\n let solution_size = VarInt::decode(bytes)?;\n\n let solution = read_n_bytes(bytes)?;\n\n\n\n Ok(Self {\n\n version,\n\n prev_block,\n\n merkle_root,\n\n light_client_root,\n", "file_path": "src/protocol/payload/block.rs", "rank": 95, "score": 37567.78978532679 }, { "content": "pub const GETHEADERS_COMMAND: [u8; 12] = *b\"getheaders\\0\\0\";\n\npub const HEADERS_COMMAND: [u8; 12] = *b\"headers\\0\\0\\0\\0\\0\";\n\npub const GETBLOCKS_COMMAND: [u8; 12] = *b\"getblocks\\0\\0\\0\";\n\npub const BLOCK_COMMAND: [u8; 12] = *b\"block\\0\\0\\0\\0\\0\\0\\0\";\n\npub const GETDATA_COMMAND: [u8; 12] = *b\"getdata\\0\\0\\0\\0\\0\";\n\npub const INV_COMMAND: [u8; 12] = *b\"inv\\0\\0\\0\\0\\0\\0\\0\\0\\0\";\n\npub const NOTFOUND_COMMAND: [u8; 12] = *b\"notfound\\0\\0\\0\\0\";\n\npub const MEMPOOL_COMMAND: [u8; 12] = *b\"mempool\\0\\0\\0\\0\\0\";\n\npub const TX_COMMAND: [u8; 12] = *b\"tx\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\";\n\npub const REJECT_COMMAND: [u8; 12] = *b\"reject\\0\\0\\0\\0\\0\\0\";\n\npub const FILTERLOAD_COMMAND: [u8; 12] = *b\"filterload\\0\\0\";\n\npub const FILTERADD_COMMAND: [u8; 12] = *b\"filteradd\\0\\0\\0\";\n\npub const FILTERCLEAR_COMMAND: [u8; 12] = *b\"filterclear\\0\";\n", "file_path": "src/protocol/message/constants.rs", "rank": 96, "score": 37567.786493010455 }, { "content": "//! 
Reject payload types.\n\n\n\nuse crate::protocol::payload::{codec::Codec, read_n_bytes, VarStr};\n\n\n\nuse std::io::{self, Cursor, Read, Write};\n\n\n\n/// A reject message payload.\n\n#[derive(Debug, PartialEq, Clone)]\n\npub struct Reject {\n\n /// The type of message rejected.\n\n pub message: VarStr,\n\n /// The code of the reason for rejection.\n\n pub ccode: CCode,\n\n /// The reason.\n\n pub reason: VarStr,\n\n /// Optional extra data provided by some errors.\n\n /// Currently, all errors which provide this field fill it with\n\n /// the TXID or block header hash of the object being rejected,\n\n /// so the field is 32 bytes.\n\n ///\n", "file_path": "src/protocol/payload/reject.rs", "rank": 97, "score": 37567.752864413626 }, { "content": " GetData(Inv),\n\n Inv(Inv),\n\n NotFound(Inv),\n\n MemPool,\n\n Tx(Tx),\n\n Reject(Reject),\n\n FilterLoad(FilterLoad),\n\n FilterAdd(FilterAdd),\n\n FilterClear,\n\n}\n\n\n\nimpl Message {\n\n // FIXME: implement Codec?\n\n /// Encodes a message into the supplied buffer and returns its header.\n\n pub fn encode(&self, buffer: &mut Vec<u8>) -> io::Result<MessageHeader> {\n\n let header = match self {\n\n Self::Version(version) => {\n\n version.encode(buffer)?;\n\n MessageHeader::new(VERSION_COMMAND, buffer)\n\n }\n", "file_path": "src/protocol/message/mod.rs", "rank": 98, "score": 37567.55706889732 }, { "content": " timestamp,\n\n bits,\n\n nonce,\n\n solution_size,\n\n solution,\n\n })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use hex::FromHex;\n\n\n\n use super::*;\n\n use crate::vectors::*;\n\n\n\n #[test]\n\n #[ignore]\n\n fn testnet_genesis_round_trip() {\n\n let block_bytes = &BLOCK_TESTNET_GENESIS_BYTES[..];\n", "file_path": "src/protocol/payload/block.rs", "rank": 99, "score": 37566.48765611455 } ]
Rust
crates/sentry-actix/src/lib.rs
sharingcloud/github-scbot
953ba1ae7f3bb06c37084756458a1ddb53c8fa65
#![doc(html_favicon_url = "https://sentry-brand.storage.googleapis.com/favicon.ico")] #![doc(html_logo_url = "https://sentry-brand.storage.googleapis.com/sentry-glyph-black.png")] #![warn(missing_docs)] #![allow(deprecated)] #![allow(clippy::type_complexity)] use std::{borrow::Cow, pin::Pin, sync::Arc}; use actix_web::{ dev::{Service, ServiceRequest, ServiceResponse, Transform}, Error, }; use futures_util::{ future::{ok, Future, Ready}, FutureExt, }; use sentry_core::{ protocol::{ClientSdkPackage, Event, Request}, types::Uuid, Hub, SentryFutureExt, }; #[cfg(feature = "eyre")] mod eyre; #[cfg(feature = "eyre")] pub use eyre::WrapEyre; pub struct SentryBuilder { middleware: Sentry, } impl SentryBuilder { pub fn finish(self) -> Sentry { self.middleware } pub fn with_hub(mut self, hub: Arc<Hub>) -> Self { self.middleware.hub = Some(hub); self } pub fn with_default_hub(mut self) -> Self { self.middleware.hub = None; self } pub fn emit_header(mut self, val: bool) -> Self { self.middleware.emit_header = val; self } pub fn capture_server_errors(mut self, val: bool) -> Self { self.middleware.capture_server_errors = val; self } } #[derive(Clone)] pub struct Sentry { hub: Option<Arc<Hub>>, emit_header: bool, capture_server_errors: bool, } impl Sentry { pub fn new() -> Self { Sentry { hub: None, emit_header: false, capture_server_errors: true, } } pub fn builder() -> SentryBuilder { Sentry::new().into_builder() } pub fn into_builder(self) -> SentryBuilder { SentryBuilder { middleware: self } } } impl Default for Sentry { fn default() -> Self { Sentry::new() } } impl<S, B> Transform<S> for Sentry where S: Service<Request = ServiceRequest, Response = ServiceResponse<B>, Error = Error>, S::Future: 'static, { type Error = Error; type Future = Ready<Result<Self::Transform, Self::InitError>>; type InitError = (); type Request = ServiceRequest; type Response = ServiceResponse<B>; type Transform = SentryMiddleware<S>; fn new_transform(&self, service: S) -> Self::Future { 
ok(SentryMiddleware { service, inner: self.clone(), }) } } pub struct SentryMiddleware<S> { service: S, inner: Sentry, } impl<S, B> Service for SentryMiddleware<S> where S: Service<Request = ServiceRequest, Response = ServiceResponse<B>, Error = Error>, S::Future: 'static, { type Error = Error; type Future = Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>>>>; type Request = ServiceRequest; type Response = ServiceResponse<B>; fn poll_ready( &mut self, cx: &mut std::task::Context<'_>, ) -> std::task::Poll<Result<(), Self::Error>> { self.service.poll_ready(cx) } fn call(&mut self, req: ServiceRequest) -> Self::Future { let inner = self.inner.clone(); let hub = Arc::new(Hub::new_from_top( inner.hub.clone().unwrap_or_else(Hub::main), )); let client = hub.client(); let track_sessions = client.as_ref().map_or(false, |client| { let options = client.options(); options.auto_session_tracking && options.session_mode == sentry_core::SessionMode::Request }); if track_sessions { hub.start_session(); } let with_pii = client .as_ref() .map_or(false, |client| client.options().send_default_pii); let (tx, sentry_req) = sentry_request_from_http(&req, with_pii); hub.configure_scope(|scope| { scope.set_transaction(tx.as_deref()); scope.add_event_processor(Box::new(move |event| { let evt = process_event(event, &sentry_req); Some(evt) })); }); let fut = self.service.call(req).bind_hub(hub.clone()); async move { let mut res: Self::Response = match fut.await { Ok(res) => res, Err(e) => { if inner.capture_server_errors { process_error(hub, &e); } return Err(e); } }; if inner.capture_server_errors && res.response().status().is_server_error() { if let Some(e) = res.response().error() { let event_id = process_error(hub, e); if inner.emit_header { res.response_mut().headers_mut().insert( "x-sentry-event".parse().unwrap(), event_id.to_simple_ref().to_string().parse().unwrap(), ); } } } Ok(res) } .boxed_local() } } #[cfg(feature = "eyre")] fn process_eyre_report(hub: Arc<Hub>, e: 
&actix_web::Error) -> Option<Uuid> { use sentry_eyre::EyreHubExt; e.as_error::<WrapEyre>() .map(|report| hub.capture_eyre(report)) } #[cfg(not(feature = "eyre"))] fn process_eyre_report(_hub: Arc<Hub>, _e: &actix_web::Error) -> Option<Uuid> { None } fn process_error(hub: Arc<Hub>, e: &actix_web::Error) -> Uuid { process_eyre_report(hub.clone(), e).unwrap_or_else(|| hub.capture_error(e)) } fn sentry_request_from_http(request: &ServiceRequest, with_pii: bool) -> (Option<String>, Request) { let transaction = if let Some(name) = request.match_name() { Some(String::from(name)) } else { request.match_pattern() }; let mut sentry_req = Request { url: format!( "{}://{}{}", request.connection_info().scheme(), request.connection_info().host(), request.uri() ) .parse() .ok(), method: Some(request.method().to_string()), headers: request .headers() .iter() .map(|(k, v)| (k.to_string(), v.to_str().unwrap_or_default().to_string())) .collect(), ..Default::default() }; if with_pii { if let Some(remote) = request.connection_info().remote_addr() { sentry_req.env.insert("REMOTE_ADDR".into(), remote.into()); } }; (transaction, sentry_req) } fn process_event(mut event: Event<'static>, request: &Request) -> Event<'static> { if event.request.is_none() { event.request = Some(request.clone()); } if let Some(sdk) = event.sdk.take() { let mut sdk = sdk.into_owned(); sdk.packages.push(ClientSdkPackage { name: "sentry-actix".into(), version: env!("CARGO_PKG_VERSION").into(), }); event.sdk = Some(Cow::Owned(sdk)); } event }
#![doc(html_favicon_url = "https://sentry-brand.storage.googleapis.com/favicon.ico")] #![doc(html_logo_url = "https://sentry-brand.storage.googleapis.com/sentry-glyph-black.png")] #![warn(missing_docs)] #![allow(deprecated)] #![allow(clippy::type_complexity)] use std::{borrow::Cow, pin::Pin, sync::Arc}; use actix_web::{ dev::{Service, ServiceRequest, ServiceResponse, Transform}, Error, }; use futures_util::{ future::{ok, Future, Ready}, FutureExt, }; use sentry_core::{ protocol::{ClientSdkPackage, Event, Request}, types::Uuid, Hub, SentryFutureExt, }; #[cfg(feature = "eyre")] mod eyre; #[cfg(feature = "eyre")] pub use eyre::WrapEyre; pub struct SentryBuilder { middleware: Sentry, } impl SentryBuilder { pub fn finish(self) -> Sentry { self.middleware } pub fn with_hub(mut self, hub: Arc<Hub>) -> Self { self.middleware.hub = Some(hub); self } pub fn with_default_hub(mut self) -> Self { self.middleware.hub = None; self } pub fn emit_header(mut self, val: bool) -> Self { self.middleware.emit_header = val; self } pub fn capture_server_errors(mut self, val: bool) -> Self { self.middleware.capture_server_errors = val; self } } #[derive(Clone)] pub struct Sentry { hub: Option<Arc<Hub>>, emit_header: bool, capture_server_errors: bool, } impl Sentry { pub fn ne
pub fn builder() -> SentryBuilder { Sentry::new().into_builder() } pub fn into_builder(self) -> SentryBuilder { SentryBuilder { middleware: self } } } impl Default for Sentry { fn default() -> Self { Sentry::new() } } impl<S, B> Transform<S> for Sentry where S: Service<Request = ServiceRequest, Response = ServiceResponse<B>, Error = Error>, S::Future: 'static, { type Error = Error; type Future = Ready<Result<Self::Transform, Self::InitError>>; type InitError = (); type Request = ServiceRequest; type Response = ServiceResponse<B>; type Transform = SentryMiddleware<S>; fn new_transform(&self, service: S) -> Self::Future { ok(SentryMiddleware { service, inner: self.clone(), }) } } pub struct SentryMiddleware<S> { service: S, inner: Sentry, } impl<S, B> Service for SentryMiddleware<S> where S: Service<Request = ServiceRequest, Response = ServiceResponse<B>, Error = Error>, S::Future: 'static, { type Error = Error; type Future = Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>>>>; type Request = ServiceRequest; type Response = ServiceResponse<B>; fn poll_ready( &mut self, cx: &mut std::task::Context<'_>, ) -> std::task::Poll<Result<(), Self::Error>> { self.service.poll_ready(cx) } fn call(&mut self, req: ServiceRequest) -> Self::Future { let inner = self.inner.clone(); let hub = Arc::new(Hub::new_from_top( inner.hub.clone().unwrap_or_else(Hub::main), )); let client = hub.client(); let track_sessions = client.as_ref().map_or(false, |client| { let options = client.options(); options.auto_session_tracking && options.session_mode == sentry_core::SessionMode::Request }); if track_sessions { hub.start_session(); } let with_pii = client .as_ref() .map_or(false, |client| client.options().send_default_pii); let (tx, sentry_req) = sentry_request_from_http(&req, with_pii); hub.configure_scope(|scope| { scope.set_transaction(tx.as_deref()); scope.add_event_processor(Box::new(move |event| { let evt = process_event(event, &sentry_req); Some(evt) })); }); let fut = 
self.service.call(req).bind_hub(hub.clone()); async move { let mut res: Self::Response = match fut.await { Ok(res) => res, Err(e) => { if inner.capture_server_errors { process_error(hub, &e); } return Err(e); } }; if inner.capture_server_errors && res.response().status().is_server_error() { if let Some(e) = res.response().error() { let event_id = process_error(hub, e); if inner.emit_header { res.response_mut().headers_mut().insert( "x-sentry-event".parse().unwrap(), event_id.to_simple_ref().to_string().parse().unwrap(), ); } } } Ok(res) } .boxed_local() } } #[cfg(feature = "eyre")] fn process_eyre_report(hub: Arc<Hub>, e: &actix_web::Error) -> Option<Uuid> { use sentry_eyre::EyreHubExt; e.as_error::<WrapEyre>() .map(|report| hub.capture_eyre(report)) } #[cfg(not(feature = "eyre"))] fn process_eyre_report(_hub: Arc<Hub>, _e: &actix_web::Error) -> Option<Uuid> { None } fn process_error(hub: Arc<Hub>, e: &actix_web::Error) -> Uuid { process_eyre_report(hub.clone(), e).unwrap_or_else(|| hub.capture_error(e)) } fn sentry_request_from_http(request: &ServiceRequest, with_pii: bool) -> (Option<String>, Request) { let transaction = if let Some(name) = request.match_name() { Some(String::from(name)) } else { request.match_pattern() }; let mut sentry_req = Request { url: format!( "{}://{}{}", request.connection_info().scheme(), request.connection_info().host(), request.uri() ) .parse() .ok(), method: Some(request.method().to_string()), headers: request .headers() .iter() .map(|(k, v)| (k.to_string(), v.to_str().unwrap_or_default().to_string())) .collect(), ..Default::default() }; if with_pii { if let Some(remote) = request.connection_info().remote_addr() { sentry_req.env.insert("REMOTE_ADDR".into(), remote.into()); } }; (transaction, sentry_req) } fn process_event(mut event: Event<'static>, request: &Request) -> Event<'static> { if event.request.is_none() { event.request = Some(request.clone()); } if let Some(sdk) = event.sdk.take() { let mut sdk = sdk.into_owned(); 
sdk.packages.push(ClientSdkPackage { name: "sentry-actix".into(), version: env!("CARGO_PKG_VERSION").into(), }); event.sdk = Some(Cow::Owned(sdk)); } event }
w() -> Self { Sentry { hub: None, emit_header: false, capture_server_errors: true, } }
function_block-function_prefixed
[ { "content": "/// Captures an [`eyre::Report`].\n\n///\n\n/// This will capture an eyre report as a sentry event if a\n\n/// [`sentry::Client`](../../struct.Client.html) is initialised, otherwise it will be a\n\n/// no-op. The event is dispatched to the thread-local hub, with semantics as described in\n\n/// [`Hub::current`].\n\n///\n\n/// See [module level documentation](index.html) for more information.\n\n///\n\n/// [`eyre::Report`]: https://docs.rs/eyre/*/eyre/struct.Report.html\n\npub fn capture_eyre(e: &eyre::Report) -> Uuid {\n\n Hub::with_active(|hub| hub.capture_eyre(e))\n\n}\n\n\n", "file_path": "crates/sentry-eyre/src/lib.rs", "rank": 5, "score": 188798.81999454467 }, { "content": "/// Hub extension methods for working with [`eyre`].\n\n///\n\n/// [`eyre`]: https://docs.rs/eyre\n\npub trait EyreHubExt {\n\n /// Captures an [`eyre::Report`] on a specific hub.\n\n ///\n\n /// [`eyre::Report`]: https://docs.rs/eyre/*/eyre/struct.Report.html\n\n fn capture_eyre(&self, e: &eyre::Report) -> Uuid;\n\n}\n\n\n\nimpl EyreHubExt for Hub {\n\n fn capture_eyre(&self, e: &eyre::Report) -> Uuid {\n\n let err: &dyn std::error::Error = e.as_ref();\n\n let mut evt = sentry_core::event_from_error(err);\n\n\n\n // Add traceback\n\n if let Some(bt) = e.backtrace() {\n\n if let Some(mut st) = backtrace_to_stacktrace(bt) {\n\n if let Some(client) = self.client() {\n\n process_event_stacktrace(&mut st, client.options());\n\n }\n\n\n\n if let Some(mut exc) = evt.exception.last_mut() {\n\n exc.stacktrace = Some(st);\n\n }\n\n }\n\n }\n\n\n\n self.capture_event(evt)\n\n }\n\n}\n", "file_path": "crates/sentry-eyre/src/lib.rs", "rank": 6, "score": 178889.60155395253 }, { "content": "fn extract_event_from_request(req: &HttpRequest) -> Option<EventType> {\n\n req.headers()\n\n .get(GITHUB_EVENT_HEADER)\n\n .and_then(|x| x.to_str().ok())\n\n .and_then(|x| EventType::try_from(x).ok())\n\n}\n\n\n\npub(crate) async fn event_handler(\n\n req: HttpRequest,\n\n mut payload: 
web::Payload,\n\n ctx: web::Data<Arc<AppContext>>,\n\n) -> ActixResult<HttpResponse> {\n\n // Route event depending on header\n\n if let Some(event_type) = extract_event_from_request(&req) {\n\n if let Ok(body) = convert_payload_to_string(&mut payload).await {\n\n sentry::configure_scope(|scope| {\n\n scope.set_extra(\"Event type\", event_type.to_str().into());\n\n scope.set_extra(\"Payload\", body.clone().into());\n\n });\n\n\n", "file_path": "crates/github_scbot_server/src/webhook/mod.rs", "rank": 7, "score": 158544.92817964757 }, { "content": "/// Initialize command line.\n\npub fn initialize_command_line() -> eyre::Result<()> {\n\n // Prepare startup\n\n let config = configure_startup()?;\n\n\n\n async fn sync(config: Config, cmd: SubCommand, no_input: bool) -> eyre::Result<()> {\n\n let pool = establish_pool_connection(&config)?;\n\n run_migrations(&pool)?;\n\n\n\n let db_adapter = DatabaseAdapter::new(pool);\n\n let api_adapter = GithubAPIAdapter::new(config.clone());\n\n let redis_adapter = RedisAdapter::new(&config.redis_address);\n\n let ctx = CommandContext {\n\n config,\n\n db_adapter: Box::new(db_adapter),\n\n api_adapter: Box::new(api_adapter),\n\n redis_adapter: Box::new(redis_adapter),\n\n no_input,\n\n };\n\n\n\n cmd.execute(ctx).await\n", "file_path": "crates/github_scbot_cli/src/lib.rs", "rank": 8, "score": 140358.03066422578 }, { "content": "/// Check if a signature is valid.\n\npub fn is_valid_signature<'a>(signature: &str, body: &'a [u8], secret: &str) -> bool {\n\n let digest = Sha256::new();\n\n let mut hmac = Hmac::new(digest, secret.as_bytes());\n\n hmac.input(body);\n\n let expected_signature = hmac.result();\n\n\n\n crypto::util::fixed_time_eq(\n\n hex::encode(expected_signature.code()).as_bytes(),\n\n signature.as_bytes(),\n\n )\n\n}\n\n\n\n/// Convert Actix payload to bytes.\n\npub async fn convert_payload_to_bytes(payload: &mut Payload) -> Result<Bytes, Box<dyn Error>> {\n\n let mut body = BytesMut::new();\n\n\n\n while let Some(chunk) 
= payload.next().await {\n\n body.extend_from_slice(&chunk?);\n\n }\n\n\n", "file_path": "crates/github_scbot_server/src/utils.rs", "rank": 9, "score": 132412.92282215843 }, { "content": "fn env_to_bool(name: &str, default: bool) -> bool {\n\n env::var(name).map(|e| !e.is_empty()).unwrap_or(default)\n\n}\n\n\n", "file_path": "crates/github_scbot_conf/src/config.rs", "rank": 10, "score": 128013.64375506013 }, { "content": "/// Configure webhook handlers.\n\npub fn configure_webhook_handlers(cfg: &mut web::ServiceConfig) {\n\n cfg.service(web::resource(\"\").route(web::post().to(event_handler)));\n\n}\n", "file_path": "crates/github_scbot_server/src/webhook/mod.rs", "rank": 11, "score": 124548.21416063313 }, { "content": "/// Handle `Help` command.\n\npub fn handle_help_command(\n\n config: &Config,\n\n comment_author: &str,\n\n) -> Result<CommandExecutionResult> {\n\n let comment = format!(\n\n \"Hello **{}** ! I am a GitHub helper bot ! :robot:\\n\\\n\n You can ping me with a command in the format: `{} <command> (<arguments>)`\\n\\\n\n \\n\\\n\n Supported commands:\\n\\\n\n - `noqa+`: _Skip QA validation_\\n\\\n\n - `noqa-`: _Enable QA validation_\\n\\\n\n - `qa+`: _Mark QA as passed_\\n\\\n\n - `qa-`: _Mark QA as failed_\\n\\\n\n - `qa?`: _Mark QA as waiting_\\n\\\n\n - `nochecks+`: _Skip checks validation_\\n\\\n\n - `nochecks-`: _Enable checks validation_\\n\\\n\n - `automerge+`: _Enable auto-merge for this PR (once all checks pass)_\\n\\\n\n - `automerge-`: _Disable auto-merge for this PR_\\n\\\n\n - `lock+ <reason?>`: _Lock a pull-request (block merge)_\\n\\\n\n - `lock- <reason?>`: _Unlock a pull-request (unblock merge)_\\n\\\n", "file_path": "crates/github_scbot_logic/src/commands/handlers.rs", "rank": 12, "score": 116851.2932836271 }, { "content": "/// Handle `AdminHelp` command.\n\npub fn handle_admin_help_command(\n\n config: &Config,\n\n comment_author: &str,\n\n) -> Result<CommandExecutionResult> {\n\n let comment = format!(\n\n \"Hello **{}** ! 
I am a GitHub helper bot ! :robot:\\n\\\n\n You can ping me with a command in the format: `{} <command> (<arguments>)`\\n\\\n\n \\n\\\n\n Supported admin commands:\\n\\\n\n - `admin-help`: _Show this comment_\\n\\\n\n - `admin-enable`: _Enable me on a pull request with manual interaction_\\n\\\n\n - `admin-disable`: _Disable me on a pull request with manual interaction_\\n\\\n\n - `admin-set-default-needed-reviewers <count>`: _Set default needed reviewers count for this repository_\\n\\\n\n - `admin-set-default-merge-strategy <merge|squash|rebase>`: _Set default merge strategy for this repository_\\n\\\n\n - `admin-set-default-pr-title-regex <regex?>`: _Set default PR title validation regex for this repository_\\n\\\n\n - `admin-set-default-automerge+`: _Set automerge enabled for this repository_\\n\\\n\n - `admin-set-default-automerge-`: _Set automerge disabled for this repository_\\n\\\n\n - `admin-set-default-qa-status+`: _Enable QA validation by default for this repository_\\n\\\n\n - `admin-set-default-qa-status-`: _Disable QA validation by default for this repository_\\n\\\n\n - `admin-set-default-checks-status+`: _Enable checks validation by default for this repository_\\n\\\n", "file_path": "crates/github_scbot_logic/src/commands/handlers.rs", "rank": 13, "score": 114978.69004902066 }, { "content": "#[test]\n\nfn test_pull_request_opened_event_parsing() -> ServerResult<()> {\n\n assert_eq!(\n\n parse_pull_request_event(fixtures::PULL_REQUEST_OPENED_DATA)?,\n\n GhPullRequestEvent {\n\n action: GhPullRequestAction::Opened,\n\n number: 1214,\n\n pull_request: GhPullRequest {\n\n number: 1214,\n\n state: GhPullRequestState::Open,\n\n locked: false,\n\n title: \"This is a PR\".to_string(),\n\n user: GhUser {\n\n login: \"me\".to_string()\n\n },\n\n body: Some(\"Ceci est\\nle corps de la \\nPR\".to_string()),\n\n created_at: chrono::DateTime::parse_from_rfc3339(\"2020-11-13T17:34:23Z\")\n\n .expect(\"bad date\")\n\n .with_timezone(&chrono::Utc),\n\n updated_at: 
chrono::DateTime::parse_from_rfc3339(\"2020-11-13T17:34:23Z\")\n\n .expect(\"bad date\")\n", "file_path": "crates/github_scbot_server/src/webhook/tests/webhook.rs", "rank": 14, "score": 111106.6025866188 }, { "content": "#[test]\n\nfn test_pull_request_labeled_event_parsing() -> ServerResult<()> {\n\n assert_eq!(\n\n parse_pull_request_event(fixtures::PULL_REQUEST_LABELED_DATA)?,\n\n GhPullRequestEvent {\n\n action: GhPullRequestAction::Labeled,\n\n number: 1214,\n\n pull_request: GhPullRequest {\n\n number: 1214,\n\n state: GhPullRequestState::Open,\n\n locked: false,\n\n title: \"This is a PR\".to_string(),\n\n user: GhUser {\n\n login: \"me\".to_string()\n\n },\n\n body: Some(\"This is a PR body\".to_string()),\n\n created_at: chrono::DateTime::parse_from_rfc3339(\"2020-11-13T17:34:23Z\")\n\n .expect(\"bad date\")\n\n .with_timezone(&chrono::Utc),\n\n updated_at: chrono::DateTime::parse_from_rfc3339(\"2020-11-13T17:39:42Z\")\n\n .expect(\"bad date\")\n", "file_path": "crates/github_scbot_server/src/webhook/tests/webhook.rs", "rank": 15, "score": 111106.6025866188 }, { "content": "/// Configure application startup.\n\npub fn configure_startup() -> Result<Config> {\n\n dotenv::dotenv().ok();\n\n github_scbot_sentry::eyre::install().ok();\n\n\n\n let config = Config::from_env();\n\n self::logging::configure_logging(&config);\n\n\n\n self::validation::validate_configuration(&config)?;\n\n Ok(config)\n\n}\n", "file_path": "crates/github_scbot_conf/src/lib.rs", "rank": 16, "score": 109931.22308158173 }, { "content": "pub fn configure_logging(config: &Config) {\n\n LogTracer::init().expect(\"Unable to setup log tracer.\");\n\n\n\n let log_config = std::env::var(\"RUST_LOG\").unwrap_or_else(|_| DEFAULT_ENV_CONFIG.to_string());\n\n\n\n if config.logging_use_bunyan {\n\n let app_name = concat!(env!(\"CARGO_PKG_NAME\"), \"-\", env!(\"CARGO_PKG_VERSION\")).to_string();\n\n let layer = BunyanFormattingLayer::new(app_name, std::io::stdout);\n\n let subscriber = 
Registry::default()\n\n .with(EnvFilter::from_str(&log_config).expect(\"Bad log configuration\"))\n\n .with(JsonStorageLayer)\n\n .with(layer);\n\n tracing::subscriber::set_global_default(subscriber).unwrap();\n\n } else {\n\n let subscriber = tracing_subscriber::fmt().finish();\n\n tracing::subscriber::set_global_default(subscriber).unwrap();\n\n }\n\n}\n", "file_path": "crates/github_scbot_conf/src/logging.rs", "rank": 17, "score": 109931.22308158173 }, { "content": "fn parse_event_type<'de, T>(event_type: EventType, body: &'de str) -> Result<T>\n\nwhere\n\n T: Deserialize<'de>,\n\n{\n\n serde_json::from_str(body).map_err(|e| ServerError::EventParseError(event_type, e))\n\n}\n\n\n", "file_path": "crates/github_scbot_server/src/webhook/mod.rs", "rank": 18, "score": 109889.50999978586 }, { "content": "/// Get uninitialized client.\n\npub fn get_uninitialized_client() -> Result<Octocrab> {\n\n Octocrab::builder().build().map_err(ApiError::from)\n\n}\n\n\n\nasync fn get_authentication_credentials(\n\n config: &Config,\n\n api_adapter: &dyn IAPIAdapter,\n\n) -> Result<String> {\n\n if config.github_api_token.is_empty() {\n\n create_installation_access_token(config, api_adapter).await\n\n } else {\n\n Ok(config.github_api_token.clone())\n\n }\n\n}\n\n\n", "file_path": "crates/github_scbot_ghapi/src/auth.rs", "rank": 19, "score": 108058.61984697527 }, { "content": "/// Validate configuration.\n\npub fn validate_configuration(config: &Config) -> Result<()> {\n\n validate_env_vars(config)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n // RSA key specifically generated for these tests.\n\n const SAMPLE_RSA_KEY: &str = r\"\n\n-----BEGIN RSA PUBLIC 
KEY-----\n\nMIIBigKCAYEAzEWMCHfwGGXxwFDRtHn43opUTW/qMXUoLH7KLpO0meL9jv/TNnI5\n\ntotrx/AbnqpKI50TNpYKfw08C9/WC3SZMuyudBOSShXmDjq1yVOM7p9+gjjw5O78\n\n60WqyiUbxOHOIz4CfgoEr23h9I916SCGzqEVTCHvlDE5qQcdNoHeYdohWUTMGxKs\n\niRMbbHsNvD56zJ8U4AOjOb4J2410ZMx+VQGXeFtZvWYL2EFq1ZiGoo1ZIUZPRImO\n\naxGG0RhzwQdaiktCP7ENjwpr5MBsKlwXFOEb6LdeaCAOqOd05qf4yphzBbLiLK7Y\n\nCZbQ5S3QVQMrn0ycdtFlWt0kAVps9WdB+8izDehuN+pozTm+mjehFsEEj4REGyHu\n\nH3iwEyuGr90vKWEht1Wfvt9C4guBhoLQlSwzgTqNgbHDXiasITmMUwzsgxyASxop\n\n7ih/0aNRO/HfV7rQgFwMrCfPijZJkQHyougprERZJD6U9pPvAIow3G535LpT7mwC\n\n2zEcABBQBwtxAgMBAAE=\n", "file_path": "crates/github_scbot_conf/src/validation.rs", "rank": 20, "score": 105262.5368495726 }, { "content": "/// Run migrations.\n\npub fn run_migrations(pool: &DbPool) -> Result<()> {\n\n embedded_migrations::run(&*pool.get()?)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/github_scbot_database/src/lib.rs", "rank": 21, "score": 103492.93327441733 }, { "content": "#[proc_macro_derive(SCGetter, attributes(get, get_ref, get_as, get_try_from))]\n\npub fn add_scgetter(input: TokenStream) -> TokenStream {\n\n let ast: DeriveInput = syn::parse(input).unwrap();\n\n\n\n let name = &ast.ident;\n\n let generics = &ast.generics;\n\n let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();\n\n\n\n if let Data::Struct(DataStruct { ref fields, .. 
}) = ast.data {\n\n let generated = fields\n\n .iter()\n\n .filter_map(|field| {\n\n if has_tag(field.attrs.iter(), \"get_ref\") {\n\n let field_name = field.clone().ident.unwrap();\n\n let ty = field.ty.clone();\n\n let fn_name = Ident::new(&format!(\"{}\", field_name), Span::call_site());\n\n let doc = field.attrs.iter().filter(|v| {\n\n v.parse_meta()\n\n .map(|meta| meta.path().is_ident(\"doc\"))\n\n .unwrap_or(false)\n\n });\n", "file_path": "crates/github_scbot_database_macros/src/lib.rs", "rank": 22, "score": 100230.50527767983 }, { "content": "pub fn configure_debug_handlers(cfg: &mut web::ServiceConfig) {\n\n cfg.service(web::resource(\"panic\").route(web::get().to(panic_route)));\n\n cfg.service(web::resource(\"error\").route(web::get().to(error_route)));\n\n cfg.service(web::resource(\"error-nest\").route(web::get().to(error_route_nest)));\n\n}\n\n\n\nasync fn error_route() -> ActixResult<HttpResponse> {\n\n will_error().await.map_err(WrapEyre::to_http_error)?;\n\n\n\n Ok(HttpResponse::Ok().json(serde_json::json!({\"message\": \"ok\"})))\n\n}\n\n\n\nasync fn error_route_nest() -> ActixResult<HttpResponse> {\n\n will_error_nest().await.map_err(WrapEyre::to_http_error)?;\n\n\n\n Ok(HttpResponse::Ok().json(serde_json::json!({\"message\": \"ok\"})))\n\n}\n\n\n\nasync fn panic_route() -> ActixResult<HttpResponse> {\n\n panic!(\"Oh noes, a panic.\")\n", "file_path": "crates/github_scbot_server/src/debug.rs", "rank": 23, "score": 97770.61488045276 }, { "content": "/// Establish a connection to a database pool.\n\npub fn establish_pool_connection(config: &Config) -> Result<DbPool> {\n\n ConnectionBuilder::configure(config).build_pool()\n\n}\n\n\n", "file_path": "crates/github_scbot_database/src/lib.rs", "rank": 24, "score": 97770.61488045276 }, { "content": "#[derive(Debug, Deserialize, Serialize)]\n\nstruct ImportExportModel {\n\n repositories: Vec<RepositoryModel>,\n\n pull_requests: Vec<PullRequestModel>,\n\n reviews: Vec<ReviewModel>,\n\n merge_rules: 
Vec<MergeRuleModel>,\n\n accounts: Vec<AccountModel>,\n\n external_accounts: Vec<ExternalAccountModel>,\n\n external_account_rights: Vec<ExternalAccountRightModel>,\n\n}\n\n\n\n/// Export database models to JSON.\n\npub async fn export_models_to_json<W>(\n\n db_adapter: &dyn IDatabaseAdapter,\n\n writer: &mut W,\n\n) -> Result<()>\n\nwhere\n\n W: Write,\n\n{\n\n let model = ImportExportModel {\n\n repositories: db_adapter.repository().list().await?,\n", "file_path": "crates/github_scbot_database/src/import_export/mod.rs", "rank": 25, "score": 95676.28394237574 }, { "content": "/// Handle `Ping` command.\n\npub fn handle_ping_command(comment_author: &str) -> Result<CommandExecutionResult> {\n\n let comment = format!(\"**{}** pong!\", comment_author);\n\n Ok(CommandExecutionResult::builder()\n\n .with_action(ResultAction::AddReaction(GhReactionType::Eyes))\n\n .with_action(ResultAction::PostComment(comment))\n\n .build())\n\n}\n\n\n\n/// Handle `Gif` command.\n\npub async fn handle_gif_command(\n\n config: &Config,\n\n api_adapter: &dyn IAPIAdapter,\n\n search_terms: &str,\n\n) -> Result<CommandExecutionResult> {\n\n Ok(CommandExecutionResult::builder()\n\n .with_action(ResultAction::AddReaction(GhReactionType::Eyes))\n\n .with_action(ResultAction::PostComment(\n\n GifPoster::generate_random_gif_comment(config, api_adapter, search_terms).await?,\n\n ))\n\n .build())\n", "file_path": "crates/github_scbot_logic/src/commands/handlers.rs", "rank": 26, "score": 93468.54475407349 }, { "content": "#[async_trait]\n\npub trait IPullRequestDbAdapter {\n\n /// Creates a pull request.\n\n async fn create(&self, entry: PullRequestCreation) -> Result<PullRequestModel>;\n\n /// Fetch status comment ID from a pull request ID.\n\n async fn fetch_status_comment_id(&self, pull_request_id: i32) -> Result<i32>;\n\n /// Lists available pull requests.\n\n async fn list(&self) -> Result<Vec<PullRequestModel>>;\n\n /// Lists available pull requests from a repository path.\n\n async fn 
list_from_repository_path(&self, path: &str) -> Result<Vec<PullRequestModel>>;\n\n /// Gets an existing pull request from a repository and a pull request number.\n\n async fn get_from_repository_and_number(\n\n &self,\n\n repository: &RepositoryModel,\n\n number: u64,\n\n ) -> Result<PullRequestModel>;\n\n /// Gets an existing pull request from a repository path and a pull request number.\n\n async fn get_from_repository_path_and_number(\n\n &self,\n\n path: &str,\n\n number: u64,\n", "file_path": "crates/github_scbot_database/src/models/pulls/adapter.rs", "rank": 27, "score": 89039.03159301495 }, { "content": "#[test]\n\nfn test_ping_event_parsing() -> ServerResult<()> {\n\n assert_eq!(\n\n parse_ping_event(fixtures::PING_EVENT_DATA)?,\n\n GhPingEvent {\n\n zen: \"Favor focus over features.\".to_string(),\n\n hook_id: 12_345_678,\n\n repository: Some(GhRepository {\n\n name: \"test-repo\".to_string(),\n\n full_name: \"Example/test-repo\".to_string(),\n\n owner: GhUser {\n\n login: \"Example\".to_string()\n\n }\n\n }),\n\n sender: Some(GhUser {\n\n login: \"Example\".to_string()\n\n })\n\n }\n\n );\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/github_scbot_server/src/webhook/tests/webhook.rs", "rank": 28, "score": 85229.57420057058 }, { "content": "#[test]\n\nfn test_review_submitted_event_parsing() -> ServerResult<()> {\n\n assert_eq!(\n\n parse_review_event(fixtures::PULL_REQUEST_REVIEW_SUBMITTED_DATA)?,\n\n GhReviewEvent {\n\n action: GhReviewAction::Submitted,\n\n review: GhReview {\n\n user: GhUser {\n\n login: \"me\".to_string()\n\n },\n\n submitted_at: chrono::DateTime::parse_from_rfc3339(\"2020-11-13T17:25:46Z\")\n\n .expect(\"bad date\")\n\n .with_timezone(&chrono::Utc),\n\n state: GhReviewState::ChangesRequested\n\n },\n\n pull_request: GhPullRequest {\n\n number: 1206,\n\n state: GhPullRequestState::Open,\n\n locked: false,\n\n title: \"This is a PR\".to_string(),\n\n user: GhUser {\n", "file_path": 
"crates/github_scbot_server/src/webhook/tests/webhook.rs", "rank": 29, "score": 83641.35355766954 }, { "content": "#[test]\n\nfn test_issue_comment_created_event_parsing() -> ServerResult<()> {\n\n assert_eq!(\n\n parse_issue_comment_event(fixtures::ISSUE_COMMENT_CREATED_DATA)?,\n\n GhIssueCommentEvent {\n\n action: GhIssueCommentAction::Created,\n\n changes: None,\n\n issue: GhIssue {\n\n number: 1,\n\n title: \"Add the webhook module\".to_string(),\n\n user: GhUser {\n\n login: \"me\".to_string()\n\n },\n\n labels: vec![],\n\n state: GhIssueState::Open,\n\n created_at: chrono::DateTime::parse_from_rfc3339(\"2020-11-15T15:49:48Z\")\n\n .expect(\"bad date\")\n\n .with_timezone(&chrono::Utc),\n\n updated_at: chrono::DateTime::parse_from_rfc3339(\"2020-11-15T16:13:15Z\")\n\n .expect(\"bad date\")\n\n .with_timezone(&chrono::Utc),\n", "file_path": "crates/github_scbot_server/src/webhook/tests/webhook.rs", "rank": 30, "score": 82133.83241925482 }, { "content": "#[test]\n\nfn test_check_suite_completed_event_parsing() -> ServerResult<()> {\n\n assert_eq!(\n\n parse_check_suite_event(fixtures::CHECK_SUITE_COMPLETED_DATA)?,\n\n GhCheckSuiteEvent {\n\n action: GhCheckSuiteAction::Completed,\n\n check_suite: GhCheckSuite {\n\n id: 12_345_678,\n\n head_branch: \"head-branch\".to_string(),\n\n head_sha: \"12345678123456781234567812345678\".to_string(),\n\n status: GhCheckStatus::Completed,\n\n conclusion: Some(GhCheckConclusion::Failure),\n\n pull_requests: vec![GhPullRequestShort {\n\n number: 1214,\n\n head: GhBranchShort {\n\n reference: \"head-branch\".to_string(),\n\n sha: \"12345678123456781234567812345678\".to_string(),\n\n },\n\n base: GhBranchShort {\n\n reference: \"stable\".to_string(),\n\n sha: \"12345678123456781234567812345678\".to_string(),\n", "file_path": "crates/github_scbot_server/src/webhook/tests/webhook.rs", "rank": 31, "score": 82133.83241925482 }, { "content": "fn has_tag<'a, T: Iterator<Item = &'a Attribute>>(mut attribs: T, tag_name: &str) -> bool 
{\n\n attribs\n\n .find_map(|v| {\n\n let meta = v.parse_meta().expect(\"failed to parse attr meta data\");\n\n if meta.path().is_ident(tag_name) {\n\n Some(meta)\n\n } else {\n\n None\n\n }\n\n })\n\n .is_some()\n\n}\n\n\n", "file_path": "crates/github_scbot_database_macros/src/lib.rs", "rank": 32, "score": 81425.22281210872 }, { "content": "//! Adds support for capturing Sentry errors from [`eyre::Result`].\n\n//!\n\n//! This integration adds a new event *source*, which allows you to create events directly\n\n//! from an [`eyre::Result`] struct. As it is only an event source it does not need to be enabled in the call to\n\n//! [`sentry::init`](https://docs.rs/sentry/*/sentry/fn.init.html).\n\n//!\n\n//! This integration does not need to be installed, instead it provides an extra function to\n\n//! capture [`eyre::Result`], optionally exposing it as a method on the\n\n//! [`sentry::Hub`](https://docs.rs/sentry/*/sentry/struct.Hub.html) using the\n\n//! [`EyreHubExt`] trait.\n\n//!\n\n//! Like a plain [`std::error::Error`] being captured, [`eyre::Result`] is captured with a\n\n//! chain of all error sources, if present. See\n\n//! [`sentry::capture_error`](https://docs.rs/sentry/*/sentry/fn.capture_error.html) for\n\n//! details of this.\n\n//!\n\n//! # Example\n\n//!\n\n//! ```ignore\n\n//! use sentry_eyre::capture_eyre;\n", "file_path": "crates/sentry-eyre/src/lib.rs", "rank": 33, "score": 80437.99610026654 }, { "content": "//!\n\n//! fn function_that_might_fail() -> eyre::Result<()> {\n\n//! Err(eyre::eyre!(\"some kind of error\"))\n\n//! }\n\n//!\n\n//! if let Err(err) = function_that_might_fail() {\n\n//! capture_eyre(&err);\n\n//! }\n\n//! ```\n\n//!\n\n//! 
[`eyre::Error`]: https://docs.rs/eyre/*/eyre/struct.Report.html\n\n\n\n#![doc(html_favicon_url = \"https://sentry-brand.storage.googleapis.com/favicon.ico\")]\n\n#![doc(html_logo_url = \"https://sentry-brand.storage.googleapis.com/sentry-glyph-black.png\")]\n\n#![warn(missing_docs)]\n\n#![deny(unsafe_code)]\n\n\n\nuse sentry_backtrace::{backtrace_to_stacktrace, process_event_stacktrace};\n\nuse sentry_core::{types::Uuid, Hub};\n\npub use stable_eyre;\n", "file_path": "crates/sentry-eyre/src/lib.rs", "rank": 34, "score": 80436.3464291203 }, { "content": "//! Eyre report wrapper.\n\n\n\nuse std::fmt;\n\n\n\nuse actix_web::{\n\n dev::HttpResponseBuilder,\n\n http::{header, StatusCode},\n\n Error, HttpResponse, ResponseError,\n\n};\n\nuse stable_eyre::eyre;\n\n\n\n/// Eyre Report wrapper.\n\npub struct WrapEyre {\n\n report: eyre::Report,\n\n status_code: StatusCode,\n\n}\n\n\n\nimpl WrapEyre {\n\n /// Create eyre wrapper.\n\n pub fn new(report: eyre::Report, status_code: StatusCode) -> Self {\n", "file_path": "crates/sentry-actix/src/eyre.rs", "rank": 35, "score": 80435.03000099969 }, { "content": "use stable_eyre::{eyre, BacktraceExt};\n\n\n\n/// Captures an [`eyre::Report`].\n\n///\n\n/// This will capture an eyre report as a sentry event if a\n\n/// [`sentry::Client`](../../struct.Client.html) is initialised, otherwise it will be a\n\n/// no-op. 
The event is dispatched to the thread-local hub, with semantics as described in\n\n/// [`Hub::current`].\n\n///\n\n/// See [module level documentation](index.html) for more information.\n\n///\n\n/// [`eyre::Report`]: https://docs.rs/eyre/*/eyre/struct.Report.html\n", "file_path": "crates/sentry-eyre/src/lib.rs", "rank": 36, "score": 80433.10685752532 }, { "content": " Self {\n\n report,\n\n status_code,\n\n }\n\n }\n\n\n\n /// Convert any error\n\n pub fn to_http_error<E: Into<WrapEyre>>(e: E) -> Error {\n\n e.into().into()\n\n }\n\n}\n\n\n\nimpl fmt::Display for WrapEyre {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n fmt::Display::fmt(&self.report, f)\n\n }\n\n}\n\n\n\nimpl fmt::Debug for WrapEyre {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n", "file_path": "crates/sentry-actix/src/eyre.rs", "rank": 37, "score": 80432.19005653048 }, { "content": " fmt::Debug::fmt(&self.report, f)\n\n }\n\n}\n\n\n\nimpl std::ops::Deref for WrapEyre {\n\n type Target = eyre::Report;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.report\n\n }\n\n}\n\n\n\nimpl ResponseError for WrapEyre {\n\n fn status_code(&self) -> StatusCode {\n\n self.status_code\n\n }\n\n\n\n fn error_response(&self) -> HttpResponse {\n\n HttpResponseBuilder::new(self.status_code())\n\n .set_header(header::CONTENT_TYPE, \"application/json; charset=utf-8\")\n\n .body(serde_json::json!({\n\n \"error\": self.report.to_string()\n\n }))\n\n }\n\n}\n", "file_path": "crates/sentry-actix/src/eyre.rs", "rank": 38, "score": 80428.00596701782 }, { "content": "fn validate_api_credentials(config: &Config) -> Result<(), ApiConfigError> {\n\n // Check token first\n\n if config.github_api_token.is_empty() {\n\n match validate_github_app_config(config) {\n\n // If private key is missing, you might want to use token instead.\n\n Err(ApiConfigError::MissingPrivateKey) => Err(ApiConfigError::MissingToken),\n\n res => res,\n\n }\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": 
"crates/github_scbot_conf/src/validation.rs", "rank": 39, "score": 78965.30815460505 }, { "content": "fn validate_github_app_config(config: &Config) -> Result<(), ApiConfigError> {\n\n // Check Private key\n\n if config.github_app_private_key.is_empty() {\n\n Err(ApiConfigError::MissingPrivateKey)\n\n } else {\n\n match JwtUtils::parse_encoding_key(&config.github_app_private_key) {\n\n Err(_) => Err(ApiConfigError::InvalidPrivateKey),\n\n Ok(_) => {\n\n // Check App ID\n\n if config.github_app_id == 0 {\n\n Err(ApiConfigError::MissingAppId)\n\n } else if config.github_app_installation_id == 0 {\n\n Err(ApiConfigError::MissingInstallationId)\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/github_scbot_conf/src/validation.rs", "rank": 40, "score": 77458.10319694315 }, { "content": "use argh::FromArgs;\n\nuse async_trait::async_trait;\n\nuse github_scbot_sentry::eyre::Result;\n\n\n\nuse super::{Command, CommandContext};\n\n\n\nmod list;\n\nmod set_merge_strategy;\n\nmod show;\n\nmod sync;\n\n\n\nuse self::{\n\n list::PullRequestListCommand, set_merge_strategy::PullRequestSetMergeStrategyCommand,\n\n show::PullRequestShowCommand, sync::PullRequestSyncCommand,\n\n};\n\n\n\n/// manage pull requests.\n\n#[derive(FromArgs)]\n\n#[argh(subcommand, name = \"pull-requests\")]\n\npub(crate) struct PullRequestCommand {\n", "file_path": "crates/github_scbot_cli/src/commands/pull_request/mod.rs", "rank": 41, "score": 69662.65261847836 }, { "content": "#[async_trait(?Send)]\n\nimpl Command for PullRequestSubCommand {\n\n async fn execute(self, ctx: CommandContext) -> Result<()> {\n\n match self {\n\n Self::List(sub) => sub.execute(ctx).await,\n\n Self::Show(sub) => sub.execute(ctx).await,\n\n Self::Sync(sub) => sub.execute(ctx).await,\n\n Self::SetMergeStrategy(sub) => sub.execute(ctx).await,\n\n }\n\n }\n\n}\n", "file_path": "crates/github_scbot_cli/src/commands/pull_request/mod.rs", "rank": 42, "score": 69652.41519220563 }, { "content": " 
#[argh(subcommand)]\n\n inner: PullRequestSubCommand,\n\n}\n\n\n\n#[async_trait(?Send)]\n\nimpl Command for PullRequestCommand {\n\n async fn execute(self, ctx: CommandContext) -> Result<()> {\n\n self.inner.execute(ctx).await\n\n }\n\n}\n\n\n\n#[derive(FromArgs)]\n\n#[argh(subcommand)]\n\npub(crate) enum PullRequestSubCommand {\n\n Show(PullRequestShowCommand),\n\n Sync(PullRequestSyncCommand),\n\n SetMergeStrategy(PullRequestSetMergeStrategyCommand),\n\n List(PullRequestListCommand),\n\n}\n\n\n", "file_path": "crates/github_scbot_cli/src/commands/pull_request/mod.rs", "rank": 43, "score": 69651.97135810606 }, { "content": "#[derive(FromArgs)]\n\n#[argh(description = \"SharingCloud PR Bot\")]\n\nstruct Args {\n\n #[argh(subcommand)]\n\n cmd: Option<SubCommand>,\n\n\n\n /// do not ask for input.\n\n #[argh(switch)]\n\n no_input: bool,\n\n\n\n /// show version.\n\n #[argh(switch)]\n\n version: bool,\n\n}\n\n\n", "file_path": "crates/github_scbot_cli/src/lib.rs", "rank": 44, "score": 67659.46906516736 }, { "content": "struct ConnectionBuilder {\n\n database_url: String,\n\n pool_size: u32,\n\n}\n\n\n\nimpl ConnectionBuilder {\n\n fn configure(config: &Config) -> Self {\n\n Self {\n\n database_url: config.database_url.clone(),\n\n pool_size: config.database_pool_size,\n\n }\n\n }\n\n\n\n fn build_pool(self) -> Result<DbPool> {\n\n let manager = ConnectionManager::<PgConnection>::new(&self.database_url);\n\n Ok(Pool::builder().max_size(self.pool_size).build(manager)?)\n\n }\n\n}\n", "file_path": "crates/github_scbot_database/src/lib.rs", "rank": 45, "score": 66458.78711793569 }, { "content": "#[derive(Debug, Serialize, Deserialize)]\n\nstruct JwtClaims {\n\n iat: u64,\n\n exp: u64,\n\n iss: u64,\n\n}\n\n\n", "file_path": "crates/github_scbot_ghapi/src/auth.rs", "rank": 46, "score": 66458.78711793569 }, { "content": "#[derive(Debug, Serialize, Deserialize)]\n\nstruct InstallationTokenResponse {\n\n token: String,\n\n expires_at: String,\n\n}\n\n\n\n/// Get an 
authenticated GitHub client builder.\n\npub async fn get_client_builder(\n\n config: &Config,\n\n api_adapter: &dyn IAPIAdapter,\n\n) -> Result<OctocrabBuilder> {\n\n let token = get_authentication_credentials(config, api_adapter).await?;\n\n Ok(Octocrab::builder().personal_token(token))\n\n}\n\n\n", "file_path": "crates/github_scbot_ghapi/src/auth.rs", "rank": 47, "score": 65332.309863558 }, { "content": "fn main() {\n\n if let Err(err) = github_scbot_cli::initialize_command_line() {\n\n eprintln!(\"{}\", format!(\"ERROR: {:?}\", err).red());\n\n std::process::exit(1);\n\n }\n\n}\n", "file_path": "crates/github_scbot_cli/src/main.rs", "rank": 48, "score": 63793.901027192696 }, { "content": "#[async_trait]\n\npub trait IReviewDbAdapter {\n\n /// Creates a new review.\n\n async fn create(&self, entry: ReviewCreation) -> Result<ReviewModel>;\n\n /// Lists available reviews.\n\n async fn list(&self) -> Result<Vec<ReviewModel>>;\n\n /// Lists reviews from pull request ID.\n\n async fn list_from_pull_request_id(&self, pull_request_id: i32) -> Result<Vec<ReviewModel>>;\n\n /// Lists reviews from pull request and username.\n\n async fn get_from_pull_request_and_username(\n\n &self,\n\n repository: &RepositoryModel,\n\n pull_request: &PullRequestModel,\n\n username: &str,\n\n ) -> Result<ReviewModel>;\n\n /// Removes an existing review.\n\n async fn remove(&self, entry: ReviewModel) -> Result<()>;\n\n /// Removes all existing reviews for a pull request ID.\n\n async fn remove_all_for_pull_request(&self, pull_request_id: i32) -> Result<()>;\n\n /// Update.\n\n async fn update(&self, entry: &mut ReviewModel, update: ReviewUpdate) -> Result<()>;\n", "file_path": "crates/github_scbot_database/src/models/review/adapter.rs", "rank": 49, "score": 58562.295056541174 }, { "content": "#[async_trait]\n\npub trait IRepositoryDbAdapter {\n\n /// Creates a new repository.\n\n async fn create(&self, entry: RepositoryCreation) -> Result<RepositoryModel>;\n\n /// Lists available 
repositories.\n\n async fn list(&self) -> Result<Vec<RepositoryModel>>;\n\n /// Gets repository from ID.\n\n async fn get_from_id(&self, id: i32) -> Result<RepositoryModel>;\n\n /// Gets repository from owner and name.\n\n async fn get_from_owner_and_name(&self, owner: &str, name: &str) -> Result<RepositoryModel>;\n\n /// Updates repository.\n\n async fn update(&self, entry: &mut RepositoryModel, update: RepositoryUpdate) -> Result<()>;\n\n}\n\n\n\n/// Concrete repository DB adapter.\n\npub struct RepositoryDbAdapter {\n\n pool: DbPool,\n\n}\n\n\n\nimpl RepositoryDbAdapter {\n\n /// Creates a new repository DB adapter.\n", "file_path": "crates/github_scbot_database/src/models/repository/adapter.rs", "rank": 50, "score": 58562.295056541174 }, { "content": "<p align=\"center\">\n\n <a href=\"https://sentry.io\" target=\"_blank\" align=\"center\">\n\n <img src=\"https://sentry-brand.storage.googleapis.com/sentry-logo-black.png\" width=\"280\">\n\n </a>\n\n</p>\n\n\n\n# sentry-eyre\n\n\n\nAdds support for capturing Sentry errors from `eyre::Report`.\n\n\n\n## Example\n\n\n\n```rust\n\nuse sentry_eyre::capture_eyre;\n\n\n\nfn function_that_might_fail() -> eyre::Result<()> {\n\n Err(eyre::eyre!(\"some kind of error\"))\n\n}\n\n\n\nif let Err(err) = function_that_might_fail() {\n\n capture_eyre(&err);\n\n}\n\n```\n\n\n\n## Resources\n\n\n\nLicense: Apache-2.0\n", "file_path": "crates/sentry-eyre/README.md", "rank": 51, "score": 57959.457449625734 }, { "content": "#[async_trait]\n\npub trait IAccountDbAdapter {\n\n /// Creates a new account.\n\n async fn create(&self, entry: AccountModel) -> Result<AccountModel>;\n\n /// Gets account from username.\n\n async fn get_from_username(&self, username: &str) -> Result<AccountModel>;\n\n /// Lists available accounts.\n\n async fn list(&self) -> Result<Vec<AccountModel>>;\n\n /// Lists available admin accounts.\n\n async fn list_admin_accounts(&self) -> Result<Vec<AccountModel>>;\n\n /// Removes a specific account.\n\n async fn 
remove(&self, entry: AccountModel) -> Result<()>;\n\n /// Saves and updates a specific account.\n\n async fn save(&self, entry: &mut AccountModel) -> Result<()>;\n\n}\n\n\n\n/// Concrete account DB adapter.\n\npub struct AccountDbAdapter {\n\n pool: DbPool,\n\n}\n\n\n", "file_path": "crates/github_scbot_database/src/models/auth/account/adapter.rs", "rank": 52, "score": 57675.85746384531 }, { "content": "#[async_trait]\n\npub trait IHistoryWebhookDbAdapter {\n\n /// Creates a new history webhook entry.\n\n async fn create(&self, entry: HistoryWebhookCreation) -> Result<HistoryWebhookModel>;\n\n /// Lists existing history webhook entries.\n\n async fn list(&self) -> Result<Vec<HistoryWebhookModel>>;\n\n /// Lists existing history webhook entries for repository.\n\n async fn list_from_repository_id(&self, repository_id: i32)\n\n -> Result<Vec<HistoryWebhookModel>>;\n\n /// Removes all history webhook entries.\n\n async fn remove_all(&self) -> Result<()>;\n\n}\n\n\n\n/// Concrete history webhook DB adapter.\n\npub struct HistoryWebhookDbAdapter {\n\n pool: DbPool,\n\n}\n\n\n\nimpl HistoryWebhookDbAdapter {\n\n /// Creates a new history webhook DB adapter.\n\n pub fn new(pool: DbPool) -> Self {\n", "file_path": "crates/github_scbot_database/src/models/history/adapter.rs", "rank": 53, "score": 57675.85746384531 }, { "content": "ALTER TABLE review ADD COLUMN approved bool;\n", "file_path": "migrations/2022-01-13-175550_approved/up.sql", "rank": 54, "score": 57254.53524856675 }, { "content": "#[async_trait]\n\npub trait IMergeRuleDbAdapter {\n\n /// Creates a new merge rule entry.\n\n async fn create(&self, entry: MergeRuleCreation) -> Result<MergeRuleModel>;\n\n /// Gets a merge rule from branches.\n\n async fn get_from_branches(\n\n &self,\n\n repository: &RepositoryModel,\n\n base_branch: &RuleBranch,\n\n head_branch: &RuleBranch,\n\n ) -> Result<MergeRuleModel>;\n\n /// Lists merge rules from a repository ID.\n\n async fn list_from_repository_id(&self, repository_id: 
i32) -> Result<Vec<MergeRuleModel>>;\n\n /// Lists existing merge rules.\n\n async fn list(&self) -> Result<Vec<MergeRuleModel>>;\n\n /// Remove a specific merge rule.\n\n async fn remove(&self, entry: MergeRuleModel) -> Result<()>;\n\n /// Update.\n\n async fn update(&self, entry: &mut MergeRuleModel, update: MergeRuleUpdate) -> Result<()>;\n\n}\n\n\n", "file_path": "crates/github_scbot_database/src/models/merge_rule/adapter.rs", "rank": 55, "score": 56836.87199149836 }, { "content": "#[async_trait]\n\npub trait IExternalAccountDbAdapter {\n\n /// Creates a new external account.\n\n async fn create(&self, entry: ExternalAccountModel) -> Result<ExternalAccountModel>;\n\n /// Gets an external account from username.\n\n async fn get_from_username(&self, username: &str) -> Result<ExternalAccountModel>;\n\n /// Lists available external accounts.\n\n async fn list(&self) -> Result<Vec<ExternalAccountModel>>;\n\n /// Removes a specific external account.\n\n async fn remove(&self, entry: ExternalAccountModel) -> Result<()>;\n\n /// Saves and updates a specific external account.\n\n async fn save(&self, entry: &mut ExternalAccountModel) -> Result<()>;\n\n}\n\n\n\n/// Concrete external account DB adapter.\n\npub struct ExternalAccountDbAdapter {\n\n pool: DbPool,\n\n}\n\n\n\nimpl ExternalAccountDbAdapter {\n\n /// Creates a new external account DB adapter.\n", "file_path": "crates/github_scbot_database/src/models/auth/external_account/adapter.rs", "rank": 56, "score": 56041.62770732041 }, { "content": "#[async_trait]\n\npub trait IRedisAdapter: Send + Sync {\n\n /// Tries to lock a resource.\n\n async fn try_lock_resource<'a>(&'a self, name: &str) -> Result<LockStatus<'a>, RedisError>;\n\n /// Checks if resource exists.\n\n async fn has_resource(&self, name: &str) -> Result<bool, RedisError>;\n\n /// Deletes a resource if it exists.\n\n async fn del_resource(&self, name: &str) -> Result<(), RedisError>;\n\n /// Wait for a resource lock, until timeout.\n\n async fn 
wait_lock_resource<'a>(\n\n &'a self,\n\n name: &str,\n\n timeout_ms: u64,\n\n ) -> Result<LockStatus<'a>, RedisError> {\n\n // Try each 100ms\n\n let mut elapsed_time = 0;\n\n let millis = 100;\n\n let duration = Duration::from_millis(millis);\n\n\n\n loop {\n\n match self.try_lock_resource(name).await? {\n", "file_path": "crates/github_scbot_redis/src/interface.rs", "rank": 57, "score": 55901.781907037235 }, { "content": "fn trace_call(method: &str) {\n\n debug!(message = \"GitHub API call\", method = method)\n\n}\n\n\n\n#[async_trait(?Send)]\n\nimpl IAPIAdapter for GithubAPIAdapter {\n\n async fn issue_labels_list(\n\n &self,\n\n owner: &str,\n\n name: &str,\n\n issue_number: u64,\n\n ) -> Result<Vec<String>> {\n\n trace_call(\"issue_labels_list\");\n\n\n\n Ok(self\n\n .get_client()\n\n .await?\n\n .issues(owner, name)\n\n .list_labels_for_issue(issue_number)\n\n .send()\n", "file_path": "crates/github_scbot_ghapi/src/adapter/github.rs", "rank": 58, "score": 55568.39140104383 }, { "content": "/// Database adapter.\n\npub trait IDatabaseAdapter: Send + Sync {\n\n /// Gets account DB adapter.\n\n fn account(&self) -> &dyn IAccountDbAdapter;\n\n /// Gets external account DB adapter.\n\n fn external_account(&self) -> &dyn IExternalAccountDbAdapter;\n\n /// Gets external account right DB adapter.\n\n fn external_account_right(&self) -> &dyn IExternalAccountRightDbAdapter;\n\n /// Gets history webhook DB adapter.\n\n fn history_webhook(&self) -> &dyn IHistoryWebhookDbAdapter;\n\n /// Gets merge rule DB adapter.\n\n fn merge_rule(&self) -> &dyn IMergeRuleDbAdapter;\n\n /// Gets pull request DB adapter.\n\n fn pull_request(&self) -> &dyn IPullRequestDbAdapter;\n\n /// Gets repository DB adapter.\n\n fn repository(&self) -> &dyn IRepositoryDbAdapter;\n\n /// Gets review DB adapter.\n\n fn review(&self) -> &dyn IReviewDbAdapter;\n\n}\n\n\n\n/// Concrete database adapter.\n", "file_path": "crates/github_scbot_database/src/models/adapter.rs", "rank": 59, "score": 
54963.74927268669 }, { "content": "#[async_trait(?Send)]\n\npub trait IAPIAdapter: Send + Sync {\n\n /// List labels from a target issue.\n\n async fn issue_labels_list(\n\n &self,\n\n owner: &str,\n\n name: &str,\n\n issue_number: u64,\n\n ) -> Result<Vec<String>>;\n\n /// Replace all labels for a target issue.\n\n async fn issue_labels_replace_all(\n\n &self,\n\n owner: &str,\n\n name: &str,\n\n issue_number: u64,\n\n labels: &[String],\n\n ) -> Result<()>;\n\n /// Get user permissions from a repository.\n\n async fn user_permissions_get(\n\n &self,\n\n owner: &str,\n", "file_path": "crates/github_scbot_ghapi/src/adapter/interface.rs", "rank": 60, "score": 54963.74927268669 }, { "content": "#[async_trait]\n\npub trait IExternalAccountRightDbAdapter {\n\n /// Lists available external account rights.\n\n async fn list(&self) -> Result<Vec<ExternalAccountRightModel>>;\n\n /// Lists available external accounts rights for username.\n\n async fn list_rights(&self, username: &str) -> Result<Vec<ExternalAccountRightModel>>;\n\n /// Gets external account right for username on repository.\n\n async fn get_right(\n\n &self,\n\n username: &str,\n\n repository: &RepositoryModel,\n\n ) -> Result<ExternalAccountRightModel>;\n\n /// Adds right to username on repository.\n\n async fn add_right(\n\n &self,\n\n username: &str,\n\n repository: &RepositoryModel,\n\n ) -> Result<ExternalAccountRightModel>;\n\n /// Removes right from username on repository.\n\n async fn remove_right(&self, username: &str, repository: &RepositoryModel) -> Result<()>;\n\n /// Removes all rights from username.\n", "file_path": "crates/github_scbot_database/src/models/auth/external_account_right/adapter.rs", "rank": 61, "score": 54569.35782917908 }, { "content": "fn get_base_url(config: &Config) -> String {\n\n config\n\n .test_database_url\n\n .split('/')\n\n .take(3)\n\n .collect::<Vec<_>>()\n\n .join(\"/\")\n\n}\n\n\n", "file_path": "crates/github_scbot_database/src/tests.rs", "rank": 62, "score": 
53094.87057428859 }, { "content": "fn validate_env_vars(config: &Config) -> Result<()> {\n\n #[inline]\n\n fn _missing(error: &mut String, name: &str) {\n\n error.push('\\n');\n\n error.push_str(&format!(\" - Missing env. var.: {}\", name));\n\n }\n\n\n\n #[inline]\n\n fn _invalid_key(error: &mut String, name: &str) {\n\n error.push('\\n');\n\n error.push_str(&format!(\" - Invalid private key: {}\", name));\n\n }\n\n\n\n let mut error = String::new();\n\n\n\n // Check server configuration\n\n if config.server_bind_ip.is_empty() {\n\n _missing(&mut error, \"BOT_SERVER_BIND_IP\");\n\n }\n\n if config.server_bind_port == 0 {\n", "file_path": "crates/github_scbot_conf/src/validation.rs", "rank": 63, "score": 53094.87057428859 }, { "content": "fn get_bind_address(config: &Config) -> String {\n\n format!(\"{}:{}\", config.server_bind_ip, config.server_bind_port)\n\n}\n\n\n\nasync fn run_bot_server_internal(ip_with_port: String, context: AppContext) -> Result<()> {\n\n let context = Arc::new(context);\n\n let cloned_context = context.clone();\n\n let prometheus = PrometheusMetrics::new(\"api\", Some(\"/metrics\"), None);\n\n\n\n let mut server = HttpServer::new(move || {\n\n let mut app = App::new()\n\n .data(context.clone())\n\n .wrap(prometheus.clone())\n\n .wrap(Sentry::new())\n\n .wrap(Logger::default())\n\n .wrap(TracingLogger)\n\n .service(\n\n web::scope(\"/external\")\n\n .wrap(HttpAuthentication::bearer(jwt_auth_validator))\n\n .wrap(Cors::permissive())\n", "file_path": "crates/github_scbot_server/src/server.rs", "rank": 64, "score": 53094.87057428859 }, { "content": "fn create_app_token(config: &Config) -> Result<String> {\n\n // GitHub App authentication documentation\n\n // https://docs.github.com/en/developers/apps/authenticating-with-github-apps#authenticating-as-a-github-app\n\n\n\n let now_ts = TimeUtils::now_timestamp();\n\n let claims = JwtClaims {\n\n // Issued at time\n\n iat: now_ts,\n\n // Expiration time, 1 minute\n\n exp: now_ts + 60,\n\n // GitHub 
App Identifier\n\n iss: config.github_app_id,\n\n };\n\n\n\n JwtUtils::create_jwt(&config.github_app_private_key, &claims)\n\n .map_err(|e| ApiError::JWTError(e.to_string()))\n\n}\n\n\n\nasync fn create_installation_access_token(\n\n config: &Config,\n", "file_path": "crates/github_scbot_ghapi/src/auth.rs", "rank": 65, "score": 50869.6289076235 }, { "content": "fn get_tag_attr(attr: &Attribute) -> Result<Ident> {\n\n let list: Meta = attr.parse_args()?;\n\n let ident = list.path().get_ident().expect(\"missing ident\");\n\n Ok(Ident::new(&format!(\"{}\", ident), Span::call_site()))\n\n}\n", "file_path": "crates/github_scbot_database_macros/src/lib.rs", "rank": 66, "score": 50033.73991209814 }, { "content": "fn env_to_u64(name: &str, default: u64) -> u64 {\n\n env::var(name)\n\n .map(|e| e.parse().unwrap_or(default))\n\n .unwrap_or(default)\n\n}\n\n\n", "file_path": "crates/github_scbot_conf/src/config.rs", "rank": 67, "score": 49688.92036249227 }, { "content": "fn env_to_str(name: &str, default: &str) -> String {\n\n env::var(name)\n\n .unwrap_or_else(|_e| default.to_string())\n\n .replace(\"\\\\n\", \"\\n\")\n\n}\n", "file_path": "crates/github_scbot_conf/src/config.rs", "rank": 68, "score": 49688.92036249227 }, { "content": "fn env_to_u32(name: &str, default: u32) -> u32 {\n\n env::var(name)\n\n .map(|e| e.parse().unwrap_or(default))\n\n .unwrap_or(default)\n\n}\n\n\n", "file_path": "crates/github_scbot_conf/src/config.rs", "rank": 69, "score": 49688.92036249227 }, { "content": "fn env_to_u16(name: &str, default: u16) -> u16 {\n\n env::var(name)\n\n .map(|e| e.parse().unwrap_or(default))\n\n .unwrap_or(default)\n\n}\n\n\n", "file_path": "crates/github_scbot_conf/src/config.rs", "rank": 70, "score": 49688.92036249227 }, { "content": "fn create_postgres_connection(base_url: &str) -> Result<PgConnection> {\n\n let url = format!(\"{}/postgres\", base_url);\n\n Ok(PgConnection::establish(&url)?)\n\n}\n\n\n", "file_path": "crates/github_scbot_database/src/tests.rs", 
"rank": 71, "score": 49241.430667410896 }, { "content": "ALTER TABLE repository ADD COLUMN default_automerge bool NOT NULL DEFAULT false;\n", "file_path": "migrations/2021-09-20-094351_default-automerge/up.sql", "rank": 72, "score": 48208.35697501732 }, { "content": "ALTER TABLE repository ADD COLUMN manual_interaction bool NOT NULL DEFAULT false;\n", "file_path": "migrations/2021-05-04-142456_manual_interaction/up.sql", "rank": 73, "score": 48208.35697501732 }, { "content": "fn create_database(conn: &PgConnection, db_name: &str) -> Result<()> {\n\n diesel::sql_query(format!(r#\"CREATE DATABASE {};\"#, db_name)).execute(conn)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/github_scbot_database/src/tests.rs", "rank": 74, "score": 48060.72212227966 }, { "content": "fn drop_database(conn: &PgConnection, db_name: &str) -> Result<()> {\n\n diesel::sql_query(format!(r#\"DROP DATABASE IF EXISTS {};\"#, db_name)).execute(conn)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/github_scbot_database/src/tests.rs", "rank": 75, "score": 48060.72212227966 }, { "content": "fn terminate_connections(conn: &PgConnection, db_name: &str) -> Result<()> {\n\n diesel::sql_query(format!(\n\n r#\"SELECT pg_terminate_backend(pg_stat_activity.pid)\n\n FROM pg_stat_activity\n\n WHERE datname = '{}'\n\n AND pid <> pg_backend_pid();\"#,\n\n db_name\n\n ))\n\n .execute(conn)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/github_scbot_database/src/tests.rs", "rank": 76, "score": 48060.72212227966 }, { "content": "ALTER TABLE repository ADD COLUMN default_enable_checks BOOL NOT NULL default true;\n", "file_path": "migrations/2021-09-21-081512_skip_checks/up.sql", "rank": 77, "score": 47367.270026625396 }, { "content": "ALTER TABLE repository ADD COLUMN default_enable_qa BOOL NOT NULL default true;\n", "file_path": "migrations/2021-09-21-081512_skip_checks/up.sql", "rank": 78, "score": 47367.270026625396 }, { "content": "fn setup_test_db(base_url: &str, db_name: &str) -> Result<()> {\n\n 
{\n\n let conn = create_postgres_connection(base_url)?;\n\n terminate_connections(&conn, db_name)?;\n\n drop_database(&conn, db_name)?;\n\n create_database(&conn, db_name)?;\n\n }\n\n\n\n {\n\n let conn = create_db_connection(base_url, db_name)?;\n\n diesel_migrations::run_pending_migrations(&conn)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/github_scbot_database/src/tests.rs", "rank": 79, "score": 47308.67111484173 }, { "content": "fn teardown_test_db(base_url: &str, db_name: &str) -> Result<()> {\n\n let conn = create_postgres_connection(base_url)?;\n\n terminate_connections(&conn, db_name)?;\n\n drop_database(&conn, db_name)\n\n}\n\n\n\n/// Using test database.\n\n#[allow(clippy::missing_panics_doc)]\n\npub async fn using_test_db<F, Fut, E>(db_name: &str, test: F) -> Result<()>\n\nwhere\n\n E: std::fmt::Debug,\n\n F: FnOnce(Config, DbPool) -> Fut,\n\n Fut: Future<Output = core::result::Result<(), E>>,\n\n{\n\n let mut config = Config::from_env();\n\n config.bot_username = \"test-bot\".into();\n\n\n\n let base_url = get_base_url(&config);\n\n teardown_test_db(&base_url, db_name)?;\n\n setup_test_db(&base_url, db_name)?;\n", "file_path": "crates/github_scbot_database/src/tests.rs", "rank": 80, "score": 47308.67111484173 }, { "content": "fn create_pool(base_url: &str, db_name: &str) -> Result<DbPool> {\n\n let url = format!(\"{}/{}\", base_url, db_name);\n\n let manager = ConnectionManager::<PgConnection>::new(&url);\n\n Ok(Pool::builder().build(manager)?)\n\n}\n\n\n", "file_path": "crates/github_scbot_database/src/tests.rs", "rank": 81, "score": 45547.091588218056 }, { "content": "fn env_to_optional_u16(name: &str, default: Option<u16>) -> Option<u16> {\n\n env::var(name)\n\n .map(|e| e.parse::<u16>().map(Some).unwrap_or(default))\n\n .unwrap_or(default)\n\n}\n\n\n", "file_path": "crates/github_scbot_conf/src/config.rs", "rank": 82, "score": 45327.436543777905 }, { "content": "fn create_db_connection(base_url: &str, db_name: &str) -> 
Result<PgConnection> {\n\n let url = format!(\"{}/{}\", base_url, db_name);\n\n Ok(PgConnection::establish(&url)?)\n\n}\n\n\n", "file_path": "crates/github_scbot_database/src/tests.rs", "rank": 83, "score": 44866.86943226792 }, { "content": "DROP TABLE pull_request;\n", "file_path": "migrations/2021-02-22-161906_squash-1/down.sql", "rank": 84, "score": 40209.91132741142 }, { "content": "\n\n// Middleware factory is `Transform` trait from actix-service crate\n\n// `S` - type of the next service\n\n// `B` - type of response's body\n\nimpl<S, B> Transform<S> for VerifySignature\n\nwhere\n\n S: Service<Request = ServiceRequest, Response = ServiceResponse<B>, Error = Error> + 'static,\n\n S::Future: 'static,\n\n B: 'static,\n\n{\n\n type Error = Error;\n\n type Future = Ready<Result<Self::Transform, Self::InitError>>;\n\n type InitError = ();\n\n type Request = ServiceRequest;\n\n type Response = ServiceResponse<B>;\n\n type Transform = VerifySignatureMiddleware<S>;\n\n\n\n fn new_transform(&self, service: S) -> Self::Future {\n\n ok(VerifySignatureMiddleware {\n\n enabled: self.enabled,\n", "file_path": "crates/github_scbot_server/src/middlewares.rs", "rank": 99, "score": 38950.68635411145 } ]
Rust
src/address.rs
jkilpatr/clarity
e75e1419095f02cd52e7e3213b1e2226a312ed02
use serde::Serialize; use serde::Serializer; use std::str; use std::str::FromStr; use utils::bytes_to_hex_str; use utils::{hex_str_to_bytes, ByteDecodeError}; #[derive(PartialEq, Debug, Clone, Eq, PartialOrd, Hash, Deserialize)] pub struct Address { data: Vec<u8>, } impl Serialize for Address { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { if *self == Address::default() { serializer.serialize_bytes(&[]) } else { serializer.serialize_bytes(&self.data) } } } impl Address { pub fn new() -> Address { Address { data: Vec::new() } } pub fn as_bytes(&self) -> &[u8] { &self.data } } impl Default for Address { fn default() -> Address { Address { data: Vec::new() } } } impl From<[u8; 20]> for Address { fn from(val: [u8; 20]) -> Address { Address { data: val.to_vec() } } } impl<'a> From<&'a [u8]> for Address { fn from(val: &'a [u8]) -> Address { Address { data: val.to_vec() } } } #[derive(Fail, Debug, PartialEq)] pub enum AddressError { #[fail(display = "Address should be exactly 40 bytes")] InvalidLengthError, #[fail(display = "Unable to decode bytes: {}", _0)] DecodeError(ByteDecodeError), #[fail(display = "Checksum error")] ChecksumError, #[fail(display = "Invalid checksum")] InvalidChecksum, } impl From<ByteDecodeError> for AddressError { fn from(e: ByteDecodeError) -> AddressError { AddressError::DecodeError(e) } } impl FromStr for Address { type Err = AddressError; fn from_str(s: &str) -> Result<Self, Self::Err> { if s.len() == 0 { return Ok(Address::default()); } let s = if s.starts_with("0x") { &s[2..] 
} else { &s }; if s.len() == 40 || s.len() == 48 { Ok(Address { data: hex_str_to_bytes(&s)?, }) } else { Err(AddressError::InvalidLengthError) } } } impl ToString for Address { fn to_string(&self) -> String { bytes_to_hex_str(&self.data) } } #[test] #[should_panic] fn decode_invalid_length() { "123".parse::<Address>().unwrap(); } #[test] #[should_panic] fn decode_invalid_character() { "\u{012345}123456789012345678901234567890123456" .parse::<Address>() .unwrap(); } #[test] fn decode() { let address: Address = "1234567890123456789012345678901234567890" .parse::<Address>() .unwrap(); assert_eq!( address, Address::from([ 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90 ]) ); } #[test] fn serialize_null_address() { use serde_rlp::ser::to_bytes; let address = Address::new(); assert_eq!(to_bytes(&address).unwrap(), [128]); } #[test] fn serialize_padded_address() { use serde_rlp::ser::to_bytes; let address: Address = "00000000000000000000000000000000000000c0".parse().unwrap(); assert_eq!( to_bytes(&address).unwrap(), [148, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xc0] ); } #[test] #[should_panic] fn address_less_than_20_filler() { let address: Address = "0b9331677e6ebf".parse().unwrap(); } #[test] fn handle_prefixed() { let address: Address = "0x000000000000000000000000000b9331677e6ebf" .parse() .unwrap(); assert_eq!( address, Address::from([ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0b, 0x93, 0x31, 0x67, 0x7e, 0x6e, 0xbf ]) ); } #[test] fn hashed() { use std::collections::HashMap; let a = Address::from_str("0x000000000000000000000000000b9331677e6ebf").unwrap(); let b = Address::from_str("0x00000000000000000000000000000000deadbeef").unwrap(); let mut map = HashMap::new(); map.insert(a.clone(), "Foo"); map.insert(b.clone(), "Bar"); assert_eq!(map.get(&a).unwrap(), &"Foo"); assert_eq!(map.get(&b).unwrap(), &"Bar"); } #[test] fn ordered() { let a = 
Address::from_str("0x000000000000000000000000000000000000000a").unwrap(); let b = Address::from_str("0x000000000000000000000000000000000000000b").unwrap(); let c = Address::from_str("0x000000000000000000000000000000000000000c").unwrap(); assert!(c > b); assert!(b > a); assert!(b < c); assert!(a < c); assert_ne!(a, b); assert_ne!(b, c); assert_ne!(a, c); }
use serde::Serialize; use serde::Serializer; use std::str; use std::str::FromStr; use utils::bytes_to_hex_str; use utils::{hex_str_to_bytes, ByteDecodeError}; #[derive(PartialEq, Debug, Clone, Eq, PartialOrd, Hash, Deserialize)] pub struct Address { data: Vec<u8>, } impl Serialize for Address { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { if *self == Address::default() { serializer.serialize_bytes(&[]) } else { serializer.serialize_bytes(&self.data) } } } impl Address { pub fn new() -> Address { Address { data: Vec::new() } } pub fn as_bytes(&self) -> &[u8] { &self.data } } impl Default for Address { fn default() -> Address { Address { data: Vec::new() } } } impl From<[u8; 20]> for Address { fn from(val: [u8; 20]) -> Address { Address { data: val.to_vec() } } } impl<'a> From<&'a [u8]> for Address { fn from(val: &'a [u8]) -> Address { Address { data: val.to_vec() } } } #[derive(Fail, Debug, PartialEq)] pub enum AddressError { #[fail(display = "Address should be exactly 40 bytes")] InvalidLengthError, #[fail(display = "Unable to decode bytes: {}", _0)] DecodeError(ByteDecodeError), #[fail(display = "Checksum error")] ChecksumError, #[fail(display = "Invalid checksum")] InvalidChecksum, } impl From<ByteDecodeError> for AddressError { fn from(e: ByteDecodeError) -> AddressError { AddressError::DecodeError(e) } } impl FromStr for Address { type Err = AddressError; fn from_str(s: &str) -> Result<Self, Self::Err> { if s.len() == 0 { return Ok(Address::default()); } let s = if s.starts_with("0x") { &s[2..] 
} else { &s }; if s.len() == 40 || s.len() == 48 { Ok(Address { data: hex_str_to_bytes(&s)?, }) } else { Err(AddressError::InvalidLengthError) } } } impl ToString for Address { fn to_string(&self) -> String { bytes_to_hex_str(&self.data) } } #[test] #[should_panic] fn decode_invalid_length() { "123".parse::<Address>().unwrap(); } #[test] #[should_panic] fn decode_invalid_character() { "\u{012345}123456789012345678901234567890123456" .parse::<Address>() .unwrap(); } #[test] fn decode() { let address: Address = "1234567890123456789012345678901234567890" .parse::<Address>() .unwrap(); assert_eq!( address, Address::from([ 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90, 0x12, 0x34, 0x56, 0x78, 0x90 ]) ); } #[test] fn serialize_null_address() { use serde_rlp::ser::to_bytes; let address = Address::new(); assert_eq!(to_bytes(&address).unwrap(), [128]); } #[test] fn serialize_padded_address() { use serde_rlp::ser::to_bytes; let address: Address = "00000000000000000000000000000000000000c0".parse().unwrap(); assert_eq!( to_bytes(&address).unwrap(), [148, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xc0] ); } #[test] #[should_panic] fn address_less_than_20_filler() { let address: Address = "0b9331677e6ebf".parse().unwrap(); } #[test] fn handle_prefixed() { let address: Address = "0x000000000000000000000000000b9331677e6ebf" .parse() .unwrap(); assert_eq!( address, Address::from([ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0x0b, 0x93, 0x31, 0x67, 0x7e, 0x6e, 0xbf ]) ); } #[test] fn hashed() { use std::collections::HashMap; let a = Address::from_str("0x000000000000000000000000000b9331677e6ebf").unw
e(), "Bar"); assert_eq!(map.get(&a).unwrap(), &"Foo"); assert_eq!(map.get(&b).unwrap(), &"Bar"); } #[test] fn ordered() { let a = Address::from_str("0x000000000000000000000000000000000000000a").unwrap(); let b = Address::from_str("0x000000000000000000000000000000000000000b").unwrap(); let c = Address::from_str("0x000000000000000000000000000000000000000c").unwrap(); assert!(c > b); assert!(b > a); assert!(b < c); assert!(a < c); assert_ne!(a, b); assert_ne!(b, c); assert_ne!(a, c); }
rap(); let b = Address::from_str("0x00000000000000000000000000000000deadbeef").unwrap(); let mut map = HashMap::new(); map.insert(a.clone(), "Foo"); map.insert(b.clon
function_block-random_span
[ { "content": "/// A function that takes a hexadecimal representation of bytes\n\n/// back into a stream of bytes.\n\npub fn hex_str_to_bytes(s: &str) -> Result<Vec<u8>, ByteDecodeError> {\n\n let s = if s.starts_with(\"0x\") { &s[2..] } else { s };\n\n s.as_bytes()\n\n .chunks(2)\n\n .map(|ch| {\n\n str::from_utf8(&ch)\n\n .map_err(|e| ByteDecodeError::DecodeError(e))\n\n .and_then(|res| {\n\n u8::from_str_radix(&res, 16).map_err(|e| ByteDecodeError::ParseError(e))\n\n })\n\n }).collect()\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 0, "score": 182127.56849082903 }, { "content": "pub fn bytes_to_hex_str(bytes: &[u8]) -> String {\n\n bytes\n\n .iter()\n\n .map(|b| format!(\"{:0>2x?}\", b))\n\n .fold(String::new(), |acc, x| acc + &x)\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 1, "score": 173893.77733834306 }, { "content": "/// Given a signature it derives a Method ID\n\npub fn derive_method_id(signature: &str) -> [u8; 4] {\n\n let digest = Keccak256::digest(signature.as_bytes());\n\n debug_assert!(digest.len() >= 4);\n\n let mut result: [u8; 4] = Default::default();\n\n result.copy_from_slice(&digest[0..4]);\n\n result\n\n}\n\n\n", "file_path": "src/abi.rs", "rank": 3, "score": 123155.95213347567 }, { "content": "fn default_gas_limit() -> String {\n\n \"0\".to_owned()\n\n}\n\n\n", "file_path": "tests/transaction_tests.rs", "rank": 4, "score": 119974.68394914723 }, { "content": "/// Pad bytes with zeros at the beggining.\n\npub fn zpad(bytes: &[u8], len: usize) -> Vec<u8> {\n\n if bytes.len() >= len {\n\n return bytes.to_vec();\n\n }\n\n let mut pad = vec![0u8; len - bytes.len()];\n\n pad.extend(bytes);\n\n pad\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 5, "score": 115882.36169271014 }, { "content": "#[test]\n\nfn clone() {\n\n let a = BigEndianInt::zero();\n\n let b = a.clone();\n\n assert_eq!(a, b);\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 6, "score": 110402.16893786428 }, { "content": "/// A helper function that encodes both signature 
and a list of tokens.\n\npub fn encode_call(sig: &str, tokens: &[Token]) -> Vec<u8> {\n\n let mut wtr = vec![];\n\n wtr.extend(&derive_method_id(sig));\n\n wtr.extend(encode_tokens(tokens));\n\n wtr\n\n}\n", "file_path": "src/abi.rs", "rank": 9, "score": 110268.97269422 }, { "content": "fn load_filler(fixture: &TestFixture) -> HashMap<String, TestFiller> {\n\n // Load filler\n\n let mut filler_path = get_fixtures_path();\n\n filler_path.push(&fixture.info.source);\n\n let file = File::open(&filler_path)\n\n .unwrap_or_else(|_| panic!(\"Unable to open filler {:?}\", filler_path));\n\n let reader = BufReader::new(file);\n\n let json_data: Value = serde_json::from_reader(reader).unwrap();\n\n serde_json::from_value(json_data)\n\n .unwrap_or_else(|e| panic!(\"Unable to deserialize filler at {:?}: {}\", filler_path, e))\n\n}\n\n\n", "file_path": "tests/transaction_tests.rs", "rank": 11, "score": 99101.36458048286 }, { "content": "fn load_fixtures(path: &Path) -> HashMap<String, TestFixture> {\n\n // Read JSON in advance before running this particular test.\n\n // This way we can construct human readable test name based on the JSON contents, and\n\n let file = File::open(&path).unwrap_or_else(|_| panic!(\"Could not open file {:?}\", path));\n\n let buffered_reader = BufReader::new(file);\n\n let json_data: Value =\n\n serde_json::from_reader(buffered_reader).expect(\"Unable to read JSON file\");\n\n // Deserialize fixture object\n\n serde_json::from_value(json_data).unwrap()\n\n}\n\n\n", "file_path": "tests/transaction_tests.rs", "rank": 12, "score": 98471.31866092405 }, { "content": "#[test]\n\nfn bytes_raises_decode_error() {\n\n match hex_str_to_bytes(&\"\\u{012345}deadbeef\".to_owned()).unwrap_err() {\n\n ByteDecodeError::DecodeError(_) => assert!(true),\n\n _ => assert!(false),\n\n }\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 13, "score": 97356.82678751182 }, { "content": "/// This one is a very simplified ABI encoder that takes a bunch of tokens,\n\n/// and 
serializes them.\n\n///\n\n/// This version is greatly simplified and doesn't support nested arrays etc.\n\n///\n\n/// Use with caution!\n\npub fn encode_tokens(tokens: &[Token]) -> Vec<u8> {\n\n // This is the result data buffer\n\n let mut res = Vec::new();\n\n\n\n // A cache of dynamic data buffers that are stored here.\n\n let mut dynamic_data: Vec<Vec<u8>> = Vec::new();\n\n\n\n for ref token in tokens.iter() {\n\n match token.serialize() {\n\n SerializedToken::Static(data) => res.extend(&data),\n\n SerializedToken::Dynamic(data) => {\n\n // This is the offset for dynamic data that is calculated\n\n // based on the lengtho f all dynamic data buffers stored,\n\n // and added to the \"base\" offset which is all tokens length.\n\n // The base offset is assumed to be 32 * len(tokens) which is true\n\n // since dynamic data is actually an static variable of size of\n\n // 32 bytes.\n\n let dynamic_offset = dynamic_data\n\n .iter()\n\n .map(|data| data.len() as u64)\n", "file_path": "src/abi.rs", "rank": 14, "score": 89930.90277599203 }, { "content": "#[test]\n\nfn serialize() {\n\n use serde_rlp::ser::to_bytes;\n\n let value: BigEndianInt =\n\n \"115792089237316195423570985008687907853269984665640564039457584007913129639934\"\n\n .parse()\n\n .unwrap();\n\n assert_eq!(\n\n to_bytes(&value).expect(\"Unable to serialize BigEndianInt\"),\n\n vec![\n\n 160, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255,\n\n 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 254,\n\n ]\n\n );\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 16, "score": 86504.71894830906 }, { "content": "#[test]\n\nfn decode_bytes() {\n\n assert_eq!(\n\n hex_str_to_bytes(&\"deadbeef\".to_owned()).expect(\"Unable to decode\"),\n\n [222, 173, 190, 239]\n\n );\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 18, "score": 82534.94145925903 }, { "content": "#[test]\n\nfn serialize_zeros() {\n\n use serde_rlp::ser::to_bytes;\n\n let value: BigEndianInt 
= \"0\".parse().unwrap();\n\n assert_eq!(\n\n to_bytes(&value).expect(\"Unable to serialize zero\"),\n\n vec![128]\n\n );\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 19, "score": 82200.60154165582 }, { "content": "#[test]\n\nfn into_array_of_32_bytes() {\n\n let bytes: [u8; 32] = BigEndianInt::from(1024u64).into();\n\n assert_eq!(\n\n bytes,\n\n [\n\n 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,\n\n 0, 4, 0\n\n ]\n\n );\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 20, "score": 82006.90720477092 }, { "content": "#[derive(Deserialize, Debug, Clone)]\n\nstruct TestFixture {\n\n #[serde(rename = \"Byzantium\")]\n\n byzantium: TestFixtureNetwork,\n\n #[serde(rename = \"Constantinople\")]\n\n constantinople: TestFixtureNetwork,\n\n #[serde(rename = \"EIP150\")]\n\n eip150: TestFixtureNetwork,\n\n #[serde(rename = \"EIP158\")]\n\n eip158: TestFixtureNetwork,\n\n #[serde(rename = \"Frontier\")]\n\n frontier: TestFixtureNetwork,\n\n #[serde(rename = \"Homestead\")]\n\n homestead: TestFixtureNetwork,\n\n #[serde(rename = \"_info\")]\n\n info: TestFixtureInfo,\n\n rlp: String,\n\n}\n\n\n", "file_path": "tests/transaction_tests.rs", "rank": 21, "score": 80936.47209046537 }, { "content": "#[derive(Deserialize, Debug, Clone)]\n\nstruct TestFiller {\n\n // I.e. 
[{\"network\": [\"ALL\"], \"result\": \"invalid\"}]\n\n #[serde(default = \"Vec::new\")]\n\n expect: Vec<TestFillerExpect>,\n\n // This is kind of unnatural in our environment, but there is at least\n\n // one test case where they have more transaction params than expected.\n\n // It doesn't really matter in our case because we operate on structs,\n\n // without any dynamic fields, but just to be sure, we can\n\n // verify that this map has exactly 9 elements (or all expected\n\n // elements exists).\n\n transaction: Option<TestFillerTransaction>,\n\n}\n\n\n", "file_path": "tests/transaction_tests.rs", "rank": 22, "score": 80936.47209046537 }, { "content": "#[test]\n\n#[should_panic]\n\nfn invalid_data() {\n\n let key = \"\\u{012345}c85ef7d79691fe79573b1a7064c19c1a9819ebdbd1faaab1a8ec92344438\";\n\n assert_eq!(key.len(), 64);\n\n PrivateKey::from_str(key).unwrap();\n\n}\n\n\n", "file_path": "src/private_key.rs", "rank": 23, "score": 79122.87033603083 }, { "content": "#[derive(Deserialize, Debug, Clone)]\n\nstruct TestFixtureNetwork {\n\n hash: Option<String>,\n\n sender: Option<String>,\n\n}\n\n\n", "file_path": "tests/transaction_tests.rs", "rank": 24, "score": 78806.15245565986 }, { "content": "#[derive(Deserialize, Debug, Clone)]\n\nstruct TestFillerExpect {\n\n /// I.e. [\"ALL\"]\n\n network: HashSet<String>,\n\n /// I.e. \"invalid\"\n\n result: String,\n\n /// I.e. 
40 bytes characters\n\n sender: Option<String>,\n\n}\n\n\n", "file_path": "tests/transaction_tests.rs", "rank": 25, "score": 78806.15245565986 }, { "content": "#[derive(Deserialize, Debug, Clone)]\n\nstruct TestFixtureInfo {\n\n comment: String,\n\n filledwith: String,\n\n lllcversion: String,\n\n source: String,\n\n #[serde(rename = \"sourceHash\")]\n\n source_hash: String,\n\n}\n\n\n", "file_path": "tests/transaction_tests.rs", "rank": 26, "score": 78806.15245565986 }, { "content": "#[derive(Deserialize, Debug, Clone)]\n\nstruct TestFillerTransaction {\n\n data: String,\n\n #[serde(rename = \"gasLimit\", default = \"default_gas_limit\")]\n\n gas_limit: String,\n\n #[serde(rename = \"gasPrice\")]\n\n gas_price: String,\n\n nonce: String,\n\n to: String,\n\n #[serde(default = \"String::new\")]\n\n value: String,\n\n v: String,\n\n r: String,\n\n s: String,\n\n}\n\n\n", "file_path": "tests/transaction_tests.rs", "rank": 27, "score": 78806.15245565986 }, { "content": "#[test]\n\nfn bytes_raises_parse_error() {\n\n match hex_str_to_bytes(&\"Lorem ipsum\".to_owned()).unwrap_err() {\n\n ByteDecodeError::ParseError(_) => assert!(true),\n\n _ => assert!(false),\n\n }\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 28, "score": 75328.03416881496 }, { "content": "#[test]\n\nfn decode_odd_amount_of_bytes() {\n\n assert_eq!(hex_str_to_bytes(&\"f\".to_owned()).unwrap(), vec![15]);\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 29, "score": 75224.28773437336 }, { "content": "fn tests() -> Vec<TestDescAndFn> {\n\n let mut res = Vec::new();\n\n\n\n let mut testdir = get_fixtures_path();\n\n testdir.push(\"TransactionTests\");\n\n if !testdir.is_dir() {\n\n panic!(\"Directory does not exists. 
Did you remember to execute \\\"git submodule update --init\\\"?\");\n\n }\n\n visit_dirs(&testdir, &mut |entry| match make_test(entry.path()) {\n\n Some(tests) => res.extend(tests),\n\n None => (),\n\n }).unwrap();\n\n res\n\n}\n\n\n", "file_path": "tests/transaction_tests.rs", "rank": 30, "score": 68831.8251933957 }, { "content": "fn main() {\n\n let args: Vec<_> = env::args().collect();\n\n test::test_main(&args, tests());\n\n}\n", "file_path": "tests/transaction_tests.rs", "rank": 31, "score": 64918.133879321154 }, { "content": "#[test]\n\nfn zero() {\n\n let a = BigEndianInt::zero();\n\n assert_eq!(a, \"0\".parse().unwrap());\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 34, "score": 60320.29858076528 }, { "content": "#[test]\n\nfn from_unsigned() {\n\n let _a = BigEndianInt::from(1u8);\n\n let _b = BigEndianInt::from(2u16);\n\n let _c = BigEndianInt::from(3u32);\n\n let _d = BigEndianInt::from(4u64);\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 35, "score": 60320.29858076528 }, { "content": "#[test]\n\nfn compares() {\n\n let a = BigEndianInt::from(42u64);\n\n let b = BigEndianInt::from(42u64);\n\n assert_eq!(a, b);\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 36, "score": 60320.29858076528 }, { "content": "#[test]\n\nfn construct() {\n\n use num_traits::One;\n\n let one = BigUint::one();\n\n let big_uint: BigEndianInt = one.into();\n\n assert_eq!(big_uint, 1u32.into());\n\n}\n", "file_path": "src/types.rs", "rank": 37, "score": 60320.29858076528 }, { "content": "#[test]\n\nfn extract() {\n\n use num_traits::One;\n\n let one = BigEndianInt::from(1u8);\n\n let big_uint: BigUint = one.into();\n\n assert_eq!(big_uint, BigUint::one());\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 38, "score": 60320.29858076528 }, { "content": "/// Takes a path to JSON file and returns a test\n\nfn make_test(path: PathBuf) -> Option<Vec<TestDescAndFn>> {\n\n // For now all the test and filler data is parsed upfront,\n\n // to only create tests that contains data 
that we're able to parse.\n\n // This means only tests that have filler \"transaction\" values can be verified.\n\n // Once serde-rlp's decoder is merged upstream we can do two way verification.\n\n\n\n // Test case is always an object with a single key\n\n // Grab name of the actual test together with its value\n\n\n\n let fixtures = load_fixtures(path.as_path());\n\n assert_eq!(fixtures.len(), 1);\n\n let (_, fixtures) = fixtures.into_iter().nth(0).unwrap();\n\n // Load filler data\n\n let filler = load_filler(&fixtures);\n\n assert_eq!(filler.len(), 1);\n\n let (_name, filler) = filler.into_iter().nth(0).unwrap();\n\n\n\n // Obvious expected failure as there are no expect values\n\n if filler.expect.len() == 0 {\n\n let mut desc = TestDesc::new(DynTestName(format!(\n", "file_path": "tests/transaction_tests.rs", "rank": 39, "score": 59758.838888168866 }, { "content": "fn test_fn(fixtures: &TestFixture, filler: &TestFiller, expect: Option<&TestFillerExpect>) {\n\n let raw_rlp_bytes = hex_str_to_bytes(&fixtures.rlp)\n\n .unwrap_or_else(|e| panic!(\"Unable to decode {}: {}\", fixtures.rlp, e));\n\n // Try to decode the bytes into a Vec of Bytes which will enforce structure of a n-element vector with bytearrays.\n\n let data: Vec<Bytes> = match from_bytes(&raw_rlp_bytes) {\n\n Ok(data) => {\n\n if filler.transaction.is_none() {\n\n assert_eq!(filler.expect.len(), 0);\n\n panic!(\"Decoding of this RLP data should fail\");\n\n }\n\n\n\n data\n\n }\n\n Err(e) => {\n\n panic!(\"Decoding failed correctly with {:?}\", e);\n\n return;\n\n }\n\n };\n\n // A valid decoded transaction has exactly 9 elements.\n\n assert_eq!(data.len(), 9);\n", "file_path": "tests/transaction_tests.rs", "rank": 40, "score": 59567.65119901096 }, { "content": "#[test]\n\nfn new_signature() {\n\n let sig = Signature::new(1u32.into(), 2u32.into(), 3u32.into());\n\n assert_eq!(sig.v, 1u32.into());\n\n assert_eq!(sig.r, 2u32.into());\n\n assert_eq!(sig.s, 3u32.into());\n\n}\n", "file_path": 
"src/signature.rs", "rank": 41, "score": 58285.49381788297 }, { "content": "#[test]\n\nfn encode_bytes() {\n\n assert_eq!(bytes_to_hex_str(&vec![0xf]), \"0f\".to_owned());\n\n assert_eq!(bytes_to_hex_str(&vec![0xff]), \"ff\".to_owned());\n\n assert_eq!(\n\n bytes_to_hex_str(&vec![0xde, 0xad, 0xbe, 0xef]),\n\n \"deadbeef\".to_owned()\n\n );\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 42, "score": 57972.19032774818 }, { "content": "#[test]\n\nfn encode_address() {\n\n use utils::bytes_to_hex_str;\n\n let result = encode_tokens(&[\"0x00000000000000000000000000000000deadbeef\"\n\n .parse::<Address>()\n\n .expect(\"Unable to parse address\")\n\n .into()]);\n\n assert!(result.len() % 8 == 0);\n\n assert_eq!(\n\n result[..]\n\n .chunks(32)\n\n .map(|c| bytes_to_hex_str(&c))\n\n .collect::<Vec<String>>(),\n\n vec![\"00000000000000000000000000000000000000000000000000000000deadbeef\".to_owned(),]\n\n );\n\n}\n\n\n", "file_path": "src/abi.rs", "rank": 44, "score": 57606.93402930091 }, { "content": "#[test]\n\nfn verify_zpad_exact() {\n\n assert_eq!(zpad(&[1, 2, 3, 4], 4), [1, 2, 3, 4]);\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 45, "score": 55750.212428530176 }, { "content": "fn get_fixtures_path() -> PathBuf {\n\n let mut path = Path::new(env!(\"CARGO_MANIFEST_DIR\")).to_path_buf();\n\n path.push(\"tests\");\n\n path.push(\"fixtures\");\n\n path\n\n}\n\n\n", "file_path": "tests/transaction_tests.rs", "rank": 46, "score": 55274.47296329392 }, { "content": "#[test]\n\nfn parse_address_1() {\n\n use utils::bytes_to_hex_str;\n\n // https://github.com/ethereum/tests/blob/b44cea1cccf1e4b63a05d1ca9f70f2063f28da6d/BasicTests/txtest.json\n\n let key: PrivateKey = \"c85ef7d79691fe79573b1a7064c19c1a9819ebdbd1faaab1a8ec92344438aaf4\"\n\n .parse()\n\n .unwrap();\n\n assert_eq!(\n\n key.to_bytes(),\n\n [\n\n 0xc8, 0x5e, 0xf7, 0xd7, 0x96, 0x91, 0xfe, 0x79, 0x57, 0x3b, 0x1a, 0x70, 0x64, 0xc1,\n\n 0x9c, 0x1a, 0x98, 0x19, 0xeb, 0xdb, 0xd1, 0xfa, 0xaa, 0xb1, 0xa8, 0xec, 0x92, 
0x34,\n\n 0x44, 0x38, 0xaa, 0xf4\n\n ]\n\n );\n\n\n\n // geth account import <(echo c85ef7d79691fe79573b1a7064c19c1a9819ebdbd1faaab1a8ec92344438aaf4)\n\n assert_eq!(\n\n bytes_to_hex_str(&key.to_public_key().unwrap().as_bytes()),\n\n \"cd2a3d9f938e13cd947ec05abc7fe734df8dd826\"\n\n );\n\n}\n\n\n", "file_path": "src/private_key.rs", "rank": 47, "score": 55108.55724139144 }, { "content": "#[test]\n\nfn parse_address_2() {\n\n use utils::bytes_to_hex_str;\n\n // https://github.com/ethereum/tests/blob/b44cea1cccf1e4b63a05d1ca9f70f2063f28da6d/BasicTests/txtest.json\n\n let key: PrivateKey = \"c87f65ff3f271bf5dc8643484f66b200109caffe4bf98c4cb393dc35740b28c0\"\n\n .parse()\n\n .unwrap();\n\n assert_eq!(\n\n key.to_bytes(),\n\n [\n\n 0xc8, 0x7f, 0x65, 0xff, 0x3f, 0x27, 0x1b, 0xf5, 0xdc, 0x86, 0x43, 0x48, 0x4f, 0x66,\n\n 0xb2, 0x00, 0x10, 0x9c, 0xaf, 0xfe, 0x4b, 0xf9, 0x8c, 0x4c, 0xb3, 0x93, 0xdc, 0x35,\n\n 0x74, 0x0b, 0x28, 0xc0\n\n ]\n\n );\n\n\n\n // geth account import <(echo c87f65ff3f271bf5dc8643484f66b200109caffe4bf98c4cb393dc35740b28c0)\n\n assert_eq!(\n\n bytes_to_hex_str(&key.to_public_key().unwrap().as_bytes()),\n\n \"13978aee95f38490e9769c39b2773ed763d9cd5f\"\n\n );\n\n}\n\n\n", "file_path": "src/private_key.rs", "rank": 48, "score": 55108.55724139144 }, { "content": "#[test]\n\n#[should_panic]\n\nfn zero_address() {\n\n // A key full of zeros is an invalid private key.\n\n let key = PrivateKey::new();\n\n key.to_public_key().unwrap();\n\n}\n", "file_path": "src/private_key.rs", "rank": 49, "score": 55108.45252882113 }, { "content": "#[test]\n\nfn test_basictests_txtest_2() {\n\n use serde_rlp::ser::to_bytes;\n\n // https://github.com/ethereum/tests/blob/b44cea1cccf1e4b63a05d1ca9f70f2063f28da6d/BasicTests/txtest.json\n\n let tx = Transaction {\n\n nonce: \"0\".parse().unwrap(),\n\n gas_price: \"1000000000000\".parse().unwrap(),\n\n gas_limit: \"10000\".parse().unwrap(),\n\n to: Address::new(),\n\n value: \"0\".parse().unwrap(),\n\n data: 
hex_str_to_bytes(\"6025515b525b600a37f260003556601b596020356000355760015b525b54602052f260255860005b525b54602052f2\").unwrap(),\n\n signature: None\n\n };\n\n // Unsigned\n\n let lhs = to_bytes(&tx).unwrap();\n\n let lhs = bytes_to_hex_str(&lhs);\n\n let rhs = \"f83f8085e8d4a510008227108080af6025515b525b600a37f260003556601b596020356000355760015b525b54602052f260255860005b525b54602052f2808080\".to_owned();\n\n assert_eq!(lhs, rhs);\n\n\n\n // Signed\n\n let key: PrivateKey = \"c87f65ff3f271bf5dc8643484f66b200109caffe4bf98c4cb393dc35740b28c0\"\n", "file_path": "src/transaction.rs", "rank": 50, "score": 54650.21547547737 }, { "content": "#[test]\n\nfn test_basictests_txtest_1() {\n\n use serde_rlp::ser::to_bytes;\n\n // https://github.com/ethereum/tests/blob/b44cea1cccf1e4b63a05d1ca9f70f2063f28da6d/BasicTests/txtest.json\n\n let tx = Transaction {\n\n nonce: BigEndianInt::from_str_radix(\"00\", 16).unwrap(),\n\n gas_price: \"1000000000000\".parse().unwrap(),\n\n gas_limit: \"10000\".parse().unwrap(),\n\n to: \"13978aee95f38490e9769c39b2773ed763d9cd5f\".parse().unwrap(),\n\n value: \"10000000000000000\".parse().unwrap(),\n\n data: Vec::new(),\n\n signature: None,\n\n };\n\n // Unsigned\n\n let lhs = to_bytes(&tx).unwrap();\n\n let lhs = bytes_to_hex_str(&lhs);\n\n let rhs =\n\n \"eb8085e8d4a510008227109413978aee95f38490e9769c39b2773ed763d9cd5f872386f26fc1000080808080\"\n\n .to_owned();\n\n assert_eq!(lhs, rhs);\n\n\n", "file_path": "src/transaction.rs", "rank": 51, "score": 54650.21547547737 }, { "content": "#[test]\n\nfn test_vitaliks_eip_158_vitalik_12_json() {\n\n use serde_rlp::ser::to_bytes;\n\n // https://github.com/ethereum/tests/blob/69f55e8608126e6470c2888a5b344c93c1550f40/TransactionTests/ttEip155VitaliksEip158/Vitalik_12.json\n\n let tx = Transaction {\n\n nonce: BigEndianInt::from_str_radix(\"0e\", 16).unwrap(),\n\n gas_price: BigEndianInt::from_str_radix(\"00\", 16).unwrap(),\n\n gas_limit: BigEndianInt::from_str_radix(\"0493e0\", 16).unwrap(),\n\n to: 
Address::new(), // \"\" - zeros only\n\n value: BigEndianInt::from_str_radix(\"00\", 16).unwrap(),\n\n data: hex_str_to_bytes(\"60f2ff61000080610011600039610011565b6000f3\").unwrap(),\n\n signature: Some(Signature::new(\n\n BigEndianInt::from_str_radix(\"1c\", 16).unwrap(),\n\n BigEndianInt::from_str_radix(\n\n \"a310f4d0b26207db76ba4e1e6e7cf1857ee3aa8559bcbc399a6b09bfea2d30b4\",\n\n 16,\n\n ).unwrap(),\n\n BigEndianInt::from_str_radix(\n\n \"6dff38c645a1486651a717ddf3daccb4fd9a630871ecea0758ddfcf2774f9bc6\",\n\n 16,\n\n ).unwrap(),\n", "file_path": "src/transaction.rs", "rank": 52, "score": 50433.71659479703 }, { "content": "#[test]\n\nfn test_vitaliks_eip_158_vitalik_1_json() {\n\n use serde_rlp::ser::to_bytes;\n\n // https://github.com/ethereum/tests/blob/69f55e8608126e6470c2888a5b344c93c1550f40/TransactionTests/ttEip155VitaliksEip158/Vitalik_12.json\n\n let tx = Transaction {\n\n nonce: BigEndianInt::from_str_radix(\"00\", 16).unwrap(),\n\n gas_price: BigEndianInt::from_str_radix(\"04a817c800\", 16).unwrap(),\n\n gas_limit: BigEndianInt::from_str_radix(\"5208\", 16).unwrap(),\n\n to: \"3535353535353535353535353535353535353535\".parse().unwrap(),\n\n value: BigEndianInt::from_str_radix(\"00\", 16).unwrap(),\n\n data: Vec::new(),\n\n signature: Some(Signature::new(\n\n BigEndianInt::from_str_radix(\"25\", 16).unwrap(),\n\n BigEndianInt::from_str_radix(\n\n \"044852b2a670ade5407e78fb2863c51de9fcb96542a07186fe3aeda6bb8a116d\",\n\n 16,\n\n ).unwrap(),\n\n BigEndianInt::from_str_radix(\n\n \"044852b2a670ade5407e78fb2863c51de9fcb96542a07186fe3aeda6bb8a116d\",\n\n 16,\n\n ).unwrap(),\n\n )),\n\n };\n\n let lhs = to_bytes(&tx).unwrap();\n\n let lhs = bytes_to_hex_str(&lhs);\n\n let rhs = \"f864808504a817c800825208943535353535353535353535353535353535353535808025a0044852b2a670ade5407e78fb2863c51de9fcb96542a07186fe3aeda6bb8a116da0044852b2a670ade5407e78fb2863c51de9fcb96542a07186fe3aeda6bb8a116d\".to_owned();\n\n assert_eq!(lhs, rhs);\n\n}\n\n\n", "file_path": 
"src/transaction.rs", "rank": 53, "score": 50433.71659479703 }, { "content": "fn visit_dirs(dir: &Path, cb: &mut FnMut(&DirEntry)) -> io::Result<()> {\n\n if dir.is_dir() {\n\n for entry in fs::read_dir(dir)? {\n\n let entry = entry?;\n\n let path = entry.path();\n\n if path.is_dir() {\n\n visit_dirs(&path, cb)?;\n\n } else {\n\n cb(&entry);\n\n }\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/transaction_tests.rs", "rank": 54, "score": 48147.94164132981 }, { "content": "#[test]\n\nfn encode_f() {\n\n use utils::bytes_to_hex_str;\n\n let result = encode_tokens(&[\n\n 0x123u32.into(),\n\n vec![0x456u32, 0x789u32].into(),\n\n Token::Bytes(\"1234567890\".as_bytes().to_vec()),\n\n \"Hello, world!\".into(),\n\n ]);\n\n assert!(result.len() % 8 == 0);\n\n assert_eq!(\n\n result[..]\n\n .chunks(32)\n\n .map(|c| bytes_to_hex_str(&c))\n\n .collect::<Vec<String>>(),\n\n vec![\n\n \"0000000000000000000000000000000000000000000000000000000000000123\".to_owned(),\n\n \"0000000000000000000000000000000000000000000000000000000000000080\".to_owned(),\n\n \"3132333435363738393000000000000000000000000000000000000000000000\".to_owned(),\n\n \"00000000000000000000000000000000000000000000000000000000000000e0\".to_owned(),\n\n \"0000000000000000000000000000000000000000000000000000000000000002\".to_owned(),\n\n \"0000000000000000000000000000000000000000000000000000000000000456\".to_owned(),\n\n \"0000000000000000000000000000000000000000000000000000000000000789\".to_owned(),\n\n \"000000000000000000000000000000000000000000000000000000000000000d\".to_owned(),\n\n \"48656c6c6f2c20776f726c642100000000000000000000000000000000000000\".to_owned(),\n\n ]\n\n );\n\n}\n\n\n", "file_path": "src/abi.rs", "rank": 55, "score": 34818.8751504454 }, { "content": "#[test]\n\nfn derive_f() {\n\n use utils::bytes_to_hex_str;\n\n assert_eq!(\n\n bytes_to_hex_str(&derive_method_id(\"f(uint256,uint32[],bytes10,bytes)\")),\n\n \"8be65246\"\n\n );\n\n}\n\n\n", "file_path": "src/abi.rs", "rank": 56, 
"score": 34818.8751504454 }, { "content": "#[test]\n\nfn derive_bar() {\n\n use utils::bytes_to_hex_str;\n\n assert_eq!(\n\n bytes_to_hex_str(&derive_method_id(\"bar(bytes3[2])\")),\n\n \"fce353f6\"\n\n );\n\n}\n\n\n", "file_path": "src/abi.rs", "rank": 57, "score": 33487.45320991613 }, { "content": "#[test]\n\nfn encode_simple() {\n\n use utils::bytes_to_hex_str;\n\n let result = encode_tokens(&[69u32.into(), true.into()]);\n\n assert_eq!(\n\n bytes_to_hex_str(&result),\n\n concat!(\n\n \"0000000000000000000000000000000000000000000000000000000000000045\",\n\n \"0000000000000000000000000000000000000000000000000000000000000001\"\n\n )\n\n );\n\n}\n\n\n", "file_path": "src/abi.rs", "rank": 58, "score": 33487.45320991613 }, { "content": "#[test]\n\nfn encode_sam() {\n\n use utils::bytes_to_hex_str;\n\n let result = encode_tokens(&[\"dave\".into(), true.into(), vec![1u32, 2u32, 3u32].into()]);\n\n assert!(result.len() % 8 == 0);\n\n assert_eq!(\n\n bytes_to_hex_str(&result),\n\n concat![\n\n // the location of the data part of the first parameter\n\n // (dynamic type), measured in bytes from the start of the\n\n // arguments block. In this case, 0x60.\n\n \"0000000000000000000000000000000000000000000000000000000000000060\",\n\n // the second parameter: boolean true.\n\n \"0000000000000000000000000000000000000000000000000000000000000001\",\n\n // the location of the data part of the third parameter\n\n // (dynamic type), measured in bytes. 
In this case, 0xa0.\n\n \"00000000000000000000000000000000000000000000000000000000000000a0\",\n\n // the data part of the first argument, it starts with the length\n\n // of the byte array in elements, in this case, 4.\n\n \"0000000000000000000000000000000000000000000000000000000000000004\",\n\n // the contents of the first argument: the UTF-8 (equal to ASCII\n", "file_path": "src/abi.rs", "rank": 59, "score": 33487.45320991613 }, { "content": "#[test]\n\nfn derive_sam() {\n\n use utils::bytes_to_hex_str;\n\n assert_eq!(\n\n bytes_to_hex_str(&derive_method_id(\"sam(bytes,bool,uint256[])\")),\n\n \"a5643bf2\"\n\n );\n\n}\n\n\n", "file_path": "src/abi.rs", "rank": 60, "score": 33487.45320991613 }, { "content": "#[test]\n\nfn encode_dynamic_only() {\n\n use utils::bytes_to_hex_str;\n\n let result = encode_tokens(&[\"foo\".into(), \"bar\".into()]);\n\n assert!(result.len() % 8 == 0);\n\n assert_eq!(\n\n result[..]\n\n .chunks(32)\n\n .map(|c| bytes_to_hex_str(&c))\n\n .collect::<Vec<String>>(),\n\n vec![\n\n \"0000000000000000000000000000000000000000000000000000000000000040\".to_owned(),\n\n \"0000000000000000000000000000000000000000000000000000000000000080\".to_owned(),\n\n \"0000000000000000000000000000000000000000000000000000000000000003\".to_owned(),\n\n \"666f6f0000000000000000000000000000000000000000000000000000000000\".to_owned(),\n\n \"0000000000000000000000000000000000000000000000000000000000000003\".to_owned(),\n\n \"6261720000000000000000000000000000000000000000000000000000000000\".to_owned(),\n\n ]\n\n );\n\n}\n\n\n", "file_path": "src/abi.rs", "rank": 61, "score": 33487.45320991613 }, { "content": "#[test]\n\nfn derive_baz() {\n\n use utils::bytes_to_hex_str;\n\n assert_eq!(\n\n bytes_to_hex_str(&derive_method_id(\"baz(uint32,bool)\")),\n\n \"cdcd77c0\"\n\n );\n\n}\n\n\n", "file_path": "src/abi.rs", "rank": 62, "score": 33487.45320991613 }, { "content": "#[test]\n\nfn verify_zpad() {\n\n assert_eq!(zpad(&[1, 2, 3, 4], 8), [0, 0, 0, 0, 1, 2, 3, 
4]);\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 63, "score": 33487.45320991613 }, { "content": "#[test]\n\n#[should_panic]\n\nfn too_short() {\n\n PrivateKey::from_str(\"abcdef\").unwrap();\n\n}\n\n\n", "file_path": "src/private_key.rs", "rank": 64, "score": 33487.348497345825 }, { "content": "extern crate clarity;\n\nextern crate num_traits;\n\nextern crate rustc_test as test;\n\nextern crate serde_bytes;\n\nextern crate serde_json;\n\nextern crate serde_rlp;\n\n#[macro_use]\n\nextern crate serde_derive;\n\nuse clarity::utils::{bytes_to_hex_str, hex_str_to_bytes};\n\nuse clarity::{Address, BigEndianInt, Signature, Transaction};\n\nuse num_traits::Zero;\n\nuse serde_bytes::Bytes;\n\nuse serde_json::{Error, Value};\n\nuse serde_rlp::de::from_bytes;\n\nuse serde_rlp::ser::to_bytes;\n\nuse std::collections::HashMap;\n\nuse std::collections::HashSet;\n\nuse std::env;\n\nuse std::fs::{self, DirEntry, File};\n\nuse std::io;\n\nuse std::io::BufReader;\n\nuse std::iter::FromIterator;\n\nuse std::path::{Path, PathBuf};\n\nuse test::{DynTestFn, DynTestName, ShouldPanic, TestDesc, TestDescAndFn};\n\n\n", "file_path": "tests/transaction_tests.rs", "rank": 65, "score": 32697.211728973314 }, { "content": " gas_price: raw_params.gas_price.parse().unwrap_or(BigEndianInt::zero()),\n\n gas_limit: raw_params\n\n .gas_limit\n\n .parse()\n\n .expect(\"Unable to parse gas_limit\"),\n\n to: raw_params.to.parse().expect(\"Unable to parse address\"),\n\n value: raw_params.value.parse().unwrap_or(BigEndianInt::zero()),\n\n data: hex_str_to_bytes(&raw_params.data).expect(\"Unable to parse data\"),\n\n signature: Some(Signature::new(\n\n raw_params.v.parse().expect(\"Unable to parse v\"),\n\n raw_params.r.parse().expect(\"Unable to parse r\"),\n\n raw_params.s.parse().expect(\"Unable to parse s\"),\n\n )),\n\n };\n\n\n\n // Compare decoded transaction based on RLP and a transaction based on TX.\n\n // No need to go through validation for `decoded_tx` since we can rely on the equality 
here,\n\n // and assume if tx is valid, then decoded_tx is valid as well.\n\n assert_eq!(decoded_tx, tx);\n\n\n", "file_path": "tests/transaction_tests.rs", "rank": 66, "score": 32695.70263159182 }, { "content": " \"{}\",\n\n path.strip_prefix(get_fixtures_path())\n\n .unwrap()\n\n .to_string_lossy()\n\n .to_string()\n\n )));\n\n assert!(filler.transaction.is_none());\n\n desc.should_panic = ShouldPanic::Yes;\n\n\n\n let test = TestDescAndFn {\n\n desc: desc,\n\n testfn: DynTestFn(Box::new(move || {\n\n test_fn(&fixtures, &filler, None);\n\n })),\n\n };\n\n\n\n return Some(vec![test]);\n\n }\n\n\n\n // This stores all tests per all networks\n", "file_path": "tests/transaction_tests.rs", "rank": 67, "score": 32694.99773935941 }, { "content": "\n\n desc.should_panic = if &expect.result == \"invalid\" {\n\n ShouldPanic::Yes\n\n } else if &expect.result == \"valid\" {\n\n ShouldPanic::No\n\n } else {\n\n panic!(\"Unknown expect result {}\", &expect.result);\n\n };\n\n\n\n // TODO: I couldn't figure a better way to pass those values to the closure without cloning.\n\n let a = fixtures.clone();\n\n let b = filler.clone();\n\n let c = expect.clone();\n\n\n\n let test = TestDescAndFn {\n\n desc: desc,\n\n testfn: DynTestFn(Box::new(move || {\n\n test_fn(&a, &b, Some(&c));\n\n })),\n\n };\n\n tests.push(test);\n\n }\n\n Some(tests)\n\n}\n\n\n", "file_path": "tests/transaction_tests.rs", "rank": 68, "score": 32694.940914046045 }, { "content": " &filler\n\n );\n\n\n\n // We have verified that case already so unwrapping an expect data is safe.\n\n let expect = expect.expect(\"Expect should be available at this point\");\n\n\n\n // TODO: Change v to u64 so it would validate overflow when decoding/creating (v <= 2**64-1 so it can't overflow)\n\n assert!(tx.signature.as_ref().unwrap().v <= \"18446744073709551615\".parse().unwrap());\n\n\n\n // Since Homestead we have to verify if 0<s<secpk1n/2\n\n if HashSet::from_iter(\n\n vec![\"Homestead\", \"EIP150\"]\n\n .into_iter()\n\n 
.map(String::from)\n\n .collect::<Vec<String>>(),\n\n ).is_subset(&expect.network)\n\n {\n\n let res = tx.signature.as_ref().unwrap().check_low_s_homestead();\n\n if expect.result == \"invalid\" {\n\n res.unwrap_err();\n", "file_path": "tests/transaction_tests.rs", "rank": 69, "score": 32694.298267330425 }, { "content": " let mut tests = Vec::new();\n\n\n\n for expect in &filler.expect {\n\n // let networks = vec![\"a\", \"b\"].fjoin(\",\");\n\n let networks = expect\n\n .network\n\n .iter()\n\n .map(|s| &s[..])\n\n .collect::<Vec<&str>>()\n\n .join(\",\");\n\n // for network in expect.network.iter() {\n\n let mut desc = TestDesc::new(DynTestName(format!(\n\n \"{}@{}@{}\",\n\n path.strip_prefix(get_fixtures_path())\n\n .unwrap()\n\n .to_string_lossy()\n\n .to_string(),\n\n networks,\n\n expect.result\n\n )));\n", "file_path": "tests/transaction_tests.rs", "rank": 70, "score": 32693.45731676009 }, { "content": " let sender = tx.sender().unwrap();\n\n if !expect.sender.is_none() {\n\n // Compare only if we know we have sender provided\n\n assert_eq!(\n\n &bytes_to_hex_str(&sender.as_bytes()),\n\n expect.sender.as_ref().unwrap()\n\n );\n\n }\n\n\n\n // Verify network id\n\n let network_id = tx.signature.as_ref().unwrap().network_id();\n\n\n\n if HashSet::from_iter(\n\n vec![\"Byzantium\", \"Constantinople\", \"EIP158\"]\n\n .into_iter()\n\n .map(String::from)\n\n .collect::<Vec<String>>(),\n\n ).is_subset(&expect.network)\n\n {\n\n // Since Spurious Dragon\n\n assert!(network_id.is_some() || network_id.unwrap() == 1u32.into());\n\n } else {\n\n // Before Spurious Dragon\n\n assert!(network_id.is_none());\n\n }\n\n}\n\n\n", "file_path": "tests/transaction_tests.rs", "rank": 71, "score": 32693.310094470075 }, { "content": "\n\n let decoded_tx = Transaction {\n\n nonce: (&*data[0]).into(),\n\n gas_price: (&*data[1]).into(),\n\n gas_limit: (&*data[2]).into(),\n\n to: (&*data[3]).into(),\n\n value: (&*data[4]).into(),\n\n data: (&*data[5]).into(),\n\n signature: 
Some(Signature::new(\n\n (&*data[6]).into(),\n\n (&*data[7]).into(),\n\n (&*data[8]).into(),\n\n )),\n\n };\n\n\n\n // We skipped all fillers without transaction data, so now this unwrap is safe.\n\n let raw_params = filler.transaction.as_ref().unwrap();\n\n // Create a tx based on filler params\n\n let tx = Transaction {\n\n nonce: raw_params.nonce.parse().unwrap_or(BigEndianInt::zero()),\n", "file_path": "tests/transaction_tests.rs", "rank": 72, "score": 32692.68532245233 }, { "content": " // Encoding of our transaction\n\n let our_rlp = to_bytes(&tx).unwrap();\n\n // All rlp's Fixtures\n\n assert!(fixtures.rlp.starts_with(\"0x\"));\n\n\n\n assert!(tx.is_valid(), \"{:?} {:?} {:?}\", tx, raw_params, filler);\n\n assert!(\n\n tx.signature.as_ref().unwrap().is_valid(),\n\n \"{:?} {:?} {:?}\",\n\n tx.signature.as_ref().unwrap(),\n\n raw_params,\n\n filler\n\n );\n\n // Comparing our encoding with the \"ground truth\" in the fixture\n\n assert_eq!(\n\n bytes_to_hex_str(&our_rlp),\n\n &fixtures.rlp[2..],\n\n \"{:?} != {:?} (filler {:?})\",\n\n &tx,\n\n &raw_params,\n", "file_path": "tests/transaction_tests.rs", "rank": 73, "score": 32691.06761288614 }, { "content": " } else if expect.result == \"valid\" {\n\n res.unwrap();\n\n } else {\n\n unreachable!(\"This case is validated before\");\n\n }\n\n }\n\n\n\n // Since Constantinople verify if 0<s<secpk1n/2 and s != 0\n\n if expect.network.contains(&\"Constantinople\".to_owned()) {\n\n let res = tx.signature.as_ref().unwrap().check_low_s_metropolis();\n\n if expect.result == \"invalid\" {\n\n res.unwrap_err();\n\n } else if expect.result == \"valid\" {\n\n res.unwrap();\n\n } else {\n\n unreachable!(\"This case is validated before\");\n\n }\n\n }\n\n\n\n // Retrieving sender key is also validating parameters\n", "file_path": "tests/transaction_tests.rs", "rank": 74, "score": 32689.686090090352 }, { "content": "#[test]\n\nfn parse_prefixed_empty() {\n\n assert_eq!(\n\n hex_str_to_bytes(&\"0x\".to_owned()).unwrap(),\n\n 
Vec::<u8>::new()\n\n );\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 75, "score": 32300.85445783665 }, { "content": "#[test]\n\nfn verify_zpad_less_than_size() {\n\n assert_eq!(zpad(&[1, 2, 3, 4], 2), [1, 2, 3, 4]);\n\n}\n", "file_path": "src/utils.rs", "rank": 76, "score": 31236.668378223203 }, { "content": "#[test]\n\nfn parse_prefixed_non_empty() {\n\n assert_eq!(\n\n hex_str_to_bytes(&\"0xdeadbeef\".to_owned()).unwrap(),\n\n vec![0xde, 0xad, 0xbe, 0xef]\n\n );\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 77, "score": 31236.668378223203 }, { "content": "use failure::Error;\n\n\n\n#[derive(Fail, Debug)]\n\npub enum ClarityError {\n\n #[fail(display = \"Invalid network id\")]\n\n InvalidNetworkId,\n\n #[fail(display = \"Invalid V value\")]\n\n InvalidV,\n\n #[fail(display = \"Invalid S value\")]\n\n InvalidS,\n\n #[fail(display = \"Invalid signature values\")]\n\n InvalidSignatureValues,\n\n #[fail(display = \"Zero priv key cannot sign\")]\n\n ZeroPrivKey,\n\n #[fail(display = \"Invalid private key\")]\n\n InvalidPrivKey,\n\n}\n", "file_path": "src/error.rs", "rank": 78, "score": 27895.456282463478 }, { "content": "}\n\n\n\nimpl FromStr for BigEndianInt {\n\n type Err = BigEndianIntError;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n let value = if s.starts_with(\"0x\") {\n\n // Parse as hexadecimal big endian value\n\n BigUint::parse_bytes(&s.as_bytes()[2..], 16)\n\n .ok_or(BigEndianIntError::InvalidHexValue)?\n\n } else {\n\n BigUint::parse_bytes(s.as_bytes(), 10).ok_or(BigEndianIntError::InvalidDecValue)?\n\n };\n\n Ok(BigEndianInt(value))\n\n }\n\n}\n\n\n\nimpl From<u8> for BigEndianInt {\n\n fn from(v: u8) -> Self {\n\n BigEndianInt(BigUint::from(v))\n", "file_path": "src/types.rs", "rank": 84, "score": 27174.99899218058 }, { "content": "use failure::Error;\n\nuse num_bigint::BigUint;\n\nuse num_traits::{Num, ToPrimitive, Zero};\n\nuse serde::Serialize;\n\nuse serde::Serializer;\n\nuse std::fmt;\n\nuse std::ops::Div;\n\nuse 
std::ops::Mul;\n\nuse std::ops::Sub;\n\nuse std::ops::{Add, AddAssign};\n\nuse std::str::FromStr;\n\n\n\n/// A wrapper for BigUint which provides serialization to BigEndian in radix 16\n\n#[derive(PartialEq, Eq, PartialOrd, Clone, Deserialize, Hash)]\n\npub struct BigEndianInt(BigUint);\n\n\n\nimpl Zero for BigEndianInt {\n\n fn zero() -> BigEndianInt {\n\n BigEndianInt(BigUint::zero())\n\n }\n", "file_path": "src/types.rs", "rank": 85, "score": 27172.804569342756 }, { "content": " // TODO: Leverage Num trait once all required traits are implemented\n\n pub fn from_str_radix(src: &str, radix: u32) -> Result<BigEndianInt, Error> {\n\n let raw = BigUint::from_str_radix(&src, radix)?;\n\n Ok(BigEndianInt(raw))\n\n }\n\n\n\n pub fn from_bytes_be(bytes: &[u8]) -> BigEndianInt {\n\n BigEndianInt(BigUint::from_bytes_be(bytes))\n\n }\n\n pub fn to_bytes_be(&self) -> Vec<u8> {\n\n self.0.to_bytes_be()\n\n }\n\n}\n\n\n\n#[derive(Fail, Debug)]\n\npub enum BigEndianIntError {\n\n #[fail(display = \"Invalid radix 16 value\")]\n\n InvalidHexValue,\n\n #[fail(display = \"Invalid radix 10 value\")]\n\n InvalidDecValue,\n", "file_path": "src/types.rs", "rank": 86, "score": 27172.793353328183 }, { "content": " BigEndianInt(self.0 / other.0)\n\n }\n\n}\n\n\n\n/// Implement serialization that would serialize as bytes\n\nimpl Serialize for BigEndianInt {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n if self.0 == BigUint::zero() {\n\n serializer.serialize_bytes(&[])\n\n } else {\n\n let bytes = self.to_bytes_be();\n\n serializer.serialize_bytes(&bytes)\n\n }\n\n }\n\n}\n\n\n\nimpl ToPrimitive for BigEndianInt {\n", "file_path": "src/types.rs", "rank": 87, "score": 27169.679113692284 }, { "content": "\n\nimpl<'a> From<&'a [u8]> for BigEndianInt {\n\n fn from(v: &'a [u8]) -> Self {\n\n BigEndianInt(BigUint::from_bytes_be(v))\n\n }\n\n}\n\n\n\nimpl Into<[u8; 32]> for BigEndianInt {\n\n fn into(self) -> [u8; 32] {\n\n let bytes = 
self.0.to_bytes_be();\n\n let mut res = [0u8; 32];\n\n res[32 - bytes.len()..].copy_from_slice(&bytes);\n\n res\n\n }\n\n}\n\n\n\nimpl Into<BigUint> for BigEndianInt {\n\n fn into(self) -> BigUint {\n\n self.0\n\n }\n", "file_path": "src/types.rs", "rank": 88, "score": 27169.54072225753 }, { "content": "}\n\n\n\nimpl From<BigUint> for BigEndianInt {\n\n fn from(v: BigUint) -> BigEndianInt {\n\n BigEndianInt(v)\n\n }\n\n}\n\n\n\nimpl fmt::Debug for BigEndianInt {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{}\", self.0.to_str_radix(10))\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "src/types.rs", "rank": 89, "score": 27166.399220370673 }, { "content": " }\n\n}\n\n\n\nimpl From<u16> for BigEndianInt {\n\n fn from(v: u16) -> Self {\n\n BigEndianInt(BigUint::from(v))\n\n }\n\n}\n\n\n\nimpl From<u32> for BigEndianInt {\n\n fn from(v: u32) -> Self {\n\n BigEndianInt(BigUint::from(v))\n\n }\n\n}\n\n\n\nimpl From<u64> for BigEndianInt {\n\n fn from(v: u64) -> Self {\n\n BigEndianInt(BigUint::from(v))\n\n }\n\n}\n", "file_path": "src/types.rs", "rank": 90, "score": 27164.509620800978 }, { "content": " self.0.to_usize()\n\n }\n\n fn to_u8(&self) -> Option<u8> {\n\n self.0.to_u8()\n\n }\n\n fn to_u16(&self) -> Option<u16> {\n\n self.0.to_u16()\n\n }\n\n fn to_u32(&self) -> Option<u32> {\n\n self.0.to_u32()\n\n }\n\n fn to_f32(&self) -> Option<f32> {\n\n self.0.to_f32()\n\n }\n\n fn to_f64(&self) -> Option<f64> {\n\n self.0.to_f64()\n\n }\n\n}\n\n\n\nimpl BigEndianInt {\n", "file_path": "src/types.rs", "rank": 91, "score": 27163.837814696715 }, { "content": "impl Sub for BigEndianInt {\n\n type Output = BigEndianInt;\n\n\n\n fn sub(self, other: BigEndianInt) -> BigEndianInt {\n\n BigEndianInt(self.0 - other.0)\n\n }\n\n}\n\n\n\nimpl Mul for BigEndianInt {\n\n type Output = BigEndianInt;\n\n\n\n fn mul(self, other: BigEndianInt) -> BigEndianInt {\n\n BigEndianInt(self.0 * other.0)\n\n }\n\n}\n\n\n\nimpl Div for BigEndianInt {\n\n type Output = 
BigEndianInt;\n\n\n\n fn div(self, other: BigEndianInt) -> BigEndianInt {\n", "file_path": "src/types.rs", "rank": 92, "score": 27161.18617422793 }, { "content": "\n\n fn is_zero(&self) -> bool {\n\n self.0.is_zero()\n\n }\n\n}\n\n\n\nimpl Add for BigEndianInt {\n\n type Output = BigEndianInt;\n\n\n\n fn add(self, other: BigEndianInt) -> BigEndianInt {\n\n BigEndianInt(self.0 + other.0)\n\n }\n\n}\n\n\n\nimpl AddAssign for BigEndianInt {\n\n fn add_assign(&mut self, other: BigEndianInt) {\n\n self.0 += other.0;\n\n }\n\n}\n\n\n", "file_path": "src/types.rs", "rank": 93, "score": 27161.180998751937 }, { "content": " fn to_i64(&self) -> Option<i64> {\n\n self.0.to_i64()\n\n }\n\n fn to_u64(&self) -> Option<u64> {\n\n self.0.to_u64()\n\n }\n\n\n\n fn to_isize(&self) -> Option<isize> {\n\n self.0.to_isize()\n\n }\n\n fn to_i8(&self) -> Option<i8> {\n\n self.0.to_i8()\n\n }\n\n fn to_i16(&self) -> Option<i16> {\n\n self.0.to_i16()\n\n }\n\n fn to_i32(&self) -> Option<i32> {\n\n self.0.to_i32()\n\n }\n\n fn to_usize(&self) -> Option<usize> {\n", "file_path": "src/types.rs", "rank": 94, "score": 27158.774004301773 }, { "content": "use address::Address;\n\nuse error::ClarityError;\n\nuse failure::Error;\n\nuse secp256k1::{PublicKey, Secp256k1, SecretKey};\n\nuse sha3::{Digest, Keccak256};\n\nuse std::str::FromStr;\n\nuse utils::{hex_str_to_bytes, ByteDecodeError};\n\n\n\n#[derive(Fail, Debug, PartialEq)]\n\npub enum PrivateKeyError {\n\n #[fail(display = \"Private key should be exactly 64 bytes\")]\n\n InvalidLengthError,\n\n}\n\n\n\n#[derive(Debug, PartialEq, Copy, Clone)]\n\npub struct PrivateKey([u8; 32]);\n\n\n\nimpl FromStr for PrivateKey {\n\n type Err = Error;\n\n\n", "file_path": "src/private_key.rs", "rank": 95, "score": 25.531458656768365 }, { "content": " fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n if s.len() != 64 {\n\n return Err(PrivateKeyError::InvalidLengthError.into());\n\n }\n\n let bytes = hex_str_to_bytes(&s)?;\n\n 
debug_assert_eq!(bytes.len(), 32);\n\n let mut res = [0x0u8; 32];\n\n res.copy_from_slice(&bytes[..]);\n\n Ok(PrivateKey(res))\n\n }\n\n}\n\n\n\nimpl From<[u8; 32]> for PrivateKey {\n\n fn from(val: [u8; 32]) -> PrivateKey {\n\n PrivateKey(val)\n\n }\n\n}\n\n\n\nimpl PrivateKey {\n\n pub fn new() -> PrivateKey {\n", "file_path": "src/private_key.rs", "rank": 96, "score": 21.58801605481285 }, { "content": "use utils::{bytes_to_hex_str, hex_str_to_bytes, zpad};\n\n\n\n/// Transaction as explained in the Ethereum Yellow paper section 4.2\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct Transaction {\n\n pub nonce: BigEndianInt,\n\n pub gas_price: BigEndianInt,\n\n pub gas_limit: BigEndianInt,\n\n pub to: Address,\n\n pub value: BigEndianInt,\n\n pub data: Vec<u8>,\n\n pub signature: Option<Signature>,\n\n}\n\n\n\nimpl Serialize for Transaction {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n // Serialization of a transaction without signature serializes\n", "file_path": "src/transaction.rs", "rank": 97, "score": 18.426645712322383 }, { "content": " assert_eq!(pkey.len(), 65);\n\n if pkey[1..].to_vec() == [0x00u8; 64].to_vec() {\n\n return Err(ClarityError::ZeroPrivKey.into());\n\n }\n\n // Finally an address is last 20 bytes of a hash of the public key.\n\n let sender = Keccak256::digest(&pkey[1..]);\n\n debug_assert_eq!(sender.len(), 32);\n\n return Ok(Address::from(&sender[12..]));\n\n }\n\n }\n\n /// Creates a hash of a transaction given all TX attributes\n\n /// including signature (VRS) whether it is present, or not.\n\n pub fn hash(&self) -> Vec<u8> {\n\n Keccak256::digest(&to_bytes(&self).unwrap()).to_vec()\n\n }\n\n /// Creates a byte representation of this transaction\n\n pub fn to_bytes(&self) -> Result<Vec<u8>, Error> {\n\n Ok(to_bytes(&self)?)\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "src/transaction.rs", "rank": 98, "score": 18.13583964516878 }, { "content": "use constants::SECPK1N;\n\nuse 
error::ClarityError;\n\nuse failure::Error;\n\nuse num_traits::Zero;\n\nuse serde::ser::SerializeTuple;\n\nuse serde::Serialize;\n\nuse serde::Serializer;\n\nuse types::BigEndianInt;\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]\n\npub struct Signature {\n\n pub v: BigEndianInt,\n\n pub r: BigEndianInt,\n\n pub s: BigEndianInt,\n\n}\n\n\n\nimpl Signature {\n\n pub fn new(v: BigEndianInt, r: BigEndianInt, s: BigEndianInt) -> Signature {\n\n Signature { v, r, s }\n\n }\n", "file_path": "src/signature.rs", "rank": 99, "score": 18.108872136516506 } ]
Rust
bencher/src/main.rs
dimforge/dimforge-bench
b662de5580095e3a15ffc65a4abc5b40bc8db622
#[macro_use] extern crate log; use amiquip::{ Connection, ConsumerMessage, ConsumerOptions, Exchange, Publish, QueueDeclareOptions, }; use bson::DateTime; use clap::{App, Arg, SubCommand}; use dimforge_bench_common::{ BenchCSVEntry, BenchConfig, BenchContext, BenchKey, BenchMessage, BenchPlatform, }; use log::LevelFilter; use mongodb::sync::Database; use simple_logger::SimpleLogger; use std::fs::File; use std::io::Write; use std::path::Path; use std::process::Command; use std::str::FromStr; fn main() -> mongodb::error::Result<()> { SimpleLogger::new() .with_level(LevelFilter::Info) .init() .unwrap(); let matches = App::new("Dimforge benchmark tool") .arg( Arg::with_name("config") .short("f") .required(false) .takes_value(true) .help("path to the JSON configuration file"), ) .subcommand(SubCommand::with_name("configure").about("Configure credentials")) .subcommand( SubCommand::with_name("send") .about("Send a message to start a benchmark") .arg( Arg::with_name("repository") .short("r") .required(true) .takes_value(true) .help("the repository to clone"), ) .arg( Arg::with_name("branch") .short("b") .required(true) .takes_value(true) .help("the branch of the commit to compile"), ) .arg( Arg::with_name("commit") .short("c") .required(true) .takes_value(true) .help("the commit to compile"), ), ) .subcommand(SubCommand::with_name("listen").about("Listen to incoming benchmark messages")) .get_matches(); let config = matches.value_of("config"); let config = BenchConfig::from_json_file(config); if let Some(matches) = matches.subcommand_matches("send") { let repository = matches.value_of("repository").unwrap().to_string(); let branch = matches.value_of("branch").unwrap().to_string(); let commit = matches.value_of("commit").unwrap().to_string(); let message = BenchMessage { repository, branch, commit, }; send_bench_message(&config, &message); info!("Bench message sent."); } if let Some(_) = matches.subcommand_matches("listen") { listen_bench_messages(&config)?; } if let 
Some(_) = matches.subcommand_matches("configure") { configure(); } Ok(()) } fn configure() { println!("MongoDB bencher uri: "); let mongodb_bencher_uri = text_io::read!("{}\n"); println!("MongoDB server uri: "); let mongodb_server_uri = text_io::read!("{}\n"); println!("MongoDB database: "); let mongodb_db = text_io::read!("{}\n"); println!("Rabbitmq uri: "); let rabbitmq_uri = text_io::read!("{}\n"); println!("Save configuration to folder [$HOME/.dimforge]: "); let mut output_dir: String = text_io::read!("{}\n"); if output_dir.is_empty() { let home = std::env::var("HOME").unwrap_or(String::new()); output_dir = format!("{}/.dimforge", home); } let config = BenchConfig { mongodb_db, mongodb_bencher_uri, mongodb_server_uri, rabbitmq_uri, }; let config_json = serde_json::to_string(&config).unwrap(); std::fs::create_dir_all(&output_dir).unwrap(); let output_file = format!("{}/benchbot.json", output_dir); let mut out = File::create(&output_file).expect( "Could not open target configuration file. 
Did you run the `configure` subcommand yet?", ); out.write_all(config_json.as_bytes()).unwrap(); info!("Configuration successfully saved to '{}'.", output_file); } fn send_bench_message(config: &BenchConfig, message: &BenchMessage) { let mut connection = Connection::open(&config.rabbitmq_uri).unwrap(); let channel = connection.open_channel(None).unwrap(); let exchange = Exchange::direct(&channel); let message = serde_json::to_string(message).unwrap(); exchange .publish(Publish::new(message.as_bytes(), "benchmark")) .unwrap(); let _ = connection.close(); } fn listen_bench_messages(config: &BenchConfig) -> mongodb::error::Result<()> { let mut connection = Connection::open(&config.rabbitmq_uri).unwrap(); let channel = connection.open_channel(None).unwrap(); let queue = channel .queue_declare("benchmark", QueueDeclareOptions::default()) .unwrap(); let consumer = queue.consume(ConsumerOptions::default()).unwrap(); for message in consumer.receiver().iter() { match message { ConsumerMessage::Delivery(delivery) => { let body = String::from_utf8_lossy(&delivery.body); let message = serde_json::from_str::<BenchMessage>(&body); if delivery.redelivered { warn!("Dropping redelivered message: {:?}", message); let _ = delivery.ack(&channel); continue; } let message = message.unwrap(); info!("Received bench message: {:?}", message); let tempdir = tempfile::tempdir().unwrap(); let target_dir = tempdir.path(); let bench_subdir = "benchmarks3d"; let bench_names = clone_and_build_benches( target_dir, bench_subdir, &message.repository, &message.commit, ); info!("About to run benchmarks: {:?}", bench_names); let version = rustc_version::version() .map(|v| format!("{}", v)) .unwrap_or("unknown".to_string()); let platform = BenchPlatform { compiler: version.clone(), }; let key = BenchKey { commit: message.commit, branch: message.branch, date: DateTime(chrono::Utc::now()), }; for bench_name in bench_names { let context = BenchContext { name: bench_name, backend: String::new(), }; 
run_bench(config, target_dir, bench_subdir, &key, &context, &platform)?; } delivery.ack(&channel).unwrap(); } other => { error!("consumer ended: {:?}", other); break; } } } let _ = connection.close(); Ok(()) } fn clone_and_build_benches( repo_dir: &Path, bench_subdir: &str, repo_url: &str, commit: &str, ) -> Vec<String> { info!("Cloning {} in {:?}", repo_url, repo_dir); Command::new("git") .arg("clone") .arg(repo_url) .arg(repo_dir) .status() .unwrap(); Command::new("git") .arg("checkout") .arg(commit) .current_dir(repo_dir) .status() .unwrap(); let build_path = format!("{}/{}", repo_dir.to_string_lossy(), bench_subdir); info!("Building {}", build_path); let status = Command::new("cargo") .arg("build") .arg("--release") .args(&["--features", "simd-nightly"]) .args(&["--features", "other-backends"]) .current_dir(&build_path) .status() .unwrap(); info!("Build ended with status: {}", status); let exec_path = format!("{}/target/release", repo_dir.to_string_lossy()); let output = Command::new("./all_benchmarks3") .arg("--list") .current_dir(exec_path) .output() .unwrap(); String::from_utf8_lossy(&output.stdout) .split_whitespace() .map(|s| s.to_string()) .collect() } fn run_bench( config: &BenchConfig, bench_dir: &Path, bench_subdir: &str, key: &BenchKey, context: &BenchContext, platform: &BenchPlatform, ) -> mongodb::error::Result<()> { let build_path = format!("{}/{}", bench_dir.to_string_lossy(), bench_subdir); let status = Command::new("cargo") .arg("run") .arg("--release") .args(&["--features", "simd-nightly"]) .args(&["--features", "other-backends"]) .args(&["--", "--bench", "--example", &context.name]) .current_dir(build_path) .status() .unwrap(); info!("Exit status for '{}' benchmark: {}", context.name, status); let entries = parse_results(bench_dir, bench_subdir, key, context, platform); upload_results(&config, &entries) } fn parse_results( repo_dir: &Path, bench_subdir: &str, key: &BenchKey, context: &BenchContext, platform: &BenchPlatform, ) -> 
Vec<BenchCSVEntry> { let bench_result_path = format!( "{}/{}/{}.csv", repo_dir.to_string_lossy(), bench_subdir, context.name ); info!("Parting bench file: {}", bench_result_path); let csv = parse_csv(bench_result_path).unwrap(); let mut entries = Vec::new(); for (backend, timings) in csv.0.into_iter().zip(csv.1.into_iter()) { let mut context = context.clone(); context.backend = backend; let entry = BenchCSVEntry { key: key.clone(), context, platform: platform.clone(), timings, }; entries.push(entry); } entries } fn upload_results(config: &BenchConfig, entries: &[BenchCSVEntry]) -> mongodb::error::Result<()> { let db = connect_to_mongodb(&config.mongodb_bencher_uri, &config.mongodb_db)?; let coll = db.collection("rapier3d"); for entry in entries { let doc = bson::to_document(entry).unwrap(); coll.insert_one(doc, None)?; } Ok(()) } fn parse_csv(path: impl AsRef<Path>) -> csv::Result<(Vec<String>, Vec<Vec<f32>>)> { let mut reader = csv::ReaderBuilder::new() .has_headers(true) .from_path(path) .unwrap(); let headers: Vec<_> = reader.headers()?.iter().map(|h| h.to_string()).collect(); let mut values = vec![Vec::new(); headers.len()]; for record in reader.records() { for (i, value) in record?.iter().enumerate() { let val = f32::from_str(value).unwrap(); values[i].push(val); } } Ok((headers, values)) } fn connect_to_mongodb(uri: &str, db: &str) -> mongodb::error::Result<Database> { use mongodb::sync::Client; let client = Client::with_uri_str(uri)?; Ok(client.database(db)) }
#[macro_use] extern crate log; use amiquip::{ Connection, ConsumerMessage, ConsumerOptions, Exchange, Publish, QueueDeclareOptions, }; use bson::DateTime; use clap::{App, Arg, SubCommand}; use dimforge_bench_common::{ BenchCSVEntry, BenchConfig, BenchContext, BenchKey, BenchMessage, BenchPlatform, }; use log::LevelFilter; use mongodb::sync::Database; use simple_logger::SimpleLogger; use std::fs::File; use std::io::Write; use std::path::Path; use std::process::Command; use std::str::FromStr; fn main() -> mongodb::error::Result<()> { SimpleLogger::new() .with_level(LevelFilter::Info) .init() .unwrap(); let matches = App::new("Dimforge benchmark tool") .arg( Arg::with_name("config") .short("f") .required(false) .takes_value(true) .help("path to the JSON configuration file"), ) .subcommand(SubCommand::with_name("configure").about("Configure credentials")) .subcommand( SubCommand::with_name("send") .about("Send a message to start a benchmark") .arg( Arg::with_name("repository") .short("r") .required(true) .takes_value(true) .help("the repository to clone"), ) .arg( Arg::with_name("branch") .short("b") .required(true) .takes_value(true) .help("the branch of the commit to compile"), ) .arg( Arg::with_name("commit") .short("c") .required(true) .takes_value(true) .help("the commit to compile"), ), ) .subcommand(SubCommand::with_name("listen").about("Listen to incoming benchmark messages")) .get_matches(); let config = matches.value_of("config"); let config = BenchConfig::from_json_file(config); if let Some(matches) = matches.subcommand_matches("send") { let repository = matches.value_of("repository").unwrap().to_string(); let branch = matches.value_of("branch").unwrap().to_string(); let commit = matches.value_of("commit").unwrap().to_string(); let message = BenchMessage { repository, branch, commit, }; send_bench_message(&config, &message); info!("Bench message sent."); } if let Some(_) = matches.subcommand_matches("listen") { listen_bench_messages(&config)?; } if let 
Some(_) = matches.subcommand_matches("configure") { configure(); } Ok(()) } fn configure() { println!("MongoDB bencher uri: "); let mongodb_bencher_uri = text_io::read!("{}\n"); println!("MongoDB server uri: "); let mongodb_server_uri = text_io::read!("{}\n"); println!("MongoDB database: "); let mongodb_db = text_io::read!("{}\n"); println!("Rabbitmq uri: "); let rabbitmq_uri = text_io::read!("{}\n"); println!("Save configuration to folder [$HOME/.dimforge]: "); let mut output_dir: String = text_io::read!("{}\n"); if output_dir.is_empty() { let home = std::env::var("HOME").unwrap_or(String::new()); output_dir = format!("{}/.dimforge", home); } let config = BenchConfig { mongodb_db, mongodb_bencher_uri, mongodb_server_uri, rabbitmq_uri, }; let config_json = serde_json::to_string(&config).unwrap(); std::fs::create_dir_all(&output_dir).unwrap(); let output_file = format!("{}/benchbot.json", output_dir); let mut out = File::create(&output_file).expect( "Could not open target configuration file. 
Did you run the `configure` subcommand yet?", ); out.write_all(config_json.as_bytes()).unwrap(); info!("Configuration successfully saved to '{}'.", output_file); } fn send_bench_message(config: &BenchConfig, message: &BenchMessage) { let mut connection = Connection::open(&config.rabbitmq_uri).unwrap(); let channel = connection.open_channel(None).unwrap(); let exchange = Exchange::direct(&channel); let message = serde_json::to_string(message).unwrap(); exchange .publish(Publish::new(message.as_bytes(), "benchmark")) .unwrap(); let _ = connection.close(); } fn listen_bench_messages(config: &BenchConfig) -> mongodb::error::Result<()> { let mut connection = Connection::open(&config.rabbitmq_uri).unwrap(); let channel = connection.open_channel(None).unwrap(); let queue = channel .queue_declare("benchmark", QueueDeclareOptions::default()) .unwrap(); let consumer = queue.consume(ConsumerOptions::default()).unwrap(); for message in consumer.receiver().iter() { match message { ConsumerMessage::Delivery(delivery) => { let body = String::from_utf8_lossy(&delivery.body); let message = serde_json::from_str::<BenchMessage>(&body); if delivery.redelivered { warn!("Dropping redelivered message: {:?}", message); let _ = delivery.ack(&channel); continue; } let message = message.unwrap(); info!("Received bench message: {:?}", message); let tempdir = tempfile::tempdir().unwrap(); let target_dir = tempdir.path(); let bench_subdir = "benchmarks3d"; let bench_names = clone_and_build_benches( target_dir, bench_subdir, &message.repository, &message.commit, ); info!("About to run benchmarks: {:?}", bench_names); let version = rustc_version::version() .map(|v| format!("{}", v)) .unwrap_or("unknown".to_string()); let platform = BenchPlatform { compiler: version.clone(), }; let key = BenchKey { commit: message.commit, branch: message.branch, date: DateTime(chrono::Utc::now()), }; for bench_name in bench_names { let context = BenchContext { name: bench_name, backend: String::new(), }; 
run_bench(config, target_dir, bench_subdir, &key, &context, &platform)?; } delivery.ack(&channel).unwrap(); } other => { error!("consumer ended: {:?}", other); break; } } } let _ = connection.close(); Ok(()) } fn clone_and_build_benches( repo_dir: &Path, bench_subdir: &str, repo_url: &str, commit: &str, ) -> Vec<String> { info!("Cloning {} in {:?}", repo_url, repo_dir); Command::new("git") .arg("clone") .arg(repo_url) .arg(repo_dir) .status() .unwrap(); Command::new("git") .arg("checkout") .arg(commit) .current_dir(repo_dir) .status() .unwrap(); let build_path = format!("{}/{}", repo_dir.to_string_lossy(), bench_subdir); info!("Building {}", build_path); let status = Command::new("cargo") .arg("build") .arg("--release") .args(&["--features", "simd-nightly"]) .args(&["--features", "other-backends"]) .current_dir(&build_path) .status() .unwrap(); info!("Build ended with status: {}", status); let exec_path = format!("{}/target/release", repo_dir.to_string_lossy()); let output = Command::new("./all_benchmarks3") .arg("--list") .current_dir(exec_path) .output() .unwrap(); String::from_utf8_lossy(&output.stdout) .split_whitespace() .map(|s| s.to_string()) .collect() } fn run_bench( config: &BenchConfig, bench_dir: &Path, bench_subdir: &str, key: &BenchKey, context: &BenchContext, platform: &BenchPlatform, ) -> mongodb::error::Result<()> { let build_path = format!("{}/{}", bench_dir.to_string_lossy(), bench_subdir); let status = Command::new("cargo") .arg("run") .arg("--release") .args(&["--features", "simd-nightly"]) .args(&["--features", "other-backends"]) .args(&["--", "--bench", "--example", &context.name]) .current_dir(build_path) .status() .unwrap(); info!("Exit status for '{}' benchmark: {}", context.name, status); let entries = parse_results(bench_dir, bench_subdir, key, context, platform); upload_results(&config, &entries) } fn parse_results( repo_dir: &Path, bench_subdir: &str, key: &BenchKey, context: &BenchContext, platform: &BenchPlatform, ) -> 
Vec<BenchCSVEntry> { let bench_result_path = format!( "{}/{}/{}.csv", repo_dir.to_string_lossy(), bench_subdir, context.name ); info!("Parting bench file: {}", bench_result_path); let csv = parse_csv(bench_result_path).unwrap(); let mut entries = Vec::new(); for (backend, timings) in csv.0.into_iter().zip(csv.1.into_iter()) { let mut context = context.clone(); context.backend = backend; let entry = BenchCSVEntry { key: key.clone(), context, platform: platform.clone(), timings, }; entries.push(entry); } entries } fn upload_results(config: &BenchConfig, entries: &[BenchCSVEntry]) -> mongodb::error::Result<()> { let db = connect_to_mongodb(&config.mongodb_bencher_u
fn parse_csv(path: impl AsRef<Path>) -> csv::Result<(Vec<String>, Vec<Vec<f32>>)> { let mut reader = csv::ReaderBuilder::new() .has_headers(true) .from_path(path) .unwrap(); let headers: Vec<_> = reader.headers()?.iter().map(|h| h.to_string()).collect(); let mut values = vec![Vec::new(); headers.len()]; for record in reader.records() { for (i, value) in record?.iter().enumerate() { let val = f32::from_str(value).unwrap(); values[i].push(val); } } Ok((headers, values)) } fn connect_to_mongodb(uri: &str, db: &str) -> mongodb::error::Result<Database> { use mongodb::sync::Client; let client = Client::with_uri_str(uri)?; Ok(client.database(db)) }
ri, &config.mongodb_db)?; let coll = db.collection("rapier3d"); for entry in entries { let doc = bson::to_document(entry).unwrap(); coll.insert_one(doc, None)?; } Ok(()) }
function_block-function_prefixed
[ { "content": "fn connect_to_mongodb(uri: &str, db: &str) -> mongodb::error::Result<Database> {\n\n use mongodb::sync::Client;\n\n let client = Client::with_uri_str(&uri)?;\n\n Ok(client.database(db))\n\n}\n", "file_path": "server/src/main.rs", "rank": 0, "score": 139675.0472944086 }, { "content": "fn query_latest_branch_date(\n\n collection: &Collection,\n\n branch: &str,\n\n) -> mongodb::error::Result<Option<DateTime<Utc>>> {\n\n let agg_match = doc! {\n\n \"$match\": { \"key.branch\": branch }\n\n };\n\n let agg_group = doc! {\n\n \"$group\": {\n\n \"_id\": \"$key.branch\",\n\n \"maxDate\": { \"$max\": \"$key.date\" },\n\n }\n\n };\n\n\n\n if let Some(Ok(doc)) = collection\n\n .aggregate(vec![agg_match, agg_group], None)?\n\n .next()\n\n {\n\n Ok(doc.get_datetime(\"maxDate\").ok().cloned())\n\n } else {\n\n Ok(None)\n\n }\n\n}\n\n\n", "file_path": "server/src/main.rs", "rank": 7, "score": 98940.36167035074 }, { "content": "fn main() {\n\n SimpleLogger::new()\n\n .with_level(LevelFilter::Info)\n\n .init()\n\n .unwrap();\n\n\n\n let config = BenchConfig::from_json_file(None);\n\n let db = connect_to_mongodb(&config.mongodb_server_uri, &config.mongodb_db).unwrap();\n\n let state = ServerState { db };\n\n\n\n let cors = rocket_cors::CorsOptions::default().to_cors().unwrap();\n\n let rocket_config = Config::build(Environment::Production)\n\n .address(\"127.0.0.1\")\n\n .port(7878)\n\n .finalize()\n\n .unwrap();\n\n\n\n rocket::custom(rocket_config)\n\n .mount(\"/\", routes![graph_csv, graph_list])\n\n .manage(state)\n\n .attach(cors)\n\n .launch();\n\n}\n\n\n", "file_path": "server/src/main.rs", "rank": 9, "score": 93493.8396538159 }, { "content": "#[get(\"/list?<field>&<project>\")]\n\nfn graph_list(state: State<ServerState>, field: String, project: String) -> String {\n\n let db = &state.db;\n\n let collection = db.collection(&project);\n\n let values: Vec<_> = collection\n\n .distinct(&field, None, None)\n\n .unwrap()\n\n .into_iter()\n\n .map(|b| 
b.into_canonical_extjson())\n\n .collect();\n\n serde_json::to_string(&values).unwrap()\n\n}\n\n\n", "file_path": "server/src/main.rs", "rank": 10, "score": 91140.99656157047 }, { "content": "#[get(\"/graph/csv?<project>&<date1>&<date2>&<otherEngines>\")]\n\n#[allow(non_snake_case)]\n\nfn graph_csv(\n\n state: State<ServerState>,\n\n project: String,\n\n date1: i64,\n\n date2: i64,\n\n otherEngines: Option<bool>,\n\n) -> Json<String> {\n\n #[derive(Clone, Serialize, Deserialize)]\n\n struct BenchCSVResult {\n\n entries1: Vec<BenchCSVEntry>,\n\n entries2: Vec<BenchCSVEntry>,\n\n }\n\n\n\n info!(\"Processing request: {}, {}, {}\", project, date1, date2);\n\n let collection = state.db.collection(&project);\n\n let other_engines = otherEngines.unwrap_or(false);\n\n\n\n // 2. Retrieve all the documents at these dates for these branches.\n\n let mut filter1 = doc! {\n\n \"key.date\": Utc.timestamp_millis(date1)\n", "file_path": "server/src/main.rs", "rank": 12, "score": 86875.26361045998 }, { "content": "const path = require(\"path\");\n", "file_path": "client/webpack.config.js", "rank": 14, "score": 70276.1723228434 }, { "content": "const webpack = require('webpack');\n\nconst path = require(\"path\");\n\nconst CopyPlugin = require(\"copy-webpack-plugin\");\n\n\n\nconst dist = path.resolve(__dirname, \"dist\");\n\n\n\nconst mode = \"production\";\n\n\n\nconst appConfig = {\n\n mode: mode,\n\n entry: \"./src/index.js\",\n\n devServer: {\n\n contentBase: dist\n\n },\n\n resolve: {\n\n extensions: [\".js\"]\n\n },\n\n output: {\n\n path: dist,\n\n filename: \"index.js\"\n\n },\n\n plugins: [\n\n new CopyPlugin([\n\n path.resolve(__dirname, \"static\")\n\n ]),\n\n new webpack.IgnorePlugin(/(fs)/)\n\n ]\n\n};\n\n\n\nmodule.exports = [appConfig];\n", "file_path": "client/webpack.config.js", "rank": 16, "score": 50637.387158833706 }, { "content": "Continuous benchmarking for Dimforge crates. 
See results:\n", "file_path": "README.md", "rank": 17, "score": 40660.43487008771 }, { "content": "struct ServerState {\n\n db: Database,\n\n}\n\n\n", "file_path": "server/src/main.rs", "rank": 18, "score": 40026.30907503566 }, { "content": " import _ from 'lodash';\n\n import Chart from 'chart.js'\n\n import seed from 'seed-random'\n\n\n\nfunction dateMillisFromKey(key) {\n\n return parseInt(key.date.$date.$numberLong);\n\n}\n\n\n\nfunction populateDropdown(dropdown, keys) {\n\n _.forEach(keys, key => {\n\n let dateMillis = dateMillisFromKey(key);\n\n let date = new Date(dateMillis);\n\n const option = document.createElement('option');\n\n option.innerHTML = key.branch + \"@\" + key.commit + \" | \" + date.toISOString();\n\n option.value = dateMillis;\n\n dropdown.appendChild(option);\n\n });\n\n}\n\n\n\nfunction canvas() {\n\n const canvas = document.createElement('canvas');\n\n canvas.height = 100;\n\n return canvas;\n\n}\n\n\n\nseed('dimforge', {global: true});\n\n\n\nfunction removeAllChildNodes(parent) {\n\n while (parent.firstChild) {\n\n parent.removeChild(parent.firstChild);\n\n }\n\n}\n\n\n\nfunction getRandomColor() {\n\n var letters = '0123456789ABCDEF';\n\n var color = '#';\n\n for (var i = 0; i < 6; i++) {\n\n color += letters[Math.floor(Math.random() * 16)];\n\n }\n\n return color;\n\n}\n\n\n\nlet colors = new Map();\n\nlet dropdown1 = document.getElementById('dropdown1');\n\nlet dropdown2 = document.getElementById('dropdown2');\n\nlet graphsContainer = document.getElementById('graphs-container');\n\nlet checkboxOtherEngines = document.getElementById('checkbox-other-engines');\n\n\n\nfunction reloadBenchmarks() {\n\n let date1 = dropdown1.value;\n\n let date2 = dropdown2.value;\n\n let showOtherEngines = checkboxOtherEngines.checked;\n\n let url = 'https://benchmarks.dimforge.com/graph/csv?project=rapier3d&date1=' + date1 + '&date2=' + date2 + '&otherEngines=' + showOtherEngines;\n\n fetch(url)\n\n .then(response => response.json())\n\n .then(data => 
{\n\n let key1 = data.entries1[0].key;\n\n let key2 = data.entries2[0].key;\n\n let pltf1 = data.entries1[0].platform;\n\n let pltf2 = data.entries2[0].platform;\n\n let titlePart1 = '{' + key1.branch + '@' + key1.commit + '}';\n\n let titlePart2 = '{' + key2.branch + '@' + key2.commit + '}';\n\n let titleTail = pltf1.compiler == pltf2.compiler ?\n\n pltf1.compiler : \"{\" + pltf1.compiler + \" vs. \" + pltf2.compiler + \"}\";\n\n\n\n let filteredEntries2 = _.filter(data.entries2, e => e.context.backend == 'rapier');\n\n _.forEach(filteredEntries2, e => e.context.backend = 'rapier ' + titlePart2);\n\n _.forEach(data.entries1, e => {\n\n if (e.context.backend == 'rapier') {\n\n e.context.backend = 'rapier ' + titlePart1;\n\n }\n\n });\n\n let allEntries = _.concat(data.entries1, filteredEntries2);\n\n let groupedEntries = _.groupBy(allEntries, e => e.context.name);\n\n removeAllChildNodes(graphsContainer);\n\n\n\n _.forEach(groupedEntries, (entries, name) => {\n\n let title = name + \" − \" + titlePart1 + \" vs. 
\" + titlePart2 + \" - \" + titleTail;\n\n let labels = _.range(0, entries[0].timings.length);\n\n let datasets = entries.map(entry => {\n\n if (!colors.get(entry.context.backend)) {\n\n colors.set(entry.context.backend, getRandomColor());\n\n }\n\n\n\n return {\n\n label: entry.context.backend,\n\n data: entry.timings,\n\n fill: false,\n\n borderColor: [\n\n colors.get(entry.context.backend)\n\n ],\n\n borderWidth: 2,\n\n pointRadius: 0\n\n };\n\n })\n\n\n\n let chartCanvas = canvas();\n\n graphsContainer.appendChild(chartCanvas);\n\n var ctx = chartCanvas.getContext('2d');\n\n new Chart(ctx, {\n\n type: 'line',\n\n data: {\n\n labels: labels,\n\n datasets: datasets\n\n },\n\n options: {\n\n title: {\n\n display: true,\n\n text: title\n\n },\n\n scales: {\n\n yAxes: [{\n\n ticks: {\n\n beginAtZero: true,\n\n callback: function(value, index, values) {\n\n return value + 'ms';\n\n }\n\n }\n\n }]\n\n }\n\n }\n\n });\n\n });\n\n });\n\n}\n\n\n\ndropdown1.onchange = reloadBenchmarks;\n\ndropdown2.onchange = reloadBenchmarks;\n\ncheckboxOtherEngines.onchange = reloadBenchmarks;\n\n\n\nfetch('https://benchmarks.dimforge.com/list?project=rapier3d&field=key')\n\n .then(response => response.json())\n\n .then(data => {\n\n data.sort(function(a, b) {\n\n return dateMillisFromKey(a) < dateMillisFromKey(b);\n\n });\n\n console.log(data);\n\n populateDropdown(dropdown1, data);\n\n populateDropdown(dropdown2, data);\n\n // See if the URL has the ref to select.\n\n let url = new URL(window.location);\n\n let date1 = url.searchParams.get(\"date1\");\n\n let date2 = url.searchParams.get(\"date2\");\n\n\n\n if (!!date1 && !!data.includes(date1))\n\n dropdown1.value = date1;\n\n else if (dropdown1.length > 1)\n\n dropdown1.value = dateMillisFromKey(data[1]);\n\n if (!!date2 && !!data.includes(date2))\n\n dropdown2.value = date2;\n\n else if (data.length > 0) {\n\n dropdown2.value = dateMillisFromKey(data[0]);\n\n }\n\n\n\n reloadBenchmarks();\n\n });\n", "file_path": 
"client/src/index.js", "rank": 19, "score": 38040.26378160434 }, { "content": " };\n\n let mut filter2 = doc! {\n\n \"key.date\": Utc.timestamp_millis(date2)\n\n };\n\n\n\n if !other_engines {\n\n filter1.insert(\"context.backend\", \"rapier\");\n\n filter2.insert(\"context.backend\", \"rapier\");\n\n }\n\n\n\n let docs1 = collection.find(filter1, None).unwrap();\n\n let docs2 = collection.find(filter2, None).unwrap();\n\n\n\n let entries1: Vec<_> = docs1\n\n .filter_map(|doc| doc.ok())\n\n .filter_map(|doc| bson::from_document::<BenchCSVEntry>(doc).ok())\n\n .collect();\n\n let entries2: Vec<_> = docs2\n\n .filter_map(|doc| doc.ok())\n\n .filter_map(|doc| bson::from_document::<BenchCSVEntry>(doc).ok())\n\n .collect();\n\n\n\n // 3. Build a JSON document with the corresponding infos and return it to the client.\n\n let result = BenchCSVResult { entries1, entries2 };\n\n\n\n Json(serde_json::to_string(&result).unwrap())\n\n}\n\n\n", "file_path": "server/src/main.rs", "rank": 20, "score": 37554.22109543439 }, { "content": "#![feature(proc_macro_hygiene, decl_macro)]\n\n\n\n#[macro_use]\n\nextern crate log;\n\n#[macro_use]\n\nextern crate bson;\n\n#[macro_use]\n\nextern crate serde;\n\n#[macro_use]\n\nextern crate rocket;\n\n\n\nuse chrono::offset::TimeZone;\n\nuse chrono::{DateTime, Utc};\n\nuse dimforge_bench_common::{BenchCSVEntry, BenchConfig};\n\nuse log::LevelFilter;\n\nuse mongodb::sync::{Collection, Database};\n\nuse rocket::config::{Config, Environment};\n\nuse rocket::response::content::Json;\n\nuse rocket::State;\n\nuse simple_logger::SimpleLogger;\n\n\n", "file_path": "server/src/main.rs", "rank": 21, "score": 37552.36268547383 }, { "content": "const appConfig = {\n\n mode: mode,\n\n entry: \"./src/index.js\",\n\n devServer: {\n\n contentBase: dist\n\n },\n\n resolve: {\n\n extensions: [\".js\"]\n\n },\n\n output: {\n\n path: dist,\n\n filename: \"index.js\"\n\n },\n\n plugins: [\n\n new CopyPlugin([\n\n path.resolve(__dirname, \"static\")\n\n ]),\n\n new 
webpack.IgnorePlugin(/(fs)/)\n\n ]\n", "file_path": "client/webpack.config.js", "rank": 22, "score": 36200.56023233841 }, { "content": "const webpack = require('webpack');\n", "file_path": "client/webpack.config.js", "rank": 33, "score": 31256.394145508024 }, { "content": "const mode = \"production\";\n", "file_path": "client/webpack.config.js", "rank": 34, "score": 31256.394145508024 }, { "content": "const dist = path.resolve(__dirname, \"dist\");\n", "file_path": "client/webpack.config.js", "rank": 35, "score": 31256.394145508024 }, { "content": "const CopyPlugin = require(\"copy-webpack-plugin\");\n", "file_path": "client/webpack.config.js", "rank": 36, "score": 30011.071541693156 }, { "content": "let colors = new Map();\n", "file_path": "client/src/index.js", "rank": 37, "score": 15000.208570502702 }, { "content": "let dropdown2 = document.getElementById('dropdown2');\n", "file_path": "client/src/index.js", "rank": 38, "score": 15000.208570502702 }, { "content": "let dropdown1 = document.getElementById('dropdown1');\n", "file_path": "client/src/index.js", "rank": 39, "score": 15000.208570502702 }, { "content": "let graphsContainer = document.getElementById('graphs-container');\n", "file_path": "client/src/index.js", "rank": 40, "score": 14402.567693956835 }, { "content": "let checkboxOtherEngines = document.getElementById('checkbox-other-engines');\n", "file_path": "client/src/index.js", "rank": 41, "score": 14402.567693956835 }, { "content": "#[macro_use]\n\nextern crate serde;\n\n\n\nuse bson::DateTime;\n\nuse std::fs::File;\n\n\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct BenchConfig {\n\n pub mongodb_bencher_uri: String,\n\n pub mongodb_server_uri: String,\n\n pub rabbitmq_uri: String,\n\n pub mongodb_db: String,\n\n}\n\n\n\nimpl BenchConfig {\n\n pub fn from_json_file(path: Option<&str>) -> Self {\n\n let home = std::env::var(\"HOME\").unwrap_or(String::new());\n\n let default_path = format!(\"{}/.dimforge/benchbot.json\", home);\n\n 
let path = path.unwrap_or(&default_path);\n\n let file = File::open(path).expect(\"Could not open configuration file.\");\n", "file_path": "common/src/lib.rs", "rank": 42, "score": 22.74385082775398 }, { "content": "\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct BenchContext {\n\n /// Name of what is being benched.\n\n pub name: String,\n\n /// The backend used for this benchmark.\n\n pub backend: String,\n\n}\n\n\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct BenchPlatform {\n\n /// Compiler version used to run the benchmarks.\n\n pub compiler: String,\n\n}\n\n\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct BenchCSVEntry {\n\n /// Benchmark key.\n\n pub key: BenchKey,\n\n /// Benchmark context.\n\n pub context: BenchContext,\n\n /// Details about the platforms the benchmark is run on.\n\n pub platform: BenchPlatform,\n\n /// Timings in milliseconds.\n\n pub timings: Vec<f32>,\n\n // TODO: also add the float type, simd, parallelism, and processor?\n\n}\n", "file_path": "common/src/lib.rs", "rank": 43, "score": 20.311260300191872 }, { "content": " serde_json::from_reader(file).expect(\"Could not read configuration file as JSON.\")\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct BenchMessage {\n\n pub repository: String,\n\n pub branch: String,\n\n pub commit: String,\n\n}\n\n\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\npub struct BenchKey {\n\n /// The commit SHA where this benchmark is run.\n\n pub commit: String,\n\n /// The branch where this benchmark is run.\n\n pub branch: String,\n\n /// When this benchmark is run.\n\n pub date: DateTime,\n\n}\n", "file_path": "common/src/lib.rs", "rank": 44, "score": 18.657321921418827 } ]
Rust
server/src/main.rs
smokku/soldank
9aa7d307121faf7d482bf102c76db34411910a5e
#[macro_use] extern crate clap; use color_eyre::eyre::Result; use hecs::World; use smol::future; use std::{ collections::VecDeque, net::SocketAddr, sync::{Arc, RwLock}, time::{Duration, Instant}, }; use crate::{ constants::*, cvars::{set_cli_cvars, Config, NetConfig}, networking::Networking, }; use soldank_shared::{messages::NetworkMessage, networking::GameWorld}; mod cheat; mod cli; mod constants; mod cvars; mod networking; mod state; mod systems; #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum GameState { Lobby, InGame, } fn main() -> Result<()> { color_eyre::install()?; env_logger::init(); smol::block_on(async { let cmd = cli::parse_cli_args(); let mut map_name = cmd.value_of("map").unwrap_or(DEFAULT_MAP).to_owned(); map_name.push_str(".pms"); log::info!("Using map: {}", map_name); let mut config = Config { net: NetConfig { orb: Arc::new(RwLock::new(orb::Config { timestep_seconds: TIMESTEP_RATE, ..Default::default() })), ..Default::default() }, ..Default::default() }; set_cli_cvars(&mut config, &cmd); let mut networking = Networking::new(cmd.value_of("bind")).await; if let Some(key) = cmd.value_of("key") { networking.connection_key = key.to_string(); } let mut messages: VecDeque<(SocketAddr, NetworkMessage)> = VecDeque::new(); let mut world = World::new(); let mut game_state = GameState::Lobby; let mut server = orb::server::Server::<GameWorld>::new(config.net.orb.clone(), 0.0); let startup_time = Instant::now(); let mut previous_time = Instant::now(); let mut running = true; while running { let timeout = Duration::from_millis( (config.net.orb.read().unwrap().snapshot_send_period * 1000.) 
as _, ); future::race( networking.process(&mut world, &mut config, &mut messages), async { smol::Timer::after(timeout).await; }, ) .await; let current_time = Instant::now(); let delta_seconds = current_time.duration_since(previous_time).as_secs_f64(); let seconds_since_startup = current_time.duration_since(startup_time).as_secs_f64(); systems::process_network_messages( &mut world, &mut messages, &mut networking.connections, ); systems::message_dump(&mut messages); match game_state { GameState::Lobby => { systems::lobby(&mut world, &mut game_state, &networking); } GameState::InGame => { server.update(delta_seconds, seconds_since_startup); let server_display_state = server.display_state(); log::trace!( "server_display_state: {}", server_display_state.inner().len() ); networking.process_simulation(&mut server); if networking.connections.is_empty() { log::info!("No connections left - exiting"); running = false; } } } previous_time = current_time; networking.post_process(&config); } log::info!("Exiting server"); Ok(()) }) }
#[macro_use] extern crate clap; use color_eyre::eyre::Result; use hecs::World; use smol::future; use std::{ collections::VecDeque, net::SocketAddr, sync::{Arc, RwLock}, time::{Duration, Instant}, }; use crate::{ constants::*, cvars::{set_cli_cvars, Config, NetConfig}, networking::Networking, }; use soldank_shared::{messages::NetworkMessage, networking::GameWorld}; mod cheat; mod cli; mod constants; mod cvars; mod networking; mod state; mod systems; #[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)] pub enum GameState { Lobby, InGame, } fn main() -> Result<()> { color_eyre::install()?; env_logger::init(); smol::block_on(async { let cmd = cli::parse_cli_args(); let mut map_name = cmd.value_of("map").unwrap_or(DEFAULT_MAP).to_owned(); map_name.push_str(".pms"); log::info!("Using map: {}", map_name); let mut config = Config { net: NetConfig { orb: Arc::new(RwLock::new(orb::Config { timestep_seconds: TIMESTEP_RATE, ..Default::default() })), ..Default::default() }, ..Default::default() }; set_cli_cvars(&mut config, &cmd); let mut networking = Networking::new(cmd.value_of("bind")).await; if let Some(key) = cmd.value_of("key") { networking.connection_key = key.to_string(); } let mut messages: VecDeque<(SocketAddr, NetworkMessage)> = VecDeque::new(); let mut world = World::new(); let mut game_state = GameState::Lobby; let mut server = orb::server::Server::<GameWorld>::new(config.net.orb.clone(), 0.0); let startup_time = Instant::now(); let mut previous_time = Instant::now(); let mut running = true; while running { let timeout = Duration::from_millis( (config.net.orb.read().unwrap().snapshot_send_period * 1000.) 
as _, ); future::race( networking.process(&mut world, &mut config, &mut messages), async { smol::Timer::after(timeout).await; }, ) .await; let current_time = Instant::now(); let delta_seconds = current_time.duration_since(previous_time).as_secs_f64(); let seconds_since_startup = current_time.duration_since(startup_time).as_secs_f64(); systems::process_network_messages( &mut world, &mut messages, &mut networking.connections, ); systems::message_dump(&mut messages); match game_state { GameState::Lobby => { systems::lobby(&mut world, &mut game_state, &networking); } GameState::InGame => { server.update(delta_seconds, seconds_since_startup); let server_display_state = server.display_state(); log::trace!( "server_display_state: {}", server_display_state.inner().len() ); networking.process_simula
networking.post_process(&config); } log::info!("Exiting server"); Ok(()) }) }
tion(&mut server); if networking.connections.is_empty() { log::info!("No connections left - exiting"); running = false; } } } previous_time = current_time;
function_block-random_span
[ { "content": "pub fn lobby(world: &mut World, game_state: &mut GameState, networking: &Networking) {\n\n if *game_state != GameState::Lobby {\n\n log::error!(\"Running lobby system outside Lobby GameState\");\n\n }\n\n\n\n let ready = !networking.connections.is_empty()\n\n && networking\n\n .connections\n\n .iter()\n\n .all(|(_, conn)| conn.authorized && conn.entity.is_some() && conn.ready);\n\n\n\n if ready {\n\n log::info!(\"All players ready - switching to InGame state\");\n\n *game_state = GameState::InGame;\n\n\n\n for (&addr, conn) in networking.connections.iter() {\n\n let entity = conn.entity.unwrap();\n\n world.spawn_at(\n\n entity,\n\n (\n", "file_path": "server/src/systems.rs", "rank": 0, "score": 386889.4162242777 }, { "content": "pub fn set_cli_cvars(config: &mut dyn IVisit, cmd: &clap::ArgMatches) {\n\n if let Some(values) = cmd.values_of(\"set\") {\n\n for chunk in values.collect::<Vec<_>>().chunks_exact(2) {\n\n let cvar = chunk[0];\n\n let value = chunk[1];\n\n match cvar::console::set(config, cvar, value) {\n\n Ok(set) => {\n\n if !set {\n\n log::error!(\n\n \"Cannot set cvar `{} = {}`: cvar not available.\",\n\n cvar,\n\n value\n\n );\n\n }\n\n }\n\n Err(err) => {\n\n log::error!(\"Cannot parse `{} = {}`: {}.\", cvar, value, err);\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "shared/src/cvars.rs", "rank": 1, "score": 383414.1865552966 }, { "content": "pub fn follow_camera(world: &mut World, config: &Config) {\n\n for (_, (pos, rb_pos)) in world\n\n .query::<With<Camera, (Option<&mut Position>, Option<&RigidBodyPosition>)>>()\n\n .iter()\n\n {\n\n if let Some(rb_pos) = rb_pos {\n\n if let Some(mut pos) = pos {\n\n let mut vec_pos = rb_pos.position.translation.into();\n\n vec_pos *= config.phys.scale;\n\n pos.0 = lerp(pos.0, vec_pos, 0.33);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "client/src/game/systems/movement.rs", "rank": 2, "score": 323389.08832866687 }, { "content": "pub fn force_movement(world: &mut World, config: &Config) {\n\n 
const RUNSPEED: f32 = 0.118;\n\n const RUNSPEEDUP: f32 = RUNSPEED / 6.0;\n\n const MAX_VELOCITY: f32 = 11.0;\n\n\n\n for (_, (input, mut forces, mut velocity, mass_properties)) in world\n\n .query::<With<\n\n ForceMovement,\n\n (\n\n &Input,\n\n &mut RigidBodyForces,\n\n &mut RigidBodyVelocity,\n\n &RigidBodyMassProps,\n\n ),\n\n >>()\n\n .iter()\n\n {\n\n if input.state.contains(InputState::MoveLeft)\n\n && !input.state.contains(InputState::MoveRight)\n\n {\n", "file_path": "client/src/game/systems/movement.rs", "rank": 3, "score": 323389.08832866687 }, { "content": "pub fn create_map_colliders(world: &mut World, resources: &Resources, config: &Config) {\n\n let map = resources.get::<MapFile>().unwrap();\n\n let scale = config.phys.scale;\n\n\n\n for polygon in map.polygons.iter() {\n\n match polygon.polytype {\n\n PolyType::NoCollide | PolyType::Background | PolyType::BackgroundTransition => continue,\n\n _ => {}\n\n }\n\n\n\n let vertices: Vec<Point<Real>> = polygon\n\n .vertices\n\n .iter()\n\n .map(|v| point![v.x / scale, v.y / scale])\n\n .collect();\n\n let mut collider = ColliderBundle {\n\n shape: ColliderShape::triangle(vertices[0], vertices[1], vertices[2]),\n\n flags: ColliderFlags {\n\n collision_groups: polygon.polytype.into(),\n\n ..Default::default()\n", "file_path": "client/src/game/physics.rs", "rank": 4, "score": 304810.6601626086 }, { "content": "pub fn update_soldiers(world: &mut World, resources: &Resources, config: &Config) {\n\n let mut emitter = Vec::new();\n\n\n\n for (_entity, (mut soldier, input, rb_pos)) in world\n\n .query::<(&mut Soldier, Option<&Input>, Option<&RigidBodyPosition>)>()\n\n .iter()\n\n {\n\n if let Some(input) = input {\n\n soldier.control.left = input.state.contains(InputState::MoveLeft);\n\n soldier.control.right = input.state.contains(InputState::MoveRight);\n\n soldier.control.up = input.state.contains(InputState::Jump);\n\n soldier.control.down = input.state.contains(InputState::Crouch);\n\n soldier.control.fire = 
input.state.contains(InputState::Fire);\n\n soldier.control.jets = input.state.contains(InputState::Jet);\n\n // soldier.control.grenade = input.state.contains(InputState::);\n\n soldier.control.change = input.state.contains(InputState::ChangeWeapon);\n\n soldier.control.throw = input.state.contains(InputState::ThrowGrenade);\n\n soldier.control.drop = input.state.contains(InputState::DropWeapon);\n\n soldier.control.reload = input.state.contains(InputState::Reload);\n\n soldier.control.prone = input.state.contains(InputState::Prone);\n", "file_path": "client/src/game/systems/soldier.rs", "rank": 5, "score": 304711.7152712123 }, { "content": "pub fn apply_input(world: &mut World, eng: &Engine) {\n\n for (_, mut input) in world.query::<With<Pawn, &mut Input>>().iter() {\n\n input.state = eng.input.state;\n\n }\n\n}\n", "file_path": "client/src/game/systems/mod.rs", "rank": 6, "score": 279715.21383071097 }, { "content": "pub fn build_state_message(world: &World, _client_entity: Entity, time: &systems::Time) -> Bytes {\n\n // TODO: scope updates to client_entity visibility range\n\n // FIXME: send only entities changed since last client acknowledged tick\n\n\n\n let mut entities = HashMap::new();\n\n for entity_ref in world.iter() {\n\n let components = entities.entry(entity_ref.entity()).or_insert_with(Vec::new);\n\n if let Some(soldier) = entity_ref.get::<components::Soldier>() {\n\n components.push(ComponentValue::Soldier((*soldier).clone()));\n\n }\n\n if let Some(nick) = entity_ref.get::<components::Nick>() {\n\n components.push(ComponentValue::Nick((*nick).clone()));\n\n }\n\n if let Some(pos) = entity_ref.get::<components::Position>() {\n\n components.push(ComponentValue::Pos((*pos).clone()));\n\n }\n\n }\n\n\n\n encode_message(NetworkMessage::GameState {\n\n tick: time.tick,\n\n entities,\n\n })\n\n}\n", "file_path": "server/src/state.rs", "rank": 7, "score": 275637.69632084377 }, { "content": "pub fn parse_cli_args<'a>() -> clap::ArgMatches<'a> {\n\n 
clap::app_from_crate!()\n\n .arg(\n\n clap::Arg::with_name(\"bind\")\n\n .value_name(\"address:port\")\n\n .help(\"IP address and port to bind\")\n\n .short(\"b\")\n\n .long(\"bind\")\n\n .takes_value(true)\n\n .env(\"SOLDANK_SERVER_BIND\"),\n\n )\n\n .arg(\n\n clap::Arg::with_name(\"map\")\n\n .value_name(\"map name\")\n\n .help(\"name of map to load\")\n\n .short(\"m\")\n\n .long(\"map\")\n\n .takes_value(true)\n\n .default_value(constants::DEFAULT_MAP)\n\n .env(\"SOLDANK_USE_MAP\"),\n", "file_path": "server/src/cli.rs", "rank": 8, "score": 270986.7521684171 }, { "content": "pub fn kinetic_movement(world: &mut World) {\n\n for (_entity, mut body) in world.query::<&mut Particle>().iter() {\n\n if body.active {\n\n body.euler();\n\n }\n\n }\n\n}\n\n\n\npub struct PrimitiveMovement;\n\n\n", "file_path": "client/src/game/systems/movement.rs", "rank": 9, "score": 267411.75259995117 }, { "content": "pub fn primitive_movement(world: &mut World) {\n\n for (_, (input, mut pos)) in world\n\n .query::<With<PrimitiveMovement, (&Input, &mut Position)>>()\n\n .iter()\n\n {\n\n let mut delta = Vec2::ZERO;\n\n\n\n if input.state.contains(InputState::MoveLeft) {\n\n delta.x -= 1.;\n\n }\n\n if input.state.contains(InputState::MoveRight) {\n\n delta.x += 1.;\n\n }\n\n if input.state.contains(InputState::Jump) {\n\n delta.y -= 1.;\n\n }\n\n if input.state.contains(InputState::Crouch) {\n\n delta.y += 1.;\n\n }\n\n\n\n if delta != Vec2::ZERO {\n\n **pos += delta;\n\n }\n\n }\n\n}\n\n\n\npub struct ForceMovement;\n\n\n", "file_path": "client/src/game/systems/movement.rs", "rank": 10, "score": 267411.75259995117 }, { "content": "pub fn process_network_messages(\n\n world: &mut World,\n\n messages: &mut VecDeque<(SocketAddr, NetworkMessage)>,\n\n connections: &mut HashMap<SocketAddr, Connection>,\n\n) {\n\n let mut control_updates = HashMap::new();\n\n let mut unprocessed = Vec::new();\n\n\n\n for (addr, message) in messages.drain(..) 
{\n\n match message {\n\n NetworkMessage::ControlState {\n\n ack_tick,\n\n begin_tick,\n\n control,\n\n } => {\n\n if let Some(conn) = connections.get_mut(&addr) {\n\n conn.ack_tick = ack_tick;\n\n } else {\n\n log::error!(\"Processing message from unknown connection: [{}]\", addr);\n\n }\n", "file_path": "server/src/systems.rs", "rank": 11, "score": 264478.4425936138 }, { "content": "pub fn dump_cvars(config: &mut dyn IVisit) {\n\n log::info!(\"--- cvars:\");\n\n cvar::console::walk(config, |path, node| {\n\n if let cvar::Node::Prop(prop) = node.as_node() {\n\n log::info!(\"{} = `{}`\", path, prop.get());\n\n }\n\n });\n\n}\n\n\n\npub struct Physics {\n\n pub scale: f32,\n\n pub gravity: f32,\n\n}\n\n\n\nimpl Default for Physics {\n\n fn default() -> Self {\n\n Self {\n\n scale: PHYSICS_SCALE,\n\n gravity: GRAV,\n\n }\n", "file_path": "shared/src/cvars.rs", "rank": 12, "score": 260805.71739521774 }, { "content": "pub fn rotate_balls(world: &mut World, timecur: f64) {\n\n for (_entity, mut sprite) in world.query::<&mut Sprite>().iter() {\n\n if let Transform::FromOrigin { rot, .. } = &mut sprite.transform {\n\n rot.0 = timecur as f32 % (2. * PI);\n\n }\n\n }\n\n}\n", "file_path": "client/src/game/systems/debug.rs", "rank": 13, "score": 249994.7905882613 }, { "content": "pub fn message_dump(messages: &mut VecDeque<(SocketAddr, NetworkMessage)>) {\n\n for (addr, message) in messages.drain(..) 
{\n\n log::warn!(\"{}: {:#?}\", addr, message);\n\n }\n\n}\n", "file_path": "shared/src/systems/debug.rs", "rank": 15, "score": 245337.75348874068 }, { "content": "pub fn apply_input(world: &World, time: &Time) {\n\n let tick = time.tick;\n\n\n\n for (entity, buffer) in world.query::<&mut ControlBuffer>().iter() {\n\n // FIXME: apply all queued inputs in rollback manner\n\n let max_tick = buffer.keys().max().unwrap();\n\n if let Some((control, _)) = buffer.get(max_tick) {\n\n systems::apply_input(world.entity(entity).unwrap(), *control);\n\n } else {\n\n log::warn!(\n\n \"Missed input for tick {}({}) on entity {:?}\",\n\n tick,\n\n max_tick,\n\n entity\n\n );\n\n }\n\n }\n\n}\n", "file_path": "server/src/systems.rs", "rank": 16, "score": 245240.73539530154 }, { "content": "pub fn update_cursor(world: &mut World, mouse_x: f32, mouse_y: f32) {\n\n for (_entity, mut cursor) in world.query::<&mut Cursor>().iter() {\n\n cursor.x = mouse_x;\n\n cursor.y = mouse_y;\n\n }\n\n\n\n for (_entity, mut camera) in world.query::<&mut Camera>().iter() {\n\n if camera.is_active && camera.centered {\n\n let zoom = f32::exp(camera.zoom);\n\n let mut offset = Vec2::ZERO;\n\n\n\n offset.x = zoom\n\n * (mouse_x - GAME_WIDTH / 2.0)\n\n * ((2.0 * 640.0 / GAME_WIDTH - 1.0)\n\n + (GAME_WIDTH - 640.0) / GAME_WIDTH * 0.0 / 6.8);\n\n offset.y = zoom * (mouse_y - GAME_HEIGHT / 2.0);\n\n\n\n camera.offset = lerp(camera.offset, offset, 0.14);\n\n }\n\n }\n\n}\n\n\n", "file_path": "client/src/render/systems.rs", "rank": 17, "score": 238780.44367184286 }, { "content": "pub fn parse_cli_args<'a>() -> clap::ArgMatches<'a> {\n\n clap::app_from_crate!()\n\n .arg(\n\n clap::Arg::with_name(\"map\")\n\n .help(\"name of map to load\")\n\n .short(\"m\")\n\n .long(\"map\")\n\n .takes_value(true)\n\n .default_value(constants::DEFAULT_MAP),\n\n )\n\n .arg(\n\n clap::Arg::with_name(\"debug\")\n\n .help(\"display debug UI on start (^` to toggle)\")\n\n .long(\"debug\"),\n\n )\n\n .arg(\n\n 
clap::Arg::with_name(\"connect\")\n\n .value_name(\"address:port\")\n\n .help(\"server address and port to connect\")\n\n .short(\"c\")\n", "file_path": "client/src/cli.rs", "rank": 18, "score": 238589.5128723845 }, { "content": "pub fn update_previous_physics(world: &mut World) {\n\n for (_entity, (mut prev, pos, vel, force, cont)) in world\n\n .query::<(\n\n &mut PreviousPhysics,\n\n Option<&RigidBodyPosition>,\n\n Option<&RigidBodyVelocity>,\n\n Option<&RigidBodyForces>,\n\n Option<&Contact>,\n\n )>()\n\n .iter()\n\n {\n\n if let Some(pos) = pos {\n\n prev.position = pos.position;\n\n }\n\n if let Some(vel) = vel {\n\n prev.linvel = vel.linvel;\n\n prev.angvel = vel.angvel;\n\n }\n\n if let Some(force) = force {\n\n prev.force = force.force;\n", "file_path": "client/src/game/physics.rs", "rank": 19, "score": 235909.67514251618 }, { "content": "pub fn render_cursor(world: &World, sprites: &Sprites, batch: &mut DrawBatch) {\n\n for (_entity, (cursor, mut sprite)) in world.query::<(&Cursor, &mut Sprite)>().iter() {\n\n let offset = if let Some(sprite) = sprite.sprite.as_ref() {\n\n vec2(sprite.width, sprite.height) / -2.\n\n } else {\n\n Vec2::ZERO\n\n };\n\n draw_sprite_in_batch(\n\n batch,\n\n sprites,\n\n &mut *sprite,\n\n Position(**cursor + offset),\n\n 0.0,\n\n );\n\n }\n\n}\n\n\n", "file_path": "client/src/render/systems.rs", "rank": 20, "score": 234862.52487645164 }, { "content": "pub fn build_ui(eng: &Engine<'_>, game: &mut GameState) {\n\n let gravity = game.config.phys.gravity;\n\n let debug = &mut game.config.debug;\n\n\n\n if debug.visible {\n\n let mouse = if let Some((_entity, cursor)) = game.world.query::<&Cursor>().iter().next() {\n\n **cursor\n\n } else {\n\n Vec2::ZERO\n\n };\n\n let scale = game.config.phys.scale;\n\n let (camera, camera_position) = game.world.get_camera_and_camera_position();\n\n let (dx, dy, _w, _h) = camera.viewport(*camera_position);\n\n let (x, y) = camera.mouse_to_world(*camera_position, mouse.x, mouse.y);\n\n\n\n 
egui::Window::new(\"Debugger\")\n\n .title_bar(false)\n\n .resizable(false)\n\n .collapsible(false)\n\n .show(eng.egui_ctx, |ui| {\n", "file_path": "client/src/debug/mod.rs", "rank": 21, "score": 227311.4690193868 }, { "content": "pub fn render_sprites(world: &World, sprites: &Sprites, batch: &mut DrawBatch, phys_scale: f32) {\n\n for (_entity, (mut sprite, position, rb_position)) in world\n\n .query::<(&mut Sprite, Option<&Position>, Option<&RigidBodyPosition>)>()\n\n .iter()\n\n {\n\n let params = if let Some(rbp) = rb_position {\n\n Some((\n\n Position::new(\n\n rbp.position.translation.vector.x * phys_scale,\n\n rbp.position.translation.vector.y * phys_scale,\n\n ),\n\n rbp.position.rotation.angle(),\n\n ))\n\n } else {\n\n position.map(|pos| (*pos, 0.0))\n\n };\n\n\n\n if let Some((pos, rot)) = params {\n\n draw_sprite_in_batch(batch, sprites, &mut *sprite, pos, rot);\n\n }\n\n }\n\n}\n\n\n", "file_path": "client/src/render/systems.rs", "rank": 22, "score": 218196.65181510441 }, { "content": "/// The [`DisplayState`] represents the information about how to display the [`World`][world] at\n\n/// its current state. For example, while a [`World`][world] might contain information about\n\n/// player's position and velocities, some games may only need to know about the position to render\n\n/// it (unless you're doing some fancy motion-blur). You can think of a [`DisplayState`] as the\n\n/// \"output\" of a [`World`][world]. There is nothing stopping you from making the [`DisplayState`]\n\n/// the same structure as the [`World`][world] if it makes more sense for your game, but most of\n\n/// the time, the [`World`][world] structure may contain things that are inefficient to copy around\n\n/// (e.g. 
an entire physics engine)\n\n///\n\n/// [world]: [crate::world::World]\n\npub trait DisplayState: Send + Sync + Clone {\n\n /// CrystalOrb needs to mix different [`DisplayState`]s from different [`World`][world]s\n\n /// together, as well as mix [`DisplayState`] from two adjacent timestamps. The\n\n /// [`from_interpolation`](DisplayState::from_interpolation) method tells CrystalOrb how to\n\n /// perform this \"mix\" operation. Here, the `t` parameter is the interpolation parameter that\n\n /// ranges between `0.0` and `1.0`, where `t = 0.0` represents the request to have 100% of\n\n /// `state1` and 0% of `state2`, and where `t = 1.0` represents the request to have 0% of\n\n /// `state1` and 100% of `state2`.\n\n ///\n\n /// A common operation to implement this function is through [linear\n\n /// interpolation](https://en.wikipedia.org/wiki/Linear_interpolation), which looks like this:\n\n ///\n\n /// ```text\n\n /// state1 * (1.0 - t) + state2 * t\n\n /// ```\n\n ///\n\n /// However, for things involving rotation, you may need to use [spherical linear\n\n /// interpolation](https://en.wikipedia.org/wiki/Slerp), or [circular\n\n /// statistics](https://en.wikipedia.org/wiki/Mean_of_circular_quantities), and perhaps you may\n\n /// need to convert between coordinate systems before/after performing the interpolation to get\n", "file_path": "orb/src/world/display_state.rs", "rank": 23, "score": 217133.40351737803 }, { "content": "pub fn tick_debug(world: &World, time: &systems::Time) {\n\n log::debug!(\"tick {}, entities: {}\", time.tick, world.len());\n\n for entity_ref in world.iter() {\n\n log::debug!(\n\n \"{:?}, components: {:?}\",\n\n entity_ref.entity(),\n\n entity_ref.len()\n\n );\n\n }\n\n}\n\n\n", "file_path": "shared/src/systems/debug.rs", "rank": 24, "score": 213953.31338982924 }, { "content": "pub fn process_contact_events(world: &mut World, resources: &Resources, now: f64) {\n\n let event_recv = resources\n\n 
.get_mut::<Arc<Mutex<BroadcastReceiver<ContactEvent>>>>()\n\n .unwrap();\n\n for event in event_recv.lock().unwrap().try_iter() {\n\n // log::debug!(\"Received contact event: {:?}\", event);\n\n match event {\n\n ContactEvent::Started(handle1, handle2) => {\n\n let entity1: Entity = handle1.entity();\n\n let entity2: Entity = handle2.entity();\n\n if let Ok(mut contact) = world.get_mut::<Contact>(entity1) {\n\n contact.timestamp = now;\n\n contact.entities.insert(entity2);\n\n }\n\n if let Ok(mut contact) = world.get_mut::<Contact>(entity2) {\n\n contact.timestamp = now;\n\n contact.entities.insert(entity1);\n\n }\n\n }\n\n ContactEvent::Stopped(handle1, handle2) => {\n", "file_path": "client/src/game/physics.rs", "rank": 25, "score": 206801.62194309523 }, { "content": "pub fn despawn_outliers(world: &mut World, max_pos: f32, phys_scale: f32) {\n\n let mut to_despawn = Vec::new();\n\n\n\n for (entity, pos) in world.query::<&RigidBodyPosition>().iter() {\n\n let x = pos.position.translation.x * phys_scale;\n\n let y = pos.position.translation.y * phys_scale;\n\n if !(-max_pos..=max_pos).contains(&x) || !(-max_pos..=max_pos).contains(&y) {\n\n to_despawn.push(entity);\n\n }\n\n }\n\n\n\n for (entity, pos) in world.query::<&Position>().iter() {\n\n if pos.x > max_pos || pos.x < -max_pos || pos.y > max_pos || pos.y < -max_pos {\n\n to_despawn.push(entity);\n\n }\n\n }\n\n\n\n for entity in to_despawn {\n\n world.despawn(entity).unwrap();\n\n }\n\n}\n", "file_path": "shared/src/physics.rs", "rank": 26, "score": 206801.62194309523 }, { "content": "fn toggle_state(ui: &mut egui::Ui, state: &mut bool, label: &str) {\n\n if ui.selectable_label(*state, label).clicked() {\n\n *state = !*state;\n\n }\n\n}\n\n\n", "file_path": "client/src/debug/mod.rs", "rank": 27, "score": 197871.73407287727 }, { "content": "pub fn create_physics_resources(resources: &mut Resources) {\n\n use multiqueue2::broadcast_queue;\n\n use std::sync::{Arc, Mutex};\n\n\n\n 
resources.insert(physics::PhysicsPipeline::new());\n\n resources.insert(physics::IntegrationParameters::default());\n\n resources.insert(physics::BroadPhase::new());\n\n resources.insert(physics::NarrowPhase::new());\n\n resources.insert(physics::IslandManager::new());\n\n resources.insert(physics::JointSet::new());\n\n resources.insert(physics::CCDSolver::new());\n\n resources.insert(physics::JointsEntityMap::default());\n\n resources.insert(physics::ModificationTracker::default());\n\n let (event_sender, event_recv) = broadcast_queue(64);\n\n resources.insert(game::physics::PhysicsEventHandler::new(event_sender));\n\n resources.insert(Arc::new(Mutex::new(event_recv)));\n\n}\n", "file_path": "client/src/main.rs", "rank": 28, "score": 195157.363232243 }, { "content": "fn toggle_state_inv(ui: &mut egui::Ui, state: &mut bool, label: &str) {\n\n if ui.selectable_label(!*state, label).clicked() {\n\n *state = !*state;\n\n }\n\n}\n", "file_path": "client/src/debug/mod.rs", "rank": 29, "score": 194878.21394570929 }, { "content": "pub fn encode_message(msg: NetworkMessage) -> Bytes {\n\n match msg {\n\n NetworkMessage::ConnectionAuthorize { nick, key } => {\n\n let mut msg = vec![OperationCode::CCREQ_AUTHORIZE as u8];\n\n let pkt = AuthPacket { nick, key };\n\n msg.extend(SerBin::serialize_bin(&pkt));\n\n msg.into()\n\n }\n\n NetworkMessage::Cvars(cvars) => {\n\n let mut msg = vec![OperationCode::STT_CVARS as u8];\n\n msg.extend(SerBin::serialize_bin(&cvars));\n\n msg.into()\n\n }\n\n NetworkMessage::ControlState {\n\n ack_tick,\n\n begin_tick,\n\n control,\n\n } => {\n\n let mut msg = vec![OperationCode::STT_CONTROL as u8];\n\n let pkt = ControlPacket {\n", "file_path": "shared/src/messages.rs", "rank": 30, "score": 188717.5546860631 }, { "content": "pub fn decode_message(data: &[u8]) -> Option<NetworkMessage> {\n\n let code = data[0];\n\n if let Ok(op_code) = OperationCode::try_from(code) {\n\n match op_code {\n\n OperationCode::CCREQ_CONNECT\n\n | 
OperationCode::CCREP_ACCEPT\n\n | OperationCode::CCREP_REJECT\n\n | OperationCode::CCREP_AUTHORIZED\n\n | OperationCode::CCREQ_READY => {\n\n panic!(\"Should not handle packet: 0x{:x} ({:?})\", code, op_code)\n\n }\n\n OperationCode::CCREQ_AUTHORIZE => {\n\n if let Ok(AuthPacket { nick, key }) = DeBin::deserialize_bin(&data[1..]) {\n\n return Some(NetworkMessage::ConnectionAuthorize { nick, key });\n\n }\n\n }\n\n OperationCode::STT_CVARS => {\n\n if let Ok(cvars) = DeBin::deserialize_bin(&data[1..]) {\n\n return Some(NetworkMessage::Cvars(cvars));\n\n }\n", "file_path": "shared/src/messages.rs", "rank": 31, "score": 182848.3287743454 }, { "content": "pub fn read_vertex<T: Read>(reader: &mut T) -> MapVertex {\n\n let pos = read_vec3(reader);\n\n let rhw = reader.read_f32::<LittleEndian>().unwrap();\n\n let color = read_color(reader);\n\n let u = reader.read_f32::<LittleEndian>().unwrap();\n\n let v = reader.read_f32::<LittleEndian>().unwrap();\n\n\n\n MapVertex {\n\n x: pos.x,\n\n y: pos.y,\n\n z: pos.z,\n\n rhw,\n\n color,\n\n u,\n\n v,\n\n }\n\n}\n\n\n", "file_path": "client/src/mapfile.rs", "rank": 32, "score": 175006.65330771665 }, { "content": "pub fn read_color<T: Read>(reader: &mut T) -> MapColor {\n\n let b = reader.read_u8().unwrap();\n\n let g = reader.read_u8().unwrap();\n\n let r = reader.read_u8().unwrap();\n\n let a = reader.read_u8().unwrap();\n\n\n\n MapColor { r, g, b, a }\n\n}\n\n\n", "file_path": "client/src/mapfile.rs", "rank": 33, "score": 175006.65330771665 }, { "content": "fn run_rhai(args: &[&str], env: &mut Env) -> Result<Option<String>, String> {\n\n let script = args[0];\n\n\n\n let ast = if let Some(ast) = env.ast.get(script) {\n\n ast\n\n } else {\n\n if !script.ends_with(\".rhai\") {\n\n return Err(\"Script must end with .rhai extension.\".to_string());\n\n }\n\n\n\n let mut file = env.fs.open(script).map_err(|err| err.to_string())?;\n\n let mut buffer = Vec::new();\n\n file.read_to_end(&mut buffer)\n\n .map_err(|err| 
err.to_string())?;\n\n\n\n let mut ast = env\n\n .engine\n\n .compile(String::from_utf8_lossy(&buffer).as_ref())\n\n .map_err(|err| format!(\"Failed to compile {}: {}\", script, err))?;\n\n ast.set_source(script);\n", "file_path": "client/src/engine/script.rs", "rank": 34, "score": 166301.97386138112 }, { "content": "fn cvars_get(args: &[&str], env: &mut Env) -> Result<Option<String>, String> {\n\n let var = args[0];\n\n if var.starts_with(\"--\") {\n\n match var {\n\n \"--dump\" => {\n\n let mut dump = Vec::new();\n\n cvar::console::walk(env.config, |path, node| {\n\n if let cvar::Node::Prop(prop) = node.as_node() {\n\n dump.push(format!(\"{} = `{}`\", path, prop.get()));\n\n }\n\n });\n\n Ok(Some(dump.join(\"\\n\")))\n\n }\n\n _ => Err(format!(\"Unknown option: {:?}.\", var)),\n\n }\n\n } else {\n\n Ok(cvar::console::get(env.config, args[0]))\n\n }\n\n}\n\n\n", "file_path": "client/src/engine/script.rs", "rank": 35, "score": 166239.0858702977 }, { "content": "fn cvars_toggle(args: &[&str], env: &mut Env) -> Result<Option<String>, String> {\n\n let mut output = Err(\"No such cvar.\".to_string());\n\n\n\n cvar::console::find(env.config, args[0], |node| {\n\n if let cvar::Node::Prop(prop) = node.as_node() {\n\n match prop.get().as_str() {\n\n \"true\" => {\n\n if prop.set(\"false\").is_ok() {\n\n output = Ok(None);\n\n }\n\n }\n\n \"false\" => {\n\n if prop.set(\"true\").is_ok() {\n\n output = Ok(None);\n\n }\n\n }\n\n _ => {\n\n output = Err(\"Value of cvar is not boolean\".to_string());\n\n }\n\n }\n", "file_path": "client/src/engine/script.rs", "rank": 36, "score": 166239.0858702977 }, { "content": "fn cvars_set(args: &[&str], env: &mut Env) -> Result<Option<String>, String> {\n\n match cvar::console::set(env.config, args[0], args[1]).unwrap() {\n\n true => {\n\n if let Err(err) = env.event_sender.try_send(Event::ConfigChanged) {\n\n log::error!(\"Cannot send ConfigChanged Event: {}\", err);\n\n }\n\n\n\n Ok(None)\n\n }\n\n false => Err(\"No such 
cvar.\".to_string()),\n\n }\n\n}\n\n\n", "file_path": "client/src/engine/script.rs", "rank": 37, "score": 166239.0858702977 }, { "content": "pub fn read_string<T: Read>(reader: &mut T, length: u32) -> Result<String, Box<dyn Error>> {\n\n let mut buffer: Vec<u8>;\n\n let byte = reader.read_u8()?;\n\n buffer = vec![0u8; byte as usize];\n\n reader.read_exact(buffer.as_mut_slice())?;\n\n\n\n let filler = length - u32::from(byte);\n\n for _i in 0..filler {\n\n let _ = reader.read_u8()?;\n\n }\n\n\n\n let x = String::from_utf8_lossy(&buffer).into_owned();\n\n\n\n Ok(x)\n\n}\n\n\n", "file_path": "client/src/mapfile.rs", "rank": 38, "score": 165123.74904507678 }, { "content": "pub fn load_image_rgba<P: AsRef<Path> + Clone>(\n\n fs: &mut gvfs::filesystem::Filesystem,\n\n filename: P,\n\n) -> image::RgbaImage {\n\n let mut file = fs.open(filename.clone()).expect(\"Error opening File\");\n\n let mut buffer = Vec::new();\n\n file.read_to_end(&mut buffer).expect(\"Error reading File\");\n\n let img = image::load(\n\n Cursor::new(buffer),\n\n image::ImageFormat::from_path(filename).unwrap(),\n\n )\n\n .unwrap();\n\n match img {\n\n image::DynamicImage::ImageRgba8(img) => img,\n\n _ => img.to_rgba8(),\n\n }\n\n}\n\n\n", "file_path": "gfx2d/src/extra.rs", "rank": 39, "score": 158699.6674496562 }, { "content": "fn register<T: Component + Clone>() -> CloneEntry {\n\n CloneEntry {\n\n type_id: TypeId::of::<T>(),\n\n type_name: type_name::<T>(),\n\n add_type: |batch_type| {\n\n batch_type.add::<T>();\n\n },\n\n add_values: |batch, arch| {\n\n let mut writer = batch\n\n .writer::<T>()\n\n .unwrap_or_else(|| panic!(\"Missing type from batch: {}\", type_name::<T>()));\n\n for item in arch\n\n .get::<T>()\n\n .unwrap_or_else(|| panic!(\"Missing type from archetype: {}\", type_name::<T>()))\n\n .iter()\n\n {\n\n if writer.push(item.clone()).is_err() {\n\n panic!()\n\n }\n\n }\n", "file_path": "shared/src/world.rs", "rank": 40, "score": 157127.40013500792 }, { "content": 
"#[allow(clippy::too_many_arguments)]\n\npub fn render_soldiers(\n\n world: &World,\n\n soldier_graphics: &SoldierGraphics,\n\n sprites: &[Vec<gfx2d::Sprite>],\n\n batch: &mut DrawBatch,\n\n debug_batch: &mut DrawBatch,\n\n frame_percent: f32,\n\n scale: f32,\n\n skeleton: bool,\n\n) {\n\n for (_entity, soldier) in world.query::<&Soldier>().iter() {\n\n let frame_percent = iif!(soldier.active, frame_percent, 1.0);\n\n render_soldier(soldier, soldier_graphics, sprites, batch, frame_percent);\n\n if skeleton {\n\n render_skeleton(soldier, debug_batch, scale, frame_percent);\n\n }\n\n }\n\n}\n", "file_path": "client/src/render/systems.rs", "rank": 41, "score": 156939.81083920898 }, { "content": "pub fn premultiply_image(img: &mut image::RgbaImage) {\n\n for pixel in img.pixels_mut() {\n\n let a = f32::from(pixel[3]) / 255.0;\n\n\n\n *pixel = image::Rgba([\n\n (f32::from(pixel[0]) * a) as u8,\n\n (f32::from(pixel[1]) * a) as u8,\n\n (f32::from(pixel[2]) * a) as u8,\n\n pixel[3],\n\n ]);\n\n }\n\n}\n\n\n", "file_path": "gfx2d/src/extra.rs", "rank": 42, "score": 156341.79604517415 }, { "content": "pub fn connection_ready() -> Bytes {\n\n vec![OperationCode::CCREQ_READY as u8].into()\n\n}\n\n\n", "file_path": "shared/src/messages.rs", "rank": 43, "score": 154035.70898729548 }, { "content": "pub fn connection_accept() -> Bytes {\n\n vec![\n\n OperationCode::CCREP_ACCEPT as u8,\n\n b'S',\n\n b'L',\n\n b'D',\n\n b'T',\n\n NET_PROTOCOL_VERSION,\n\n ]\n\n .into()\n\n}\n\n\n", "file_path": "shared/src/messages.rs", "rank": 44, "score": 154035.70898729548 }, { "content": "pub fn connection_request() -> Bytes {\n\n vec![\n\n OperationCode::CCREQ_CONNECT as u8,\n\n b'S',\n\n b'L',\n\n b'D',\n\n b'T',\n\n NET_PROTOCOL_VERSION,\n\n ]\n\n .into()\n\n}\n\n\n", "file_path": "shared/src/messages.rs", "rank": 45, "score": 154035.70898729548 }, { "content": "pub fn connection_reject() -> Bytes {\n\n vec![\n\n OperationCode::CCREP_REJECT as u8,\n\n b'S',\n\n b'L',\n\n b'D',\n\n b'T',\n\n 
NET_PROTOCOL_VERSION,\n\n ]\n\n .into()\n\n}\n\n\n", "file_path": "shared/src/messages.rs", "rank": 46, "score": 154035.70898729548 }, { "content": "pub fn soldier_movement(\n\n world: &mut World,\n\n resources: &Resources,\n\n config: &Config,\n\n mouse: (f32, f32),\n\n now: f64,\n\n) {\n\n let mut legs_parents = HashMap::new();\n\n for (entity, parent) in world\n\n .query::<With<game::components::Legs, &Parent>>()\n\n .iter()\n\n {\n\n legs_parents.insert(**parent, entity);\n\n }\n\n\n\n for (body, (mut soldier, input, pawn, mut body_vel, mut body_forces, body_mp)) in world\n\n .query::<(\n\n &mut Soldier,\n\n &Input,\n\n Option<&Pawn>,\n", "file_path": "client/src/game/systems/soldier.rs", "rank": 47, "score": 153323.25347923674 }, { "content": "/// Structures that implement the [`World`] trait are structures that are responsible for storing\n\n/// *and* simulating the game physics. The [`World`] is a simulation that is updated using its\n\n/// [`Stepper::step`] implementation. Players and any game logic outside of the physics simulation\n\n/// can interact with the physics simulation by applying [commands](Command) to the [`World`] (for\n\n/// example, a command to tell player 2's rigid body to jump, or a command to spawn a new player\n\n/// rigid body).\n\n///\n\n/// CrystalOrb needs two additional functionality for your world:\n\n/// (1) the ability to\n\n/// [create](World::snapshot) and [apply](World::apply_snapshot) [snapshots](World::SnapshotType)\n\n/// so that CrystalOrb can synchronize the states between the client and the server, and\n\n/// (2) the ability to [output](World::display_state) and [mix](DisplayState::from_interpolation)\n\n/// between the \"display state\" of the world.\n\n///\n\n/// # Conceptual examples with various physics engines\n\n///\n\n/// If you are using the [rapier physics\n\n/// engine](https://www.rapier.rs), then this [`World`] structure would contain things like the\n\n/// `PhysicsPipeline`, `BroadPhase`, `NarrowPhase`, 
`RigidBodySet`, `ColliderSet`, `JointSet`,\n\n/// `CCDSolver`, and any other pieces of game-specific state you need. The `Stepper::step` implementation\n\n/// for such a world would typically invoke `PhysicsPipeline::step`, as well as any other\n\n/// game-specific non-physics logic.\n\n///\n\n/// If you are using the [nphysics physics\n\n/// engine](https://www.nphysics.org), then this [`World`] structure would contain things like the\n\n/// `DefaultMechanicalWorld`, `DefaultGeometricalWorld`, `DefaultBodySet`, `DefaultColliderSet`,\n\n/// `DefaultJointConstraintSet`, and `DefaultForceGeneratorSet`, as well as any other pieces of\n\n/// game-specific state you need. The `Stepper::step` implementation for such a world would\n\n/// typically invoke `DefaultMechanicalWorld::step`, and any other game-specific update logic.\n\npub trait World: Stepper + Default + Send + Sync + 'static {\n\n type ClientId: Send + Sync + Copy + PartialEq + Debug;\n\n\n\n /// The command that can be used by the game and the player to interact with the physics\n\n /// simulation. Typically, this is an enum of some kind, but it is up to you.\n\n type CommandType: Command;\n\n\n\n /// The subset of state information about the world that can be used to fully recreate the\n\n /// world. Needs to be serializable so that it can be sent from the server to the client. There\n\n /// is nothing stopping you from making the [`SnapshotType`](World::SnapshotType) the same type\n\n /// as your [`World`] if you are ok with serializing the entire physics engine state. If you\n\n /// think sending your entire [`World`] is a bit too heavy-weighted, you can hand-craft and\n\n /// optimise your own `SnapshotType` structure to be as light-weight as possible.\n\n type SnapshotType: Debug + Clone + /*Serialize + DeserializeOwned +*/ Send + Sync + 'static;\n\n\n\n /// The subset of state information about the world that is to be displayed/rendered. 
This is\n\n /// used by the client to create the perfect blend of state information for the current\n\n /// rendering frame. You could make this [`DisplayState`](World::DisplayStateType) the same\n\n /// structure as your [`World`] if you don't mind CrystalOrb making lots of copies of your\n\n /// entire [`World`] structure and performing interpolation on all your state variables.\n", "file_path": "orb/src/world/world_trait.rs", "rank": 48, "score": 151912.95113983628 }, { "content": "pub fn read_vec3<T: Read>(reader: &mut T) -> Vec3 {\n\n let x = reader.read_f32::<LittleEndian>().unwrap();\n\n let y = reader.read_f32::<LittleEndian>().unwrap();\n\n let z = reader.read_f32::<LittleEndian>().unwrap();\n\n\n\n vec3(x, y, z)\n\n}\n", "file_path": "client/src/mapfile.rs", "rank": 49, "score": 147851.88646977436 }, { "content": "fn main() {\n\n color_eyre::install().unwrap();\n\n engine::Logger::init();\n\n\n\n let cmd = cli::parse_cli_args();\n\n\n\n let mut filesystem = Filesystem::new(clap::crate_name!(), \"Soldat2k\").unwrap();\n\n\n\n if let Ok(manifest_dir) = env::var(\"CARGO_MANIFEST_DIR\") {\n\n let mut path = path::PathBuf::from(manifest_dir);\n\n path.push(\"../resources\");\n\n filesystem.mount(path.canonicalize().unwrap().as_path(), true);\n\n }\n\n log::info!(\"Full VFS info: {:#?}\", filesystem);\n\n\n\n let mut mods = Vec::new();\n\n\n\n let soldat_smod = path::Path::new(\"/soldat.smod\");\n\n if filesystem.is_file(soldat_smod) {\n\n mods.push((\n", "file_path": "client/src/main.rs", "rank": 50, "score": 147724.52737070204 }, { "content": "type CommandFunction = fn(&[&str], &mut Env) -> Result<Option<String>, String>;\n\n\n\n#[derive(Clone)]\n\npub struct WorldEnv {\n\n inner: *mut hecs::World,\n\n}\n\n\n\nimpl ScriptEngine {\n\n pub fn new(\n\n event_send: BroadcastSender<Event>,\n\n event_recv: BroadcastReceiver<Event>,\n\n ) -> ScriptEngine {\n\n let mut commands = HashMap::new();\n\n\n\n commands.insert(\"exec\", (1, fake_command as 
CommandFunction));\n\n commands.insert(\"alias\", (2, alias_command as CommandFunction));\n\n commands.insert(\"get\", (1, cvars_get as CommandFunction));\n\n commands.insert(\"set\", (2, cvars_set as CommandFunction));\n\n commands.insert(\"toggle\", (1, cvars_toggle as CommandFunction));\n\n commands.insert(\"log\", (1, log_info as CommandFunction));\n", "file_path": "client/src/engine/script.rs", "rank": 51, "score": 142789.5363790562 }, { "content": "pub fn packet_verify(packet: &[u8]) -> bool {\n\n packet[1] == b'S'\n\n && packet[2] == b'L'\n\n && packet[3] == b'D'\n\n && packet[4] == b'T'\n\n && packet[5] == NET_PROTOCOL_VERSION\n\n}\n\n\n", "file_path": "shared/src/messages.rs", "rank": 52, "score": 142080.599132978 }, { "content": "fn unbind_all(_args: &[&str], env: &mut Env) -> Result<Option<String>, String> {\n\n env.input.unbind_all();\n\n Ok(None)\n\n}\n\n\n", "file_path": "client/src/engine/script.rs", "rank": 53, "score": 140425.54693469274 }, { "content": "pub fn remove_color_key(img: &mut image::RgbaImage, color_key: Color) {\n\n for pixel in img.pixels_mut() {\n\n if rgba(pixel[0], pixel[1], pixel[2], pixel[3]) == color_key {\n\n *pixel = image::Rgba([0, 0, 0, 0]);\n\n }\n\n }\n\n}\n", "file_path": "gfx2d/src/extra.rs", "rank": 54, "score": 140387.20877081112 }, { "content": "fn log_warn(args: &[&str], env: &mut Env) -> Result<Option<String>, String> {\n\n log::warn!(\"{}\", join_args(args, env));\n\n Ok(None)\n\n}\n\n\n", "file_path": "client/src/engine/script.rs", "rank": 55, "score": 138070.3498224511 }, { "content": "fn exit_game(_args: &[&str], _env: &mut Env) -> Result<Option<String>, String> {\n\n if cfg!(debug_assertions) {\n\n log::info!(\"Script exit\");\n\n std::process::abort();\n\n } else {\n\n log::warn!(\"Attempted script exit!\");\n\n }\n\n Ok(None)\n\n}\n\n\n", "file_path": "client/src/engine/script.rs", "rank": 56, "score": 138070.3498224511 }, { "content": "fn unbind_key(args: &[&str], env: &mut Env) -> Result<Option<String>, 
String> {\n\n match KeyBind::from_str(args[0]) {\n\n Ok(kc) => {\n\n env.input.unbind_key(kc);\n\n Ok(None)\n\n }\n\n Err(_) => Err(\"Unknown keycode.\".to_string()),\n\n }\n\n}\n\n\n", "file_path": "client/src/engine/script.rs", "rank": 57, "score": 138070.3498224511 }, { "content": "fn log_info(args: &[&str], env: &mut Env) -> Result<Option<String>, String> {\n\n log::info!(\"{}\", join_args(args, env));\n\n Ok(None)\n\n}\n", "file_path": "client/src/engine/script.rs", "rank": 58, "score": 138070.3498224511 }, { "content": "fn eval_rhai(args: &[&str], env: &mut Env) -> Result<Option<String>, String> {\n\n let res = env\n\n .engine\n\n .eval_expression::<Dynamic>(args.join(\" \").as_str())\n\n .map_err(|err| {\n\n log::error!(\"Failed to eval: {:?}\", err);\n\n \"Failed to eval.\".to_string()\n\n })?;\n\n\n\n Ok(Some(res.to_string()))\n\n}\n\n\n", "file_path": "client/src/engine/script.rs", "rank": 59, "score": 138070.3498224511 }, { "content": "fn bind_key(args: &[&str], env: &mut Env) -> Result<Option<String>, String> {\n\n let key = args[0].to_ascii_lowercase();\n\n let mut mods = KeyMods::default();\n\n\n\n let kb = if let Some(button) = key.strip_prefix(\"mouse\") {\n\n KeyBind::Mouse(match button {\n\n \"1\" => mq::MouseButton::Left,\n\n \"2\" => mq::MouseButton::Middle,\n\n \"3\" => mq::MouseButton::Right,\n\n _ => return Err(\"Unknown mouse button.\".to_string()),\n\n })\n\n } else if let Some(wheel) = key.strip_prefix(\"mwheel\") {\n\n KeyBind::Wheel(match wheel {\n\n \"up\" => Direction::Up,\n\n \"down\" => Direction::Down,\n\n \"left\" => Direction::Left,\n\n \"right\" => Direction::Right,\n\n _ => return Err(\"Unknown mouse wheel direction.\".to_string()),\n\n })\n\n } else {\n", "file_path": "client/src/engine/script.rs", "rank": 60, "score": 138070.3498224511 }, { "content": "fn fake_command(_args: &[&str], _env: &mut Env) -> Result<Option<String>, String> {\n\n Err(\"Called fake command.\".to_string())\n\n}\n\n\n", "file_path": 
"client/src/engine/script.rs", "rank": 61, "score": 138070.3498224511 }, { "content": "fn alias_command(args: &[&str], env: &mut Env) -> Result<Option<String>, String> {\n\n env.aliases.insert(args[0].to_string(), args[1..].join(\" \"));\n\n Ok(None)\n\n}\n\n\n", "file_path": "client/src/engine/script.rs", "rank": 62, "score": 138070.3498224511 }, { "content": "fn echo_args(args: &[&str], env: &mut Env) -> Result<Option<String>, String> {\n\n if args.is_empty() {\n\n Ok(None)\n\n } else {\n\n Ok(Some(join_args(args, env)))\n\n }\n\n}\n\n\n", "file_path": "client/src/engine/script.rs", "rank": 63, "score": 138070.3498224511 }, { "content": "fn log_error(args: &[&str], env: &mut Env) -> Result<Option<String>, String> {\n\n log::error!(\"{}\", join_args(args, env));\n\n Ok(None)\n\n}\n", "file_path": "client/src/engine/script.rs", "rank": 64, "score": 138070.3498224511 }, { "content": "fn default_motd() -> String {\n\n format!(\n\n \"{} {} - {}\",\n\n clap::crate_name!(),\n\n clap::crate_version!(),\n\n clap::crate_description!()\n\n )\n\n}\n\n\n\nimpl Default for ServerInfo {\n\n fn default() -> Self {\n\n Self {\n\n motd: default_motd(),\n\n }\n\n }\n\n}\n\n\n\nimpl IVisit for ServerInfo {\n\n fn visit(&mut self, f: &mut dyn FnMut(&mut dyn INode)) {\n\n f(&mut cvar::Property(\"motd\", &mut self.motd, default_motd()));\n\n }\n\n}\n", "file_path": "server/src/cvars.rs", "rank": 65, "score": 134088.21491278935 }, { "content": "fn add_scenery(batch: &mut DrawBatch, (prop, sprite): (&MapProp, &Sprite)) {\n\n let color = rgba(prop.color.r, prop.color.g, prop.color.b, prop.alpha);\n\n let mut sprite = sprite.clone();\n\n sprite.width = prop.width as f32;\n\n sprite.height = prop.height as f32;\n\n\n\n batch.add_sprite(\n\n &sprite,\n\n color,\n\n Transform::FromOrigin {\n\n pos: vec2(prop.x, prop.y),\n\n scale: vec2(prop.scale_x, prop.scale_y),\n\n rot: (-prop.rotation, vec2(0.0, 1.0)),\n\n },\n\n );\n\n}\n\n\n\nimpl MapGraphics {\n\n pub fn background(&mut self) -> 
DrawSlice {\n\n self.batch.slice(self.background.clone())\n", "file_path": "client/src/render/map.rs", "rank": 66, "score": 132781.86366038694 }, { "content": "fn add_poly(batch: &mut DrawBatch, poly: &MapPolygon, texture: &Texture) {\n\n let (a, b, c) = (&poly.vertices[0], &poly.vertices[1], &poly.vertices[2]);\n\n\n\n batch.add(\n\n Some(texture),\n\n &[\n\n vertex(\n\n vec2(a.x, a.y),\n\n vec2(a.u, a.v),\n\n rgba(a.color.r, a.color.g, a.color.b, a.color.a),\n\n ),\n\n vertex(\n\n vec2(b.x, b.y),\n\n vec2(b.u, b.v),\n\n rgba(b.color.r, b.color.g, b.color.b, b.color.a),\n\n ),\n\n vertex(\n\n vec2(c.x, c.y),\n\n vec2(c.u, c.v),\n\n rgba(c.color.r, c.color.g, c.color.b, c.color.a),\n\n ),\n\n ],\n\n );\n\n}\n\n\n", "file_path": "client/src/render/map.rs", "rank": 67, "score": 132781.86366038694 }, { "content": "pub fn apply_input(entity: EntityRef, control: Control) {\n\n if let Some(mut position) = entity.get_mut::<Position>() {\n\n if control.contains(Control::LEFT) {\n\n position.x -= 1.;\n\n }\n\n if control.contains(Control::RIGHT) {\n\n position.x += 1.;\n\n }\n\n if control.contains(Control::UP) {\n\n position.y -= 1.;\n\n }\n\n if control.contains(Control::DOWN) {\n\n position.y += 1.;\n\n }\n\n log::trace!(\"position {:?} / {:?}\", *position, control);\n\n }\n\n}\n", "file_path": "shared/src/systems/input.rs", "rank": 68, "score": 131003.67621960602 }, { "content": "pub fn pack_rects<T>(width: i32, height: i32, rects: &mut [Rect<T>]) -> usize {\n\n let mut bp = BinPack {\n\n used: vec![],\n\n free: vec![bp_rect(0, 0, width, height)],\n\n };\n\n\n\n for i in 0..rects.len() {\n\n let mut best_score = MAX_SCORE;\n\n let mut best_index = rects.len();\n\n let mut best_rect = bp_rect(0, 0, 0, 0);\n\n\n\n for (j, rect) in rects.iter().enumerate().skip(i) {\n\n let (rc, score) = score_rect(&bp, rect.w, rect.h);\n\n\n\n if score.0 < best_score.0 || (score.0 == best_score.0 && score.1 < best_score.1) {\n\n best_index = j;\n\n best_score = score;\n\n best_rect = 
rc;\n\n }\n\n }\n", "file_path": "gfx2d/src/binpack.rs", "rank": 69, "score": 129407.72793998211 }, { "content": "/// A command is a request to change the physics simulation in some way, issued from outside the\n\n/// physics simulation. It is the way in which players and any non-physics game logic can interact\n\n/// with the physics simulation.\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// use orb::command::Command;\n\n///\n\n/// #[derive(Debug, Clone)]\n\n/// struct Player(usize);\n\n///\n\n/// #[derive(Debug, Clone)]\n\n/// enum GameCommand {\n\n/// Spawn(Player),\n\n/// Despawn(Player),\n\n/// Jump(Player),\n\n/// Left(Player),\n\n/// Right(Player),\n\n/// }\n\n///\n\n/// impl Command for GameCommand {};\n\n/// ```\n\npub trait Command: Clone + Sync + Send + 'static + /*Serialize + DeserializeOwned +*/ Debug {}\n\n\n\n/// A handy structure for receiving commands out-of-order, and consuming them in-order. This also\n\n/// unintuitively keeps track of the \"current\" timestamp for whatever uses this [`CommandBuffer`].\n\n/// This is because the [`CommandBuffer`] needs to maintain an acceptable window of command\n\n/// timestamps centered around the current timestamp, or else the command timestamps would be too\n\n/// far about and [wouldn't be\n\n/// comparable](crate::timestamp::Timestamp::comparable_range_with_midpoint).\n\n#[derive(Clone, Debug)]\n\npub(crate) struct CommandBuffer<CommandType: Command> {\n\n map: BTreeMap<Reverse<Timestamp>, Vec<CommandType>>,\n\n timestamp: Timestamp,\n\n}\n\n\n\nimpl<CommandType: Command> Default for CommandBuffer<CommandType> {\n\n fn default() -> Self {\n\n Self {\n\n map: BTreeMap::new(),\n\n timestamp: Timestamp::default(),\n\n }\n", "file_path": "orb/src/command.rs", "rank": 70, "score": 128556.99672403224 }, { "content": "pub fn connection_authorized<S: AsRef<str>>(motd: S) -> Bytes {\n\n let motd = motd.as_ref().as_bytes();\n\n if motd.len() > u8::MAX as usize {\n\n log::error!(\"Server MOTD is longer than {} 
bytes\", u8::MAX);\n\n abort();\n\n }\n\n let mut msg = vec![OperationCode::CCREP_AUTHORIZED as u8];\n\n msg.push(motd.len() as u8);\n\n msg.extend_from_slice(motd);\n\n msg.into()\n\n}\n\n\n", "file_path": "shared/src/messages.rs", "rank": 71, "score": 127405.04568996833 }, { "content": "pub fn render_skeleton(soldier: &Soldier, batch: &mut DrawBatch, px: f32, frame_percent: f32) {\n\n let sk = &soldier.skeleton;\n\n\n\n for constraint in sk.constraints() {\n\n let pa = constraint.particle_num.0 as usize;\n\n let pb = constraint.particle_num.1 as usize;\n\n\n\n let a = lerp(sk.old_pos(pa), sk.pos(pa), frame_percent);\n\n let b = lerp(sk.old_pos(pb), sk.pos(pb), frame_percent);\n\n\n\n let m = Transform::WithPivot {\n\n pos: a,\n\n pivot: Vec2::ZERO,\n\n scale: vec2(distance(a, b), 1.0),\n\n rot: vec2angle(b - a),\n\n }\n\n .matrix();\n\n\n\n batch.add_quad(\n\n None,\n", "file_path": "client/src/render/soldiers.rs", "rank": 72, "score": 127222.64324939599 }, { "content": "fn is_prop_active(map: &MapFile, prop: &MapProp) -> bool {\n\n prop.active && prop.level <= 2 && prop.style > 0 && prop.style as usize <= map.scenery.len()\n\n}\n\n\n", "file_path": "client/src/render/map.rs", "rank": 73, "score": 124091.189723484 }, { "content": "pub fn physics(\n\n graphics: &mut GameGraphics,\n\n world: &World,\n\n _resources: &Resources,\n\n zoom: f32,\n\n scale: f32,\n\n) {\n\n use rapier2d::prelude::*;\n\n let cl = rgb(0, 255, 0);\n\n let th = 0.5 * zoom;\n\n\n\n for (_entity, rb) in world\n\n .query::<crate::physics::RigidBodyComponentsQuery>()\n\n .iter()\n\n {\n\n let cl = rgb(255, 255, 0);\n\n let Isometry {\n\n translation: tr,\n\n rotation: rot,\n\n } = rb.position.position;\n", "file_path": "client/src/render/debug.rs", "rank": 74, "score": 123612.97525241907 }, { "content": "pub fn debug_render(\n\n ctx: &mut Context,\n\n graphics: &mut GameGraphics,\n\n world: &World,\n\n resources: &Resources,\n\n config: &Config,\n\n) {\n\n let state = &config.debug.render;\n\n 
let map = resources.get::<MapFile>().unwrap();\n\n\n\n let screen_size = ctx.screen_size();\n\n let screen_scale = GAME_WIDTH / screen_size.0;\n\n\n\n let (camera, _pos) = world.get_camera_and_camera_position();\n\n let zoom = f32::exp(camera.zoom);\n\n\n\n // let fonts = resources.get::<HashMap<&str, FontId>>().unwrap();\n\n // let mut paint = Paint::color(Color::hex(\"B7410E\"));\n\n // paint.set_font(&[*fonts.get(\"roboto\").unwrap()]);\n\n // paint.set_font_size(40.0);\n", "file_path": "client/src/render/debug.rs", "rank": 75, "score": 121034.59194170736 }, { "content": "pub fn render_soldier(\n\n soldier: &Soldier,\n\n soldier_graphics: &SoldierGraphics,\n\n sprites: &[Vec<Sprite>],\n\n batch: &mut DrawBatch,\n\n frame_percent: f32,\n\n) {\n\n let sk = &soldier.skeleton;\n\n let (colors, alpha) = colors_and_alpha(soldier);\n\n let has_blood = alpha[SoldierAlpha::Blood as usize] > 0;\n\n let visible = parts_visibility(&soldier_graphics.base_visibility, soldier, has_blood);\n\n\n\n // TODO: team2 offset, dredlock rotation matrix\n\n\n\n for (i, part) in soldier_graphics.parts.iter().enumerate() {\n\n if visible[i] && !part.sprite.is_none() {\n\n let mut sprite_index: usize = 0;\n\n let cx = part.center.0;\n\n let mut cy = part.center.1;\n\n let mut scale = Vec2::ONE;\n", "file_path": "client/src/render/soldiers.rs", "rank": 76, "score": 121034.59194170736 }, { "content": "pub fn render_bullet(\n\n bullet: &Bullet,\n\n sprites: &[Vec<Sprite>],\n\n batch: &mut DrawBatch,\n\n _elapsed: f64,\n\n frame_percent: f32,\n\n) {\n\n let frame_percent = iif!(bullet.active, frame_percent, 1.0);\n\n let pos = lerp(bullet.particle.old_pos, bullet.particle.pos, frame_percent);\n\n\n\n match bullet.style {\n\n BulletStyle::ThrownKnife => {\n\n let t = lerp(\n\n bullet.timeout_prev as f32,\n\n bullet.timeout as f32,\n\n frame_percent,\n\n );\n\n\n\n let (rot, sprite) = {\n\n if bullet.particle.velocity.x >= 0.0 {\n", "file_path": "client/src/render/bullets.rs", "rank": 77, 
"score": 121034.59194170736 }, { "content": "pub fn meta() -> ShaderMeta {\n\n ShaderMeta {\n\n images: vec![\"sampler\".to_string()],\n\n uniforms: UniformBlockLayout {\n\n uniforms: vec![UniformDesc::new(\"transform\", UniformType::Mat4)],\n\n },\n\n }\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Debug, Clone, Copy)]\n\npub struct Vertex {\n\n pub pos: [f32; 2],\n\n pub texcoords: [f32; 2],\n\n pub color: [u8; 4],\n\n}\n\n\n\n#[repr(C)]\n\npub struct Uniforms {\n\n pub transform: glam::Mat4,\n\n}\n\n\n", "file_path": "gfx2d/src/context/pipeline.rs", "rank": 78, "score": 115685.61660665998 }, { "content": "pub fn params() -> PipelineParams {\n\n PipelineParams {\n\n primitive_type: PrimitiveType::Triangles,\n\n color_blend: Some(BlendState::new(\n\n Equation::Add,\n\n BlendFactor::One,\n\n BlendFactor::OneMinusValue(BlendValue::SourceAlpha),\n\n )),\n\n alpha_blend: Some(BlendState::new(\n\n Equation::Add,\n\n BlendFactor::One,\n\n BlendFactor::OneMinusValue(BlendValue::SourceAlpha),\n\n )),\n\n ..Default::default()\n\n }\n\n}\n", "file_path": "gfx2d/src/context/pipeline.rs", "rank": 79, "score": 115685.61660665998 }, { "content": "pub fn vertex(pos: glam::Vec2, uv: glam::Vec2, color: Color) -> Vertex {\n\n Vertex {\n\n pos: [pos.x, pos.y],\n\n uv: [uv.x, uv.y],\n\n color: [color.r, color.g, color.b, color.a],\n\n }\n\n}\n\n\n\npub struct Gfx2dContext {\n\n pipeline: Pipeline,\n\n bindings: Bindings,\n\n white_texture: Texture,\n\n}\n\n\n\nimpl Gfx2dContext {\n\n pub fn new(ctx: &mut Context) -> Gfx2dContext {\n\n let white_texture = Texture::from_rgba8(ctx, 1, 1, &[255, 255, 255, 255]);\n\n\n\n let shader = Shader::new(\n\n ctx,\n", "file_path": "gfx2d/src/context/mod.rs", "rank": 80, "score": 115579.97608103343 }, { "content": "use super::super::timestamp::Timestamped;\n\nuse std::{fmt::Debug, ops::Deref};\n\n\n\n/// The [`DisplayState`] represents the information about how to display the [`World`][world] at\n\n/// its current state. 
For example, while a [`World`][world] might contain information about\n\n/// player's position and velocities, some games may only need to know about the position to render\n\n/// it (unless you're doing some fancy motion-blur). You can think of a [`DisplayState`] as the\n\n/// \"output\" of a [`World`][world]. There is nothing stopping you from making the [`DisplayState`]\n\n/// the same structure as the [`World`][world] if it makes more sense for your game, but most of\n\n/// the time, the [`World`][world] structure may contain things that are inefficient to copy around\n\n/// (e.g. an entire physics engine)\n\n///\n\n/// [world]: [crate::world::World]\n", "file_path": "orb/src/world/display_state.rs", "rank": 81, "score": 114933.50092603482 }, { "content": " fn from(timestamped: Timestamped<T>) -> Self {\n\n Self {\n\n display_state: timestamped.inner().clone(),\n\n timestamp: i16::from(timestamped.timestamp()) as f64,\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::super::super::timestamp::Timestamp;\n\n use super::*;\n\n\n\n #[derive(Clone, Default, Debug, PartialEq)]\n\n struct MockDisplayState(f64);\n\n impl DisplayState for MockDisplayState {\n\n fn from_interpolation(state1: &Self, state2: &Self, t: f64) -> Self {\n\n Self(state1.0 * t + state2.0 * (1.0 - t))\n\n }\n\n }\n", "file_path": "orb/src/world/display_state.rs", "rank": 82, "score": 114931.13278559179 }, { "content": " } else if (t - 1.0).abs() < f64::EPSILON {\n\n state2.clone()\n\n } else {\n\n assert_eq!(state1.timestamp(), state2.timestamp(), \"Can only interpolate between timestamped states of the same timestamp. 
If timestamps differ, you will need to use Tweened::from_interpolation to also interpolate the timestamp value into a float.\");\n\n\n\n Self::new(\n\n DisplayState::from_interpolation(state1.inner(), state2.inner(), t),\n\n state1.timestamp(),\n\n )\n\n }\n\n }\n\n}\n\n\n\n/// This is the result when you interpolate/\"blend\"/\"tween\" between two [`DisplayState`]s of\n\n/// adjacent timestamps (similar to [\"Inbetweening\"](https://en.wikipedia.org/wiki/Inbetweening) in\n\n/// animation - the generation of intermediate frames). You get the [`DisplayState`] and a\n\n/// floating-point, non-whole-number timestamp.\n\n#[derive(Default, Debug, Clone, PartialEq)]\n\npub struct Tweened<T> {\n\n display_state: T,\n", "file_path": "orb/src/world/display_state.rs", "rank": 83, "score": 114930.85858151708 }, { "content": " /// the right transformations about the correct pivot points.\n\n ///\n\n /// [world]: [crate::world::World]\n\n fn from_interpolation(state1: &Self, state2: &Self, t: f64) -> Self;\n\n}\n\n\n\nimpl<T: DisplayState> DisplayState for Timestamped<T> {\n\n /// Interpolate between two timestamped display states. If the two timestamps are\n\n /// different, then the interpolation parameter `t` must be either `0.0` or `1.0`.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics if the timestamps are different but the interpolation parameter is not `0.0` nor\n\n /// `1.0`, since timestamps are whole number values and cannot be continuously\n\n /// interpolated. Interpolating between two display states of different timestamps is known\n\n /// as \"tweening\" (i.e. 
animation in-betweening) and should be done using\n\n /// [`Tweened::from_interpolation`].\n\n fn from_interpolation(state1: &Self, state2: &Self, t: f64) -> Self {\n\n if t == 0.0 {\n\n state1.clone()\n", "file_path": "orb/src/world/display_state.rs", "rank": 84, "score": 114928.65809186932 }, { "content": " timestamp: f64,\n\n}\n\n\n\nimpl<T: DisplayState> Tweened<T> {\n\n /// Get the resulting in-between [`DisplayState`].\n\n pub fn display_state(&self) -> &T {\n\n &self.display_state\n\n }\n\n\n\n /// Get the \"logical timestamp\" that [`Tweened::display_state`] corresponds with. For\n\n /// example, a `float_timestamp` of `123.4` represents the in-between frame that is 40% of\n\n /// the way between frame `123` and frame `124`.\n\n pub fn float_timestamp(&self) -> f64 {\n\n self.timestamp\n\n }\n\n}\n\n\n\nimpl<T: DisplayState> Deref for Tweened<T> {\n\n type Target = T;\n\n fn deref(&self) -> &Self::Target {\n", "file_path": "orb/src/world/display_state.rs", "rank": 85, "score": 114925.08607109374 }, { "content": " &self.display_state\n\n }\n\n}\n\n\n\nimpl<T: DisplayState> Tweened<T> {\n\n /// Interpolate between two timestamped dispay states to find the in-between display state.\n\n pub fn from_interpolation(state1: &Timestamped<T>, state2: &Timestamped<T>, t: f64) -> Self {\n\n // Note: timestamps are in modulo arithmetic, so we need to work using the wrapped\n\n // difference value.\n\n let timestamp_difference: i16 = (state2.timestamp() - state1.timestamp()).into();\n\n let timestamp_offset: f64 = t * (timestamp_difference as f64);\n\n let timestamp_interpolated = i16::from(state1.timestamp()) as f64 + timestamp_offset;\n\n Self {\n\n display_state: T::from_interpolation(state1.inner(), state2.inner(), t),\n\n timestamp: timestamp_interpolated,\n\n }\n\n }\n\n}\n\n\n\nimpl<T: DisplayState> From<Timestamped<T>> for Tweened<T> {\n", "file_path": "orb/src/world/display_state.rs", "rank": 86, "score": 114922.49230842882 }, { "content": "\n\n #[test]\n\n fn 
when_interpolating_displaystate_with_t_0_then_state1_is_returned() {\n\n // GIVEN\n\n let state1 = Timestamped::new(MockDisplayState(4.0), Timestamp::default() + 2);\n\n let state2 = Timestamped::new(MockDisplayState(8.0), Timestamp::default() + 5);\n\n\n\n // WHEN\n\n let interpolated = DisplayState::from_interpolation(&state1, &state2, 0.0);\n\n\n\n // THEN\n\n assert_eq!(state1, interpolated);\n\n }\n\n\n\n #[test]\n\n fn when_interpolating_displaystate_with_t_1_then_state2_is_returned() {\n\n // GIVEN\n\n let state1 = Timestamped::new(MockDisplayState(4.0), Timestamp::default() + 2);\n\n let state2 = Timestamped::new(MockDisplayState(8.0), Timestamp::default() + 5);\n\n\n\n // WHEN\n\n let interpolated = DisplayState::from_interpolation(&state1, &state2, 1.0);\n\n\n\n // THEN\n\n assert_eq!(state2, interpolated);\n\n }\n\n}\n", "file_path": "orb/src/world/display_state.rs", "rank": 87, "score": 114921.92042697546 }, { "content": "pub fn vec2normalize(v: Vec2) -> Vec2 {\n\n let magnitude = v.length();\n\n iif!(magnitude < 0.001, Vec2::ZERO, v / magnitude)\n\n}\n\n\n", "file_path": "client/src/calc.rs", "rank": 88, "score": 113134.30787098754 }, { "content": "pub fn vec2angle(v: Vec2) -> Rad {\n\n Vec2::X.angle_between(v)\n\n}\n\n\n", "file_path": "client/src/calc.rs", "rank": 89, "score": 113134.30787098754 }, { "content": "pub fn vec2length(v: Vec2) -> f32 {\n\n v.length()\n\n}\n\n\n", "file_path": "client/src/calc.rs", "rank": 90, "score": 113134.30787098754 }, { "content": "#[derive(Debug)]\n\nstruct ClientWorldSimulations<WorldType: World> {\n\n /// The next server snapshot that needs applying after the current latest snapshot has been\n\n /// fully interpolated into.\n\n queued_snapshot: Option<Timestamped<WorldType::SnapshotType>>,\n\n\n\n /// The timestamp of the last queued snapshot from the server, so we can discard stale\n\n /// snapshots from the server when the arrive out of order. 
This persists even after the queued\n\n /// snapshot has been cleared after it has been applied to the world.\n\n last_queued_snapshot_timestamp: Option<Timestamp>,\n\n\n\n /// The timestamp of the last received snapshot from the server, regardless of whether it\n\n /// was discarded or accepted (since we only keep the latest snapshot). This is primarily\n\n /// for diagnostic purposes.\n\n last_received_snapshot_timestamp: Option<Timestamp>,\n\n\n\n /// The command buffer that is used to initialize the new world simulation's command\n\n /// buffers whenever a queued snapshot is applied to it. Contains older commands that the\n\n /// individual world simulation's internal command buffers would have already dropped, but\n\n /// would otherwise need to replay onto the server snapshot to get it back to the current\n\n /// timestamp.\n", "file_path": "orb/src/client.rs", "rank": 91, "score": 111714.08560041498 }, { "content": "fn split_free_rect(bp: &mut BinPack, free_rect: &BPRect, used_rect: &BPRect) -> bool {\n\n if used_rect.left() >= free_rect.right() || used_rect.right() <= free_rect.left()\n\n || used_rect.top() >= free_rect.bottom() || used_rect.bottom() <= free_rect.top()\n\n {\n\n return false;\n\n }\n\n\n\n if used_rect.left() < free_rect.right() && used_rect.right() > free_rect.left() {\n\n if used_rect.top() > free_rect.top() && used_rect.top() < free_rect.bottom() {\n\n bp.free.push(BPRect {\n\n h: used_rect.y - free_rect.y,\n\n ..*free_rect\n\n });\n\n }\n\n\n\n if used_rect.bottom() < free_rect.bottom() {\n\n bp.free.push(BPRect {\n\n y: used_rect.bottom(),\n\n h: free_rect.bottom() - used_rect.bottom(),\n\n ..*free_rect\n", "file_path": "gfx2d/src/binpack.rs", "rank": 92, "score": 111239.74564388569 }, { "content": "pub fn trace_dump_packet(data: &[u8]) {\n\n for (n, line) in hexdump_iter(data).enumerate() {\n\n log::trace!(\" {:3} {}\", n, line);\n\n }\n\n}\n", "file_path": "shared/src/lib.rs", "rank": 93, "score": 110822.33602904488 }, { "content": 
"pub fn rad(angle: f32) -> Rad {\n\n angle\n\n}\n\n\n", "file_path": "gfx2d/src/math.rs", "rank": 94, "score": 110730.03482886383 }, { "content": "pub fn deg(angle: f32) -> Deg {\n\n angle\n\n}\n\n\n\n// Mat2d\n\n\n\n// Indexed by row. Works like a 3x3 matrix where last row is always [0, 0, 1]\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Mat2d(pub(crate) (f32, f32, f32), pub(crate) (f32, f32, f32));\n\n\n\nimpl Mat2d {\n\n pub fn identity() -> Mat2d {\n\n Mat2d((1.0, 0.0, 0.0), (0.0, 1.0, 0.0))\n\n }\n\n\n\n pub fn translate(x: f32, y: f32) -> Mat2d {\n\n Mat2d((1.0, 0.0, x), (0.0, 1.0, y))\n\n }\n\n\n\n pub fn scale(x: f32, y: f32) -> Mat2d {\n", "file_path": "gfx2d/src/math.rs", "rank": 95, "score": 110730.03482886383 }, { "content": "fn prune_free_list(bp: &mut BinPack) {\n\n let mut i = 0;\n\n\n\n while i < bp.free.len() {\n\n let mut j = i + 1;\n\n\n\n while j < bp.free.len() {\n\n if bp.free[j].contains(&bp.free[i]) {\n\n bp.free.remove(i);\n\n i = i.wrapping_sub(1);\n\n break;\n\n } else if bp.free[i].contains(&bp.free[j]) {\n\n bp.free.remove(j);\n\n } else {\n\n j += 1;\n\n }\n\n }\n\n\n\n i = i.wrapping_add(1);\n\n }\n\n}\n", "file_path": "gfx2d/src/binpack.rs", "rank": 96, "score": 110007.63149638863 }, { "content": "pub trait WorldCameraExt {\n\n fn make_active_camera(&mut self, entity: Entity) -> Result<(), SimpleError>;\n\n fn get_active_camera(&self) -> Option<Entity>;\n\n fn get_camera_and_camera_position(&self) -> (Camera, Position);\n\n}\n\n\n\nimpl WorldCameraExt for World {\n\n fn make_active_camera(&mut self, entity: Entity) -> Result<(), SimpleError> {\n\n let mut set_camera = false;\n\n if let Ok(mut camera) = self.get_mut::<Camera>(entity) {\n\n camera.is_active = true;\n\n set_camera = true;\n\n }\n\n\n\n if set_camera {\n\n for (id, mut camera_to_disable) in self.query::<&mut Camera>().iter() {\n\n if id != entity {\n\n camera_to_disable.is_active = false;\n\n }\n\n }\n", "file_path": "client/src/engine/world.rs", "rank": 97, "score": 
107301.26704928975 }, { "content": "pub trait SpriteData: Send + Sync + std::fmt::Debug {\n\n fn id(&self) -> usize;\n\n fn name(&self) -> &str;\n\n fn group(&self) -> Group;\n\n fn filename(&self) -> &'static str;\n\n fn values() -> &'static [Self]\n\n where\n\n Self: std::marker::Sized;\n\n}\n\n\n\ninclude!(\"gfx_macro.rs\");\n\n\n\nimpl std::convert::From<usize> for SoldierPart {\n\n fn from(id: usize) -> SoldierPart {\n\n match SoldierPart::values().get(id as usize) {\n\n Some(&v) => v,\n\n _ => panic!(\"Invalid sprite identifier.\"),\n\n }\n\n }\n\n}\n", "file_path": "client/src/render/gfx.rs", "rank": 98, "score": 105320.33841560801 }, { "content": "fn load_animations(fs: &mut Filesystem) -> Vec<AnimData> {\n\n let data = [\n\n (Anim::Stand, \"stoi.poa\", 3, true),\n\n (Anim::Run, \"biega.poa\", 1, true),\n\n (Anim::RunBack, \"biegatyl.poa\", 1, true),\n\n (Anim::Jump, \"skok.poa\", 1, false),\n\n (Anim::JumpSide, \"skokwbok.poa\", 1, false),\n\n (Anim::Fall, \"spada.poa\", 1, false),\n\n (Anim::Crouch, \"kuca.poa\", 1, false),\n\n (Anim::CrouchRun, \"kucaidzie.poa\", 2, true),\n\n (Anim::Reload, \"laduje.poa\", 2, false),\n\n (Anim::Throw, \"rzuca.poa\", 1, false),\n\n (Anim::Recoil, \"odrzut.poa\", 1, false),\n\n (Anim::SmallRecoil, \"odrzut2.poa\", 1, false),\n\n (Anim::Shotgun, \"shotgun.poa\", 1, false),\n\n (Anim::ClipOut, \"clipout.poa\", 3, false),\n\n (Anim::ClipIn, \"clipin.poa\", 3, false),\n\n (Anim::SlideBack, \"slideback.poa\", 2, false),\n\n (Anim::Change, \"change.poa\", 1, false),\n\n (Anim::ThrowWeapon, \"wyrzuca.poa\", 1, false),\n", "file_path": "client/src/anims.rs", "rank": 99, "score": 103464.70759914964 } ]
Rust
lib/src/settings.rs
orhun/pueue
8b033231e8b95a987d284c621b45ed20203700d3
use std::collections::HashMap; use std::fs::{create_dir_all, File}; use std::io::{prelude::*, BufReader}; use std::path::{Path, PathBuf}; use log::info; use serde_derive::{Deserialize, Serialize}; use shellexpand::tilde; use crate::error::Error; use crate::platform::directories::*; use crate::setting_defaults::*; #[derive(PartialEq, Clone, Debug, Default, Deserialize, Serialize)] pub struct Shared { pub pueue_directory: Option<PathBuf>, pub runtime_directory: Option<PathBuf>, #[cfg(not(target_os = "windows"))] #[serde(default = "default_true")] pub use_unix_socket: bool, pub pid_path: Option<PathBuf>, #[cfg(not(target_os = "windows"))] pub unix_socket_path: Option<PathBuf>, #[serde(default = "default_host")] pub host: String, #[serde(default = "default_port")] pub port: String, pub daemon_cert: Option<PathBuf>, pub daemon_key: Option<PathBuf>, pub shared_secret_path: Option<PathBuf>, } #[derive(PartialEq, Clone, Debug, Default, Deserialize, Serialize)] pub struct Client { #[serde(default = "Default::default")] pub restart_in_place: bool, #[serde(default = "default_true")] pub read_local_logs: bool, #[serde(default = "Default::default")] pub show_confirmation_questions: bool, #[serde(default = "Default::default")] pub show_expanded_aliases: bool, #[serde(default = "Default::default")] pub dark_mode: bool, pub max_status_lines: Option<usize>, #[serde(default = "default_status_time_format")] pub status_time_format: String, #[serde(default = "default_status_datetime_format")] pub status_datetime_format: String, } #[derive(PartialEq, Clone, Debug, Default, Deserialize, Serialize)] pub struct Daemon { #[serde(default = "Default::default")] pub pause_group_on_failure: bool, #[serde(default = "Default::default")] pub pause_all_on_failure: bool, pub callback: Option<String>, #[serde(default = "default_callback_log_lines")] pub callback_log_lines: usize, #[serde(skip_serializing)] #[deprecated( since = "1.1.0", note = "The configuration for groups is now stored in the 
state." )] pub groups: Option<HashMap<String, i64>>, } impl Default for Settings { fn default() -> Self { Settings { client: Client { read_local_logs: true, status_time_format: default_status_time_format(), status_datetime_format: default_status_datetime_format(), ..Default::default() }, daemon: Daemon { callback_log_lines: default_callback_log_lines(), ..Default::default() }, shared: Shared { #[cfg(not(target_os = "windows"))] use_unix_socket: true, host: default_host(), port: default_port(), ..Default::default() }, profiles: HashMap::new(), } } } #[derive(PartialEq, Clone, Debug, Deserialize, Serialize)] pub struct Settings { #[serde(default = "Default::default")] pub client: Client, #[serde(default = "Default::default")] pub daemon: Daemon, pub shared: Shared, #[serde(default = "HashMap::new")] pub profiles: HashMap<String, NestedSettings>, } #[derive(PartialEq, Clone, Debug, Deserialize, Serialize)] pub struct NestedSettings { #[serde(default = "Default::default")] pub client: Client, #[serde(default = "Default::default")] pub daemon: Daemon, #[serde(default = "Default::default")] pub shared: Shared, } pub fn expand_home(old_path: &Path) -> PathBuf { PathBuf::from(tilde(&old_path.to_string_lossy()).into_owned()) } impl Shared { pub fn pueue_directory(&self) -> PathBuf { if let Some(path) = &self.pueue_directory { expand_home(path) } else { default_pueue_path() } } pub fn runtime_directory(&self) -> PathBuf { if let Some(path) = &self.runtime_directory { expand_home(path) } else if let Some(path) = default_runtime_directory() { path } else { default_pueue_path() } } #[cfg(not(target_os = "windows"))] pub fn unix_socket_path(&self) -> PathBuf { if let Some(path) = &self.unix_socket_path { expand_home(path) } else { self.runtime_directory() .join(format!("pueue_{}.socket", whoami::username())) } } pub fn pid_path(&self) -> PathBuf { if let Some(path) = &self.pid_path { expand_home(path) } else { self.runtime_directory().join("pueue.pid") } } pub fn 
daemon_cert(&self) -> PathBuf { if let Some(path) = &self.daemon_cert { expand_home(path) } else { self.pueue_directory().join("certs").join("daemon.cert") } } pub fn daemon_key(&self) -> PathBuf { if let Some(path) = &self.daemon_key { expand_home(path) } else { self.pueue_directory().join("certs").join("daemon.key") } } pub fn shared_secret_path(&self) -> PathBuf { if let Some(path) = &self.shared_secret_path { expand_home(path) } else { self.pueue_directory().join("shared_secret") } } } impl Settings { pub fn read(from_file: &Option<PathBuf>) -> Result<(Settings, bool), Error> { if let Some(path) = from_file { let file = File::open(path) .map_err(|err| Error::IoPathError(path.clone(), "opening config file", err))?; let reader = BufReader::new(file); let settings = serde_yaml::from_reader(reader) .map_err(|err| Error::ConfigDeserialization(err.to_string()))?; return Ok((settings, true)); }; info!("Parsing config files"); for directory in get_config_directories().into_iter() { let path = directory.join("pueue.yml"); info!("Checking path: {path:?}"); if path.exists() && path.is_file() { info!("Found config file at: {path:?}"); let file = File::open(&path) .map_err(|err| Error::IoPathError(path, "opening config file.", err))?; let reader = BufReader::new(file); let settings = serde_yaml::from_reader(reader) .map_err(|err| Error::ConfigDeserialization(err.to_string()))?; return Ok((settings, true)); } } info!("No config file found. 
Use default config."); Ok((Settings::default(), false)) } pub fn save(&self, path: &Option<PathBuf>) -> Result<(), Error> { let config_path = if let Some(path) = path { path.clone() } else { default_config_directory().join("pueue.yml") }; let config_dir = config_path .parent() .ok_or_else(|| Error::InvalidPath("Couldn't resolve config directory".into()))?; if !config_dir.exists() { create_dir_all(&config_dir).map_err(|err| { Error::IoPathError(config_dir.to_path_buf(), "creating config dir", err) })?; } let content = match serde_yaml::to_string(self) { Ok(content) => content, Err(error) => { return Err(Error::Generic(format!( "Configuration file serialization failed:\n{error}" ))) } }; let mut file = File::create(&config_path).map_err(|err| { Error::IoPathError(config_dir.to_path_buf(), "creating settings file", err) })?; file.write_all(content.as_bytes()).map_err(|err| { Error::IoPathError(config_dir.to_path_buf(), "writing settings file", err) })?; Ok(()) } pub fn load_profile(&mut self, profile: &str) -> Result<(), Error> { let profile = self.profiles.remove(profile).ok_or_else(|| { Error::ConfigDeserialization(format!("Couldn't find profile with name \"{profile}\"")) })?; self.client = profile.client; self.daemon = profile.daemon; self.shared = profile.shared; Ok(()) } } #[cfg(test)] mod test { use super::*; #[test] fn test_load_profile() { let mut settings = Settings::default(); assert_eq!( settings.client.status_time_format, default_status_time_format() ); assert_eq!( settings.daemon.callback_log_lines, default_callback_log_lines() ); assert_eq!(settings.shared.host, default_host()); let mut profile = Settings::default(); profile.client.status_time_format = "test".to_string(); profile.daemon.callback_log_lines = 100_000; profile.shared.host = "quatschhost".to_string(); let profile = NestedSettings { client: profile.client, daemon: profile.daemon, shared: profile.shared, }; settings.profiles.insert("testprofile".to_string(), profile); settings 
.load_profile("testprofile") .expect("We just added the profile"); assert_eq!(settings.client.status_time_format, "test"); assert_eq!(settings.daemon.callback_log_lines, 100_000); assert_eq!(settings.shared.host, "quatschhost"); } #[test] fn test_error_on_missing_profile() { let mut settings = Settings::default(); let result = settings.load_profile("doesn't exist"); let expected_error_message = "Couldn't find profile with name \"doesn't exist\""; if let Err(Error::ConfigDeserialization(error_message)) = result { assert_eq!(error_message, expected_error_message); return; } panic!("Got unexpected result when expecting missing profile error: {result:?}"); } }
use std::collections::HashMap; use std::fs::{create_dir_all, File}; use std::io::{prelude::*, BufReader}; use std::path::{Path, PathBuf}; use log::info; use serde_derive::{Deserialize, Serialize}; use shellexpand::tilde; use crate::error::Error; use crate::platform::directories::*; use crate::setting_defaults::*; #[derive(PartialEq, Clone, Debug, Default, Deserialize, Serialize)] pub struct Shared { pub pueue_directory: Option<PathBuf>, pub runtime_directory: Option<PathBuf>, #[cfg(not(target_os = "windows"))] #[serde(default = "default_true")] pub use_unix_socket: bool, pub pid_path: Option<PathBuf>, #[cfg(not(target_os = "windows"))] pub unix_socket_path: Option<PathBuf>, #[serde(default = "default_host")] pub host: String, #[serde(default = "default_port")] pub port: String, pub daemon_cert: Option<PathBuf>, pub daemon_key: Option<PathBuf>, pub shared_secret_path: Option<PathBuf>, } #[derive(PartialEq, Clone, Debug, Default, Deserialize, Serialize)] pub struct Client { #[serde(default = "Default::default")] pub restart_in_place: bool, #[serde(default = "default_true")] pub read_local_logs: bool, #[serde(default = "Default::default")] pub show_confirmation_questions: bool, #[serde(default = "Default::default")] pub show_expanded_aliases: bool, #[serde(default = "Default::default")] pub dark_mode: bool, pub max_status_lines: Option<usize>, #[serde(default = "default_status_time_format")] pub status_time_format: String, #[serde(default = "default_status_datetime_format")] pub status_datetime_format: String, } #[derive(PartialEq, Clone, Debug, Default, Deserialize, Serialize)] pub struct Daemon { #[serde(default = "Default::default")] pub pause_group_on_failure: bool, #[serde(default = "Default::default")] pub pause_all_on_failure: bool, pub callback: Option<String>, #[serde(default = "default_callback_log_lines")] pub callback_log_lines: usize, #[serde(skip_serializing)] #[deprecated( since = "1.1.0", note = "The configuration for groups is now stored in the 
state." )] pub groups: Option<HashMap<String, i64>>, } impl Default for Settings { fn default() -> Self { Settings { client: Client { read_local_logs: true, status_time_format: default_status_time_format(), status_datetime_format: default_status_datetime_format(), ..Default::default() }, daemon: Daemon { callback_log_lines: default_callback_log_lines(), ..Default::default() }, shared: Shared { #[cfg(not(target_os = "windows"))] use_unix_socket: true, host: default_host(), port: default_port(), ..Default::default() }, profiles: HashMap::new(), } } } #[derive(PartialEq, Clone, Debug, Deserialize, Serialize)] pub struct Settings { #[serde(default = "Default::default")] pub client: Client, #[serde(default = "Default::default")] pub daemon: Daemon, pub shared: Shared, #[serde(default = "HashMap::new")] pub profiles: HashMap<String, NestedSettings>, } #[derive(PartialEq, Clone, Debug, Deserialize, Serialize)] pub struct NestedSettings { #[serde(default = "Default::default")] pub client: Client, #[serde(default = "Default::default")] pub daemon: Daemon, #[serde(default = "Default::default")] pub shared: Shared, } pub fn expand_home(old_path: &Path) -> PathBuf { PathBuf::from(tilde(&old_path.to_string_lossy()).into_owned()) } impl Shared { pub fn pueue_directory(&self) -> PathBuf { if let Some(path) = &self.pueue_directory { expand_home(path) } else { default_pueue_path() } } pub fn runtime_directory(&self) -> PathBuf { if let Some(path) = &self.runtime_directory { expand_home(path) } else if let Some(path) = default_runtime_directory() { path } else { default_pueue_path() } } #[cfg(not(target_os = "windows"))] pub fn unix_socket_path(&self) -> PathBuf { if let Some(path) = &self.unix_socket_path { expand_home(path) } else { self.runtime_directory() .join(format!("pueue_{}.socket", whoami::username())) } } pub fn pid_path(&self) -> PathBuf { if let Some(path) = &self.pid_path { expand_home(path) } else { self.runtime_directory().join("pueue.pid") } } pub fn 
daemon_cert(&self) -> PathBuf { if let Some(path) = &self.daemon_cert { expand_home(path) } else { self.pueue_directory().join("certs").join("daemon.cert") } } pub fn daemon_key(&self) -> PathBuf { if let Some(path) = &self.daemon_key { expand_home(path) } else { self.pueue_directory().join("certs").join("daemon.key") } }
} impl Settings { pub fn read(from_file: &Option<PathBuf>) -> Result<(Settings, bool), Error> { if let Some(path) = from_file { let file = File::open(path) .map_err(|err| Error::IoPathError(path.clone(), "opening config file", err))?; let reader = BufReader::new(file); let settings = serde_yaml::from_reader(reader) .map_err(|err| Error::ConfigDeserialization(err.to_string()))?; return Ok((settings, true)); }; info!("Parsing config files"); for directory in get_config_directories().into_iter() { let path = directory.join("pueue.yml"); info!("Checking path: {path:?}"); if path.exists() && path.is_file() { info!("Found config file at: {path:?}"); let file = File::open(&path) .map_err(|err| Error::IoPathError(path, "opening config file.", err))?; let reader = BufReader::new(file); let settings = serde_yaml::from_reader(reader) .map_err(|err| Error::ConfigDeserialization(err.to_string()))?; return Ok((settings, true)); } } info!("No config file found. Use default config."); Ok((Settings::default(), false)) } pub fn save(&self, path: &Option<PathBuf>) -> Result<(), Error> { let config_path = if let Some(path) = path { path.clone() } else { default_config_directory().join("pueue.yml") }; let config_dir = config_path .parent() .ok_or_else(|| Error::InvalidPath("Couldn't resolve config directory".into()))?; if !config_dir.exists() { create_dir_all(&config_dir).map_err(|err| { Error::IoPathError(config_dir.to_path_buf(), "creating config dir", err) })?; } let content = match serde_yaml::to_string(self) { Ok(content) => content, Err(error) => { return Err(Error::Generic(format!( "Configuration file serialization failed:\n{error}" ))) } }; let mut file = File::create(&config_path).map_err(|err| { Error::IoPathError(config_dir.to_path_buf(), "creating settings file", err) })?; file.write_all(content.as_bytes()).map_err(|err| { Error::IoPathError(config_dir.to_path_buf(), "writing settings file", err) })?; Ok(()) } pub fn load_profile(&mut self, profile: &str) -> Result<(), 
Error> { let profile = self.profiles.remove(profile).ok_or_else(|| { Error::ConfigDeserialization(format!("Couldn't find profile with name \"{profile}\"")) })?; self.client = profile.client; self.daemon = profile.daemon; self.shared = profile.shared; Ok(()) } } #[cfg(test)] mod test { use super::*; #[test] fn test_load_profile() { let mut settings = Settings::default(); assert_eq!( settings.client.status_time_format, default_status_time_format() ); assert_eq!( settings.daemon.callback_log_lines, default_callback_log_lines() ); assert_eq!(settings.shared.host, default_host()); let mut profile = Settings::default(); profile.client.status_time_format = "test".to_string(); profile.daemon.callback_log_lines = 100_000; profile.shared.host = "quatschhost".to_string(); let profile = NestedSettings { client: profile.client, daemon: profile.daemon, shared: profile.shared, }; settings.profiles.insert("testprofile".to_string(), profile); settings .load_profile("testprofile") .expect("We just added the profile"); assert_eq!(settings.client.status_time_format, "test"); assert_eq!(settings.daemon.callback_log_lines, 100_000); assert_eq!(settings.shared.host, "quatschhost"); } #[test] fn test_error_on_missing_profile() { let mut settings = Settings::default(); let result = settings.load_profile("doesn't exist"); let expected_error_message = "Couldn't find profile with name \"doesn't exist\""; if let Err(Error::ConfigDeserialization(error_message)) = result { assert_eq!(error_message, expected_error_message); return; } panic!("Got unexpected result when expecting missing profile error: {result:?}"); } }
pub fn shared_secret_path(&self) -> PathBuf { if let Some(path) = &self.shared_secret_path { expand_home(path) } else { self.pueue_directory().join("shared_secret") } }
function_block-full_function
[ { "content": "/// This is a small helper which either returns a given group or the default group.\n\npub fn group_or_default(group: &Option<String>) -> String {\n\n group\n\n .clone()\n\n .unwrap_or_else(|| PUEUE_DEFAULT_GROUP.to_string())\n\n}\n\n\n", "file_path": "client/client.rs", "rank": 0, "score": 326908.95006968966 }, { "content": "/// Check if a task can be deleted. \\\n\n/// We have to check all dependant tasks, that haven't finished yet.\n\n/// This is necessary to prevent deletion of tasks which are specified as a dependency.\n\n///\n\n/// `to_delete` A list of task ids, which should also be deleted.\n\n/// This allows to remove dependency tasks as well as their dependants.\n\npub fn is_task_removable(state: &LockedState, task_id: &usize, to_delete: &[usize]) -> bool {\n\n // Get all task ids of any dependant tasks.\n\n let dependants: Vec<usize> = state\n\n .tasks\n\n .iter()\n\n .filter(|(_, task)| {\n\n task.dependencies.contains(task_id) && !matches!(task.status, TaskStatus::Done(_))\n\n })\n\n .map(|(_, task)| task.id)\n\n .collect();\n\n\n\n if dependants.is_empty() {\n\n return true;\n\n }\n\n\n\n // Check if the dependants are supposed to be deleted as well.\n\n let should_delete_dependants = dependants.iter().all(|task_id| to_delete.contains(task_id));\n\n if !should_delete_dependants {\n\n return false;\n\n }\n\n\n\n // Lastly, do a recursive check if there are any dependants on our dependants\n\n dependants\n\n .iter()\n\n .all(|task_id| is_task_removable(state, task_id, to_delete))\n\n}\n\n\n", "file_path": "daemon/state_helper.rs", "rank": 1, "score": 272953.82606509374 }, { "content": "pub fn default_pueue_path() -> PathBuf {\n\n data_local_dir().join(\"pueue\")\n\n}\n\n\n", "file_path": "lib/src/platform/windows/directories.rs", "rank": 2, "score": 268529.3255694893 }, { "content": "/// Invoked on `pueue groups`.\n\n/// Manage groups.\n\n/// - Show groups\n\n/// - Add group\n\n/// - Remove group\n\npub fn group(message: GroupMessage, 
sender: &Sender<Message>, state: &SharedState) -> Message {\n\n let mut state = state.lock().unwrap();\n\n\n\n match message {\n\n GroupMessage::List => {\n\n // Return information about all groups to the client.\n\n Message::GroupResponse(GroupResponseMessage {\n\n groups: state.groups.clone(),\n\n })\n\n }\n\n GroupMessage::Add {\n\n name,\n\n parallel_tasks,\n\n } => {\n\n if state.groups.contains_key(&name) {\n\n return create_failure_message(format!(\"Group \\\"{name}\\\" already exists\"));\n\n }\n\n\n\n // Propagate the message to the TaskHandler, which is responsible for actually\n\n // manipulating our internal data\n", "file_path": "daemon/network/message_handler/group.rs", "rank": 3, "score": 262619.83956108993 }, { "content": "/// Remove the daemon's pid file.\n\n/// Errors if it doesn't exist or cannot be deleted.\n\npub fn cleanup_pid_file(pid_path: &Path) -> Result<()> {\n\n std::fs::remove_file(pid_path)\n\n .map_err(|err| Error::IoError(\"removing pid file\".to_string(), err))?;\n\n Ok(())\n\n}\n", "file_path": "daemon/pid.rs", "rank": 4, "score": 261602.37085724057 }, { "content": "/// Create a file containing the current pid of the daemon's main process.\n\n/// Fails if it already exists or cannot be created.\n\npub fn create_pid_file(pid_path: &Path) -> Result<()> {\n\n // If an old PID file exists, check if the referenced process is still running.\n\n // The pid might not have been properly cleaned up, if the machine or Pueue crashed hard.\n\n if pid_path.exists() {\n\n check_for_running_daemon(pid_path)?;\n\n }\n\n let mut file = File::create(pid_path)\n\n .map_err(|err| Error::IoError(\"creating pid file\".to_string(), err))?;\n\n\n\n file.write_all(std::process::id().to_string().as_bytes())\n\n .map_err(|err| Error::IoError(\"writing pid file\".to_string(), err))?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "daemon/pid.rs", "rank": 5, "score": 261602.05298857612 }, { "content": "/// This function creates files `[1-20]` in the specified 
directory.\n\n/// The return value is the expected output.\n\n///\n\n/// If `partial == true`, the expected output are only the last 5 lines.\n\nfn create_test_files(path: &Path, partial: bool) -> Result<String> {\n\n // Convert numbers from 1 to 01, so they're correctly ordered when using `ls`.\n\n let names: Vec<String> = (0..20)\n\n .map(|number| {\n\n if number < 10 {\n\n let mut name = \"0\".to_string();\n\n name.push_str(&number.to_string());\n\n name\n\n } else {\n\n number.to_string()\n\n }\n\n })\n\n .collect();\n\n\n\n for name in &names {\n\n File::create(path.join(name))?;\n\n }\n\n\n\n // Only return the last 5 lines if partial output is requested.\n\n if partial {\n", "file_path": "tests/unix/log.rs", "rank": 6, "score": 261101.93594783032 }, { "content": "/// Create and return the two file handles for the `(stdout, stderr)` log file of a task.\n\n/// These are two handles to the same file.\n\npub fn create_log_file_handles(task_id: usize, path: &Path) -> Result<(File, File), Error> {\n\n let log_path = get_log_path(task_id, path);\n\n let stdout_handle = File::create(&log_path)\n\n .map_err(|err| Error::IoPathError(log_path, \"getting stdout handle\", err))?;\n\n let stderr_handle = stdout_handle\n\n .try_clone()\n\n .map_err(|err| Error::IoError(\"cloning stderr handle\".to_string(), err))?;\n\n\n\n Ok((stdout_handle, stderr_handle))\n\n}\n\n\n", "file_path": "lib/src/log.rs", "rank": 7, "score": 254810.35590945123 }, { "content": "/// A small helper for handling task failures. \\\n\n/// Users can specify whether they want to pause the task's group or the\n\n/// whole daemon on a failed tasks. 
This function wraps that logic and decides if anything should be\n\n/// paused depending on the current settings.\n\n///\n\n/// `group` should be the name of the failed task.\n\npub fn pause_on_failure(state: &mut LockedState, group: &str) {\n\n if state.settings.daemon.pause_group_on_failure {\n\n if let Some(group) = state.groups.get_mut(group) {\n\n group.status = GroupStatus::Paused;\n\n }\n\n } else if state.settings.daemon.pause_all_on_failure {\n\n state.set_status_for_all_groups(GroupStatus::Paused);\n\n }\n\n}\n\n\n", "file_path": "daemon/state_helper.rs", "rank": 8, "score": 250347.51908617368 }, { "content": "/// Restore the last state from a previous session. \\\n\n/// The state is stored as json in the `pueue_directory`.\n\n///\n\n/// If the state cannot be deserialized, an empty default state will be used instead. \\\n\n/// All groups with queued tasks will be automatically paused to prevent unwanted execution.\n\npub fn restore_state(pueue_directory: &Path) -> Result<Option<State>> {\n\n let path = pueue_directory.join(\"state.json\");\n\n\n\n // Ignore if the file doesn't exist. 
It doesn't have to.\n\n if !path.exists() {\n\n info!(\"Couldn't find state from previous session at location: {path:?}\");\n\n return Ok(None);\n\n }\n\n info!(\"Restoring state\");\n\n\n\n // Try to load the file.\n\n let data = fs::read_to_string(&path).context(\"State restore: Failed to read file:\\n\\n{}\")?;\n\n\n\n // Try to deserialize the state file.\n\n let mut state: State = serde_json::from_str(&data).context(\"Failed to deserialize state.\")?;\n\n\n\n // Restore all tasks.\n\n // While restoring the tasks, check for any invalid/broken stati.\n\n for (_, task) in state.tasks.iter_mut() {\n\n // Handle ungraceful shutdowns while executing tasks.\n", "file_path": "daemon/state_helper.rs", "rank": 9, "score": 249780.38712543948 }, { "content": "#[allow(clippy::needless_collect)]\n\npub fn read_last_lines(file: &mut File, amount: usize) -> String {\n\n let reader = RevBufReader::new(file);\n\n\n\n let lines: Vec<String> = reader\n\n .lines()\n\n .take(amount)\n\n .map(|line| line.unwrap_or_else(|_| \"Pueue: Failed to read line.\".to_string()))\n\n .collect();\n\n\n\n lines.into_iter().rev().collect::<Vec<String>>().join(\"\\n\")\n\n}\n\n\n", "file_path": "lib/src/log.rs", "rank": 10, "score": 249111.50650435925 }, { "content": "/// Save the current state to disk. \\\n\n/// We do this to restore in case of a crash. 
\\\n\n/// If log == true, the file will be saved with a time stamp.\n\n///\n\n/// In comparison to the daemon -> client communication, the state is saved\n\n/// as JSON for readability and debugging purposes.\n\nfn save_state_to_file(state: &State, log: bool) -> Result<()> {\n\n let serialized = serde_json::to_string(&state).context(\"Failed to serialize state:\");\n\n\n\n let serialized = serialized.unwrap();\n\n let path = state.settings.shared.pueue_directory();\n\n let (temp, real) = if log {\n\n let path = path.join(\"log\");\n\n let now: DateTime<Utc> = Utc::now();\n\n let time = now.format(\"%Y-%m-%d_%H-%M-%S\");\n\n (\n\n path.join(format!(\"{time}_state.json.partial\")),\n\n path.join(format!(\"{time}_state.json\")),\n\n )\n\n } else {\n\n (path.join(\"state.json.partial\"), path.join(\"state.json\"))\n\n };\n\n\n\n // Write to temporary log file first, to prevent loss due to crashes.\n\n fs::write(&temp, serialized).context(\"Failed to write temp file while saving state.\")?;\n\n\n", "file_path": "daemon/state_helper.rs", "rank": 11, "score": 248110.95220486433 }, { "content": "/// Return a nicely formatted headline that's displayed above group tables\n\npub fn get_group_headline(name: &str, group: &Group, colors: &Colors) -> String {\n\n // Style group name\n\n let name = style(format!(\"Group \\\"{}\\\"\", name)).attribute(Attribute::Bold);\n\n\n\n // Print the current state of the group.\n\n let status = match group.status {\n\n GroupStatus::Running => style_text(\"running\", Some(colors.green()), None),\n\n GroupStatus::Paused => style_text(\"paused\", Some(colors.yellow()), None),\n\n };\n\n\n\n format!(\"{} ({} parallel): {}\", name, group.parallel_tasks, status)\n\n}\n\n\n", "file_path": "client/display/helper.rs", "rank": 12, "score": 247153.8692986467 }, { "content": "/// Return the file handle for the log file of a task.\n\npub fn get_log_file_handle(task_id: usize, path: &Path) -> Result<File, Error> {\n\n let path = get_log_path(task_id, 
path);\n\n let handle = File::open(&path)\n\n .map_err(|err| Error::IoPathError(path, \"getting log file handle\", err))?;\n\n\n\n Ok(handle)\n\n}\n\n\n", "file_path": "lib/src/log.rs", "rank": 13, "score": 246123.3955029394 }, { "content": "/// Invoked if a client fails to edit a task and asks the daemon to restore the task's status.\n\npub fn edit_restore(task_id: usize, state: &SharedState) -> Message {\n\n // Check whether the task exists and is queued/stashed. Abort if that's not the case.\n\n let mut state = state.lock().unwrap();\n\n match state.tasks.get_mut(&task_id) {\n\n Some(task) => {\n\n if task.status != TaskStatus::Locked {\n\n return create_failure_message(\"The requested task isn't locked\");\n\n }\n\n task.status = task.prev_status.clone();\n\n\n\n return create_success_message(format!(\n\n \"The requested task's status has been restored to '{}'\",\n\n task.status\n\n ));\n\n }\n\n None => create_failure_message(\"No task with this id.\"),\n\n }\n\n}\n", "file_path": "daemon/network/message_handler/edit.rs", "rank": 14, "score": 236655.32668034744 }, { "content": "/// Invoked when calling `pueue edit`.\n\n/// If a user wants to edit a message, we need to send him the current command.\n\n/// Lock the task to prevent execution, before the user has finished editing the command.\n\npub fn edit_request(task_id: usize, state: &SharedState) -> Message {\n\n // Check whether the task exists and is queued/stashed. 
Abort if that's not the case.\n\n let mut state = state.lock().unwrap();\n\n match state.tasks.get_mut(&task_id) {\n\n Some(task) => {\n\n if !task.is_queued() {\n\n return create_failure_message(\"You can only edit a queued/stashed task\");\n\n }\n\n task.prev_status = task.status.clone();\n\n task.status = TaskStatus::Locked;\n\n\n\n let message = EditResponseMessage {\n\n task_id: task.id,\n\n command: task.original_command.clone(),\n\n path: task.path.clone(),\n\n };\n\n Message::EditResponse(message)\n\n }\n\n None => create_failure_message(\"No task with this id.\"),\n\n }\n\n}\n\n\n", "file_path": "daemon/network/message_handler/edit.rs", "rank": 15, "score": 236646.56763502007 }, { "content": "pub fn default_pueue_path() -> PathBuf {\n\n get_home_dir().join(\".local/share/pueue\")\n\n}\n\n\n", "file_path": "lib/src/platform/apple/directories.rs", "rank": 16, "score": 236464.96934013508 }, { "content": "pub fn default_pueue_path() -> PathBuf {\n\n if let Ok(path) = std::env::var(\"XDG_DATA_HOME\") {\n\n expand_home(&PathBuf::from(path)).join(\"pueue\")\n\n } else {\n\n get_home_dir().join(\".local/share/pueue\")\n\n }\n\n}\n\n\n", "file_path": "lib/src/platform/linux/directories.rs", "rank": 17, "score": 236464.96934013508 }, { "content": "/// Invoked when calling `pueue remove`.\n\n/// Remove tasks from the queue.\n\n/// We have to ensure that those tasks aren't running!\n\npub fn remove(task_ids: Vec<usize>, state: &SharedState) -> Message {\n\n let mut state = state.lock().unwrap();\n\n let filter = |task: &Task| {\n\n matches!(\n\n task.status,\n\n TaskStatus::Queued\n\n | TaskStatus::Stashed { .. 
}\n\n | TaskStatus::Done(_)\n\n | TaskStatus::Locked\n\n )\n\n };\n\n let (mut not_running, mut running) = state.filter_tasks(filter, Some(task_ids));\n\n\n\n // Don't delete tasks, if there are other tasks that depend on this one.\n\n // However, we allow to delete those tasks, if they're supposed to be deleted as well.\n\n for task_id in not_running.clone() {\n\n if !is_task_removable(&state, &task_id, &not_running) {\n\n running.push(task_id);\n\n not_running.retain(|id| id != &task_id);\n\n };\n", "file_path": "daemon/network/message_handler/remove.rs", "rank": 18, "score": 234397.94434058893 }, { "content": "/// Invoked when calling `pueue stash`.\n\n/// Stash specific queued tasks.\n\n/// They won't be executed until they're enqueued or explicitely started.\n\npub fn stash(task_ids: Vec<usize>, state: &SharedState) -> Message {\n\n let (matching, mismatching) = {\n\n let mut state = state.lock().unwrap();\n\n let (matching, mismatching) = state.filter_tasks(\n\n |task| matches!(task.status, TaskStatus::Queued | TaskStatus::Locked),\n\n Some(task_ids),\n\n );\n\n\n\n for task_id in &matching {\n\n state.change_status(*task_id, TaskStatus::Stashed { enqueue_at: None });\n\n }\n\n\n\n (matching, mismatching)\n\n };\n\n\n\n compile_task_response(\"Tasks are stashed\", matching, mismatching)\n\n}\n", "file_path": "daemon/network/message_handler/stash.rs", "rank": 19, "score": 234397.94434058896 }, { "content": "/// Check whether the given group exists. Return an failure message if it doesn't.\n\npub fn ensure_group_exists<'state>(\n\n state: &'state mut LockedState,\n\n group: &str,\n\n) -> Result<&'state mut Group, Message> {\n\n let group_keys: Vec<String> = state.groups.keys().cloned().collect();\n\n if let Some(group) = state.groups.get_mut(group) {\n\n return Ok(group);\n\n }\n\n\n\n Err(create_failure_message(format!(\n\n \"Group {group} doesn't exists. 
Use one of these: {group_keys:?}\",\n\n )))\n\n}\n\n\n", "file_path": "daemon/network/response_helper.rs", "rank": 20, "score": 232610.73534132954 }, { "content": "/// Set the parallel tasks for a specific group.\n\npub fn set_parallel_tasks(message: ParallelMessage, state: &SharedState) -> Message {\n\n let mut state = state.lock().unwrap();\n\n let group = match ensure_group_exists(&mut state, &message.group) {\n\n Ok(group) => group,\n\n Err(message) => return message,\n\n };\n\n\n\n group.parallel_tasks = message.parallel_tasks;\n\n\n\n create_success_message(format!(\n\n \"Parallel tasks setting for group \\\"{}\\\" adjusted\",\n\n &message.group\n\n ))\n\n}\n", "file_path": "daemon/network/message_handler/parallel.rs", "rank": 21, "score": 232449.89467841096 }, { "content": "/// Kill a child process\n\npub fn kill_child(task_id: usize, child: &mut Child, _kill_children: bool) -> bool {\n\n match child.kill() {\n\n Err(_) => {\n\n info!(\"Task {task_id} has already finished by itself\");\n\n false\n\n }\n\n Ok(_) => {\n\n let pids = get_cur_task_processes(child.id());\n\n\n\n for pid in pids {\n\n terminate_process(pid);\n\n }\n\n true\n\n }\n\n }\n\n}\n\n\n", "file_path": "daemon/platform/windows/process_helper.rs", "rank": 23, "score": 228026.91488875874 }, { "content": "/// Get the path to the log file of a task.\n\npub fn get_log_path(task_id: usize, path: &Path) -> PathBuf {\n\n let task_log_dir = path.join(\"task_logs\");\n\n let path = task_log_dir.join(format!(\"{task_id}.log\"));\n\n path\n\n}\n\n\n", "file_path": "lib/src/log.rs", "rank": 24, "score": 227339.1204932628 }, { "content": "pub fn default_config_directory() -> PathBuf {\n\n data_local_dir().join(\"pueue\")\n\n}\n\n\n", "file_path": "lib/src/platform/windows/directories.rs", "rank": 25, "score": 226396.13718121545 }, { "content": "/// Get a daemon pid from a specific pueue directory.\n\n/// This function gives the daemon a little time to boot up, but ultimately crashes if it takes 
too\n\n/// long.\n\npub fn get_pid(pid_path: &Path) -> Result<i32> {\n\n // Give the daemon about 1 sec to boot and create the pid file.\n\n let tries = 20;\n\n let mut current_try = 0;\n\n\n\n while current_try < tries {\n\n // The daemon didn't create the pid file yet. Wait for 100ms and try again.\n\n if !pid_path.exists() {\n\n sleep_ms(50);\n\n current_try += 1;\n\n continue;\n\n }\n\n\n\n let mut file = File::open(&pid_path).context(\"Couldn't open pid file\")?;\n\n let mut content = String::new();\n\n file.read_to_string(&mut content)\n\n .context(\"Couldn't write to file\")?;\n\n\n\n // The file has been created but not yet been written to.\n\n if content.is_empty() {\n", "file_path": "tests/helper/daemon.rs", "rank": 26, "score": 221070.17730971653 }, { "content": "/// Returns the formatted `start` and `end` text for a given task.\n\n///\n\n/// 1. If the start || end is today, skip the date.\n\n/// 2. Otherwise show the date in both.\n\n///\n\n/// If the task doesn't have a start and/or end yet, an empty string will be returned\n\n/// for the respective field.\n\nfn formatted_start_end(task: &Task, settings: &Settings) -> (String, String) {\n\n // Get the start time.\n\n // If the task didn't start yet, just return two empty strings.\n\n let start = match task.start {\n\n Some(start) => start,\n\n None => return (\"\".into(), \"\".into()),\n\n };\n\n\n\n // If the task started today, just show the time.\n\n // Otherwise show the full date and time.\n\n let started_today = start >= Local::today().and_hms(0, 0, 0);\n\n let formatted_start = if started_today {\n\n start\n\n .format(&settings.client.status_time_format)\n\n .to_string()\n\n } else {\n\n start\n\n .format(&settings.client.status_datetime_format)\n\n .to_string()\n\n };\n", "file_path": "client/display/state.rs", "rank": 27, "score": 219727.4964194139 }, { "content": "/// Convenience wrapper around save_to_file.\n\npub fn save_state(state: &State) -> Result<()> {\n\n save_state_to_file(state, 
false)\n\n}\n\n\n", "file_path": "daemon/state_helper.rs", "rank": 28, "score": 219018.58187626937 }, { "content": "pub fn default_runtime_directory() -> Option<PathBuf> {\n\n None\n\n}\n\n\n", "file_path": "lib/src/platform/windows/directories.rs", "rank": 29, "score": 218879.4691008012 }, { "content": "/// Assert that certain process id no longer exists\n\npub fn process_exists(pid: u32) -> bool {\n\n unsafe {\n\n let handle = CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, 0);\n\n\n\n let mut process_entry = PROCESSENTRY32 {\n\n dwSize: std::mem::size_of::<PROCESSENTRY32>() as u32,\n\n ..Default::default()\n\n };\n\n\n\n loop {\n\n if process_entry.th32ProcessID == pid {\n\n CloseHandle(handle);\n\n return true;\n\n }\n\n\n\n if Process32Next(handle, &mut process_entry) == FALSE {\n\n break;\n\n }\n\n }\n\n\n", "file_path": "daemon/platform/windows/process_helper.rs", "rank": 30, "score": 218128.05567725466 }, { "content": "fn print_all_groups(state: State, tasks: Vec<Task>, settings: &Settings, colors: &Colors) {\n\n // Early exit and hint if there are no tasks in the queue\n\n // Print the state of the default group anyway, since this is information one wants to\n\n // see most of the time anyway.\n\n if state.tasks.is_empty() {\n\n let headline = get_group_headline(\n\n PUEUE_DEFAULT_GROUP,\n\n state.groups.get(PUEUE_DEFAULT_GROUP).unwrap(),\n\n colors,\n\n );\n\n println!(\"{headline}\\n\");\n\n println!(\"Task list is empty. Add tasks with `pueue add -- [cmd]`\");\n\n return;\n\n }\n\n\n\n // Sort all tasks by their respective group;\n\n let sorted_tasks = sort_tasks_by_group(tasks);\n\n\n\n // Always print the default queue at the very top, if no specific group is requested.\n\n if sorted_tasks.get(PUEUE_DEFAULT_GROUP).is_some() {\n", "file_path": "client/display/state.rs", "rank": 31, "score": 213528.3016519758 }, { "content": "/// Custom group serializer, which tries to deserialize the field with the legacy representation if\n\n/// there are any errors. 
That way we can recover in a smooth way from the old format.\n\n/// This is necessary to ensure a semi-smooth transition from v1 to v2.\n\n/// TODO: Remove in 2.1.0\n\nfn deserialize_groups<'de, D>(deserializer: D) -> Result<BTreeMap<String, Group>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n // Do a general deserialization to Serde's `Value` type.\n\n // That way we don't break the deserialization state if we're unable to deserialize it into\n\n // our expected format.\n\n let value: Value = serde::Deserialize::deserialize(deserializer)?;\n\n let groups: Result<BTreeMap<String, Group>, serde_json::Error> =\n\n serde_json::from_value(value.clone());\n\n\n\n // If we cannot deserialize the state, this means that this is probably an old state which uses\n\n // the old format. Try to deserialize the old format and convert them to the new format.\n\n match groups {\n\n Ok(groups) => Ok(groups),\n\n Err(_) => {\n\n let legacy_groups: Result<BTreeMap<String, GroupStatus>, serde_json::Error> =\n\n serde_json::from_value(value);\n\n\n\n let groups = match legacy_groups {\n", "file_path": "lib/src/state.rs", "rank": 32, "score": 211782.70110651944 }, { "content": "/// Invoked after closing the editor on `pueue edit`.\n\n/// Now we actually update the message with the updated command from the client.\n\npub fn edit(message: EditMessage, state: &SharedState) -> Message {\n\n // Check whether the task exists and is locked. 
Abort if that's not the case.\n\n let mut state = state.lock().unwrap();\n\n match state.tasks.get_mut(&message.task_id) {\n\n Some(task) => {\n\n if !(task.status == TaskStatus::Locked) {\n\n return create_failure_message(\"Task is no longer locked.\");\n\n }\n\n\n\n task.status = task.prev_status.clone();\n\n task.original_command = message.command.clone();\n\n task.command = insert_alias(message.command.clone());\n\n task.path = message.path.clone();\n\n ok_or_return_failure_message!(save_state(&state));\n\n\n\n create_success_message(\"Command has been updated\")\n\n }\n\n None => create_failure_message(format!(\"Task to edit has gone away: {}\", message.task_id)),\n\n }\n\n}\n\n\n", "file_path": "daemon/network/message_handler/edit.rs", "rank": 33, "score": 211208.9868865764 }, { "content": "/// Invoked when calling `pueue clean`.\n\n/// Remove all failed or done tasks from the state.\n\npub fn clean(message: CleanMessage, state: &SharedState) -> Message {\n\n let mut state = state.lock().unwrap();\n\n ok_or_return_failure_message!(save_state(&state));\n\n\n\n let (matching, _) = state.filter_tasks(|task| matches!(task.status, TaskStatus::Done(_)), None);\n\n\n\n for task_id in &matching {\n\n // Ensure the task is removable, i.e. 
there are no dependant tasks.\n\n if !is_task_removable(&state, task_id, &[]) {\n\n continue;\n\n }\n\n\n\n if message.successful_only || message.group.is_some() {\n\n if let Some(task) = state.tasks.get(task_id) {\n\n // Check if we should ignore this task, if only successful tasks should be removed.\n\n if message.successful_only\n\n && !matches!(task.status, TaskStatus::Done(TaskResult::Success))\n\n {\n\n continue;\n\n }\n", "file_path": "daemon/network/message_handler/clean.rs", "rank": 34, "score": 211208.93435816246 }, { "content": "/// Invoked when calling `pueue switch`.\n\n/// Switch the position of two tasks in the upcoming queue.\n\n/// We have to ensure that those tasks are either `Queued` or `Stashed`\n\npub fn switch(message: SwitchMessage, state: &SharedState) -> Message {\n\n let mut state = state.lock().unwrap();\n\n\n\n let task_ids = vec![message.task_id_1, message.task_id_2];\n\n let (_, mismatching) = state.filter_tasks(\n\n |task| matches!(task.status, TaskStatus::Queued | TaskStatus::Stashed { .. }),\n\n Some(task_ids.to_vec()),\n\n );\n\n if !mismatching.is_empty() {\n\n return create_failure_message(\"Tasks have to be either queued or stashed.\");\n\n }\n\n if task_ids[0] == task_ids[1] {\n\n return create_failure_message(\"You cannot switch a task with itself.\");\n\n }\n\n\n\n // Get the tasks. 
Expect them to be there, since we found no mismatch\n\n let mut first_task = state.tasks.remove(&task_ids[0]).unwrap();\n\n let mut second_task = state.tasks.remove(&task_ids[1]).unwrap();\n\n\n\n // Switch task ids\n", "file_path": "daemon/network/message_handler/switch.rs", "rank": 35, "score": 211204.6365012147 }, { "content": "/// Invoked when calling `pueue enqueue`.\n\n/// Enqueue specific stashed tasks.\n\npub fn enqueue(message: EnqueueMessage, state: &SharedState) -> Message {\n\n let mut state = state.lock().unwrap();\n\n let (matching, mismatching) = {\n\n let (matching, mismatching) = state.filter_tasks(\n\n |task| matches!(task.status, TaskStatus::Stashed { .. } | TaskStatus::Locked),\n\n Some(message.task_ids),\n\n );\n\n\n\n (matching, mismatching)\n\n };\n\n\n\n for task_id in &matching {\n\n // We just checked that they're there and the state is locked. It's safe to unwrap.\n\n let task = state.tasks.get_mut(task_id).expect(\"Task should be there.\");\n\n\n\n // Either specify the point of time the task should be enqueued or enqueue the task\n\n // immediately.\n\n if message.enqueue_at.is_some() {\n\n task.status = TaskStatus::Stashed {\n\n enqueue_at: message.enqueue_at,\n", "file_path": "daemon/network/message_handler/enqueue.rs", "rank": 36, "score": 211204.6365012147 }, { "content": "/// By default, several columns aren't shown until there's actually some data to display.\n\n/// This function determines, which of those columns actually need to be shown.\n\npub fn has_special_columns(tasks: &[Task]) -> (bool, bool, bool) {\n\n // Check whether there are any delayed tasks.\n\n let has_delayed_tasks = tasks.iter().any(|task| {\n\n matches!(\n\n task.status,\n\n TaskStatus::Stashed {\n\n enqueue_at: Some(_)\n\n }\n\n )\n\n });\n\n\n\n // Check whether there are any tasks with dependencies.\n\n let has_dependencies = tasks.iter().any(|task| !task.dependencies.is_empty());\n\n\n\n // Check whether there are any tasks a label.\n\n let has_labels = 
tasks.iter().any(|task| task.label.is_some());\n\n\n\n (has_delayed_tasks, has_dependencies, has_labels)\n\n}\n\n\n", "file_path": "client/display/helper.rs", "rank": 37, "score": 210945.78388561594 }, { "content": "/// Print a local log file of a task.\n\nfn print_local_file(stdout: &mut Stdout, file: &mut File, lines: &Option<usize>, text: String) {\n\n if let Ok(metadata) = file.metadata() {\n\n if metadata.len() != 0 {\n\n // Don't print a newline between the task information and the first output\n\n println!(\"\\n{}\", text);\n\n\n\n // Only print the last lines if requested\n\n if let Some(lines) = lines {\n\n if let Err(err) = seek_to_last_lines(file, *lines) {\n\n println!(\"Failed reading local log file: {err}\");\n\n return;\n\n }\n\n }\n\n\n\n // Print everything\n\n if let Err(err) = io::copy(file, stdout) {\n\n println!(\"Failed reading local log file: {err}\");\n\n };\n\n }\n\n }\n\n}\n", "file_path": "client/display/log/local.rs", "rank": 38, "score": 209924.8527820369 }, { "content": "/// Save the current current state in a file with a timestamp.\n\n/// At the same time remove old state logs from the log directory.\n\n/// This function is called, when large changes to the state are applied, e.g. 
clean/reset.\n\npub fn backup_state(state: &LockedState) -> Result<()> {\n\n save_state_to_file(state, true)?;\n\n rotate_state(state).context(\"Failed to rotate old log files\")?;\n\n Ok(())\n\n}\n\n\n", "file_path": "daemon/state_helper.rs", "rank": 39, "score": 207857.01276986563 }, { "content": "/// Invoked when calling `pueue log`.\n\n/// Return tasks and their output to the client.\n\npub fn get_log(message: LogRequestMessage, state: &SharedState) -> Message {\n\n let state = { state.lock().unwrap().clone() };\n\n // Return all logs, if no specific task id is specified.\n\n let task_ids = if message.task_ids.is_empty() {\n\n state.tasks.keys().cloned().collect()\n\n } else {\n\n message.task_ids\n\n };\n\n\n\n let mut tasks = BTreeMap::new();\n\n for task_id in task_ids.iter() {\n\n if let Some(task) = state.tasks.get(task_id) {\n\n // We send log output and the task at the same time.\n\n // This isn't as efficient as sending the raw compressed data directly,\n\n // but it's a lot more convenient for now.\n\n let output = if message.send_logs {\n\n match read_and_compress_log_file(\n\n *task_id,\n\n &state.settings.shared.pueue_directory(),\n\n message.lines,\n", "file_path": "daemon/network/message_handler/log.rs", "rank": 40, "score": 204260.41786389658 }, { "content": "/// Do a full reset of the state.\n\n/// This doesn't reset any processes!\n\npub fn reset_state(state: &mut LockedState) -> Result<()> {\n\n backup_state(state)?;\n\n state.tasks = BTreeMap::new();\n\n state.set_status_for_all_groups(GroupStatus::Running);\n\n\n\n save_state(state)\n\n}\n\n\n", "file_path": "daemon/state_helper.rs", "rank": 41, "score": 201978.47978923097 }, { "content": "/// Determine how many lines of output should be printed/returned.\n\n/// `None` implicates that all lines are printed.\n\n///\n\n/// By default, everything is returned for single tasks and only some lines for multiple.\n\n/// `json` is an exception to this, in json mode we always only return some 
lines\n\n/// (unless otherwise explicitely requested).\n\n///\n\n/// `full` always forces the full log output\n\n/// `lines` force a specific amount of lines\n\npub fn determine_log_line_amount(full: bool, lines: &Option<usize>) -> Option<usize> {\n\n if full {\n\n None\n\n } else if let Some(lines) = lines {\n\n Some(*lines)\n\n } else {\n\n // By default, only some lines are shown per task\n\n Some(15)\n\n }\n\n}\n\n\n", "file_path": "client/display/log/mod.rs", "rank": 42, "score": 201963.42709407178 }, { "content": "/// This is a helper function to safely kill a child process.\n\n/// Its purpose is to properly kill all processes and prevent any dangling processes.\n\n///\n\n/// Sadly, this needs some extra handling. Check the docstring of `send_signal_to_child` for\n\n/// additional information on why this needs to be done.\n\n///\n\n/// Returns `true`, if everything went alright\n\n/// Returns `false`, if the process went away while we tried to send the signal.\n\npub fn kill_child(task_id: usize, child: &mut Child, kill_children: bool) -> bool {\n\n let pid: i32 = child.id().try_into().unwrap();\n\n\n\n // Check whether this process actually spawned a shell.\n\n let is_shell = if let Ok(is_shell) = did_process_spawn_shell(pid) {\n\n is_shell\n\n } else {\n\n return false;\n\n };\n\n\n\n // We have to kill the root process first, to prevent it from spawning new processes.\n\n // However, this prevents us from getting its child processes afterwards.\n\n // That's why we have to get the list of child processes already now.\n\n let mut child_processes = None;\n\n if kill_children || is_shell {\n\n child_processes = Some(get_child_processes(pid));\n\n }\n\n\n\n // Kill the parent first\n\n let kill_result = child.kill();\n", "file_path": "daemon/platform/linux/process_helper.rs", "rank": 43, "score": 201154.79705539538 }, { "content": "/// This is a helper function to safely kill a child process.\n\n/// Its purpose is to properly kill all processes and prevent any 
dangling processes.\n\npub fn kill_child(task_id: usize, child: &mut Child, _kill_children: bool) -> bool {\n\n match child.kill() {\n\n Err(_) => {\n\n debug!(\"Task {task_id} has already finished by itself\");\n\n false\n\n }\n\n _ => true,\n\n }\n\n}\n\n\n", "file_path": "daemon/platform/apple/process_helper.rs", "rank": 44, "score": 201151.2513925482 }, { "content": "/// Send a signal to a windows process.\n\npub fn run_action_on_child(child: &Child, action: &ProcessAction, _children: bool) -> Result<bool> {\n\n let pids = get_cur_task_processes(child.id());\n\n if pids.is_empty() {\n\n bail!(\"Process has just gone away\");\n\n }\n\n\n\n match action {\n\n ProcessAction::Pause => {\n\n for pid in pids {\n\n for thread in get_threads(pid) {\n\n suspend_thread(thread);\n\n }\n\n }\n\n }\n\n ProcessAction::Resume => {\n\n for pid in pids {\n\n for thread in get_threads(pid) {\n\n resume_thread(thread);\n\n }\n\n }\n\n }\n\n }\n\n\n\n Ok(true)\n\n}\n\n\n", "file_path": "daemon/platform/windows/process_helper.rs", "rank": 45, "score": 201045.33939422993 }, { "content": "/// Remove the the log files of a task.\n\npub fn clean_log_handles(task_id: usize, path: &Path) {\n\n let path = get_log_path(task_id, path);\n\n if path.exists() {\n\n if let Err(err) = remove_file(path) {\n\n error!(\"Failed to remove stdout file for task {task_id} with error {err:?}\");\n\n };\n\n }\n\n}\n\n\n", "file_path": "lib/src/log.rs", "rank": 46, "score": 200931.5869439487 }, { "content": "/// Generate a random secret and write it to a file.\n\npub fn init_shared_secret(path: &Path) -> Result<(), Error> {\n\n if path.exists() {\n\n return Ok(());\n\n }\n\n\n\n const PASSWORD_LEN: usize = 512;\n\n let mut rng = rand::thread_rng();\n\n\n\n let secret: String = std::iter::repeat(())\n\n .map(|()| rng.sample(Alphanumeric))\n\n .map(char::from)\n\n .take(PASSWORD_LEN)\n\n .collect();\n\n\n\n let mut file = File::create(&path)\n\n .map_err(|err| Error::IoPathError(path.to_path_buf(), 
\"creating shared secret\", err))?;\n\n file.write_all(&secret.into_bytes())\n\n .map_err(|err| Error::IoPathError(path.to_path_buf(), \"writing shared secret\", err))?;\n\n\n\n // Set proper file permissions for unix filesystems\n", "file_path": "lib/src/network/secret.rs", "rank": 47, "score": 200881.95567189148 }, { "content": "/// Follow the log ouput of running task.\n\n///\n\n/// If no task is specified, this will check for the following cases:\n\n///\n\n/// - No running task: Print an error that there are no running tasks\n\n/// - Single running task: Follow the output of that task\n\n/// - Multiple running tasks: Print out the list of possible tasks to follow.\n\npub fn follow_local_task_logs(pueue_directory: &Path, task_id: usize, lines: Option<usize>) {\n\n let mut handle = match get_log_file_handle(task_id, pueue_directory) {\n\n Ok(stdout) => stdout,\n\n Err(err) => {\n\n println!(\"Failed to get log file handles: {err}\");\n\n return;\n\n }\n\n };\n\n let path = get_log_path(task_id, pueue_directory);\n\n\n\n // Stdout handler to directly write log file output to io::stdout\n\n // without having to load anything into memory.\n\n let mut stdout = io::stdout();\n\n\n\n // If lines is passed as an option, seek the output file handle to the start of\n\n // the line corresponding to the `lines` number of lines from the end of the file.\n\n // The loop following this section will copy those lines to stdout\n\n if let Some(lines) = lines {\n\n if let Err(err) = seek_to_last_lines(&mut handle, lines) {\n\n println!(\"Error seeking to last lines from log: {err}\");\n", "file_path": "client/display/follow.rs", "rank": 48, "score": 198436.71325361752 }, { "content": "/// Read logs directly from local files for a specific task.\n\nfn get_local_log(settings: &Settings, id: usize, lines: Option<usize>) -> String {\n\n let mut file = match get_log_file_handle(id, &settings.shared.pueue_directory()) {\n\n Ok(file) => file,\n\n Err(err) => {\n\n return format!(\"(Pueue 
error) Failed to get log file handle: {err}\");\n\n }\n\n };\n\n\n\n let output = if let Some(lines) = lines {\n\n read_last_lines(&mut file, lines)\n\n } else {\n\n let mut output = String::new();\n\n if let Err(error) = file.read_to_string(&mut output) {\n\n output.push_str(&format!(\n\n \"(Pueue error) Failed to read local log output file: {error:?}\"\n\n ))\n\n };\n\n\n\n output\n\n };\n\n\n\n output\n\n}\n\n\n", "file_path": "client/display/log/json.rs", "rank": 49, "score": 197883.70102124842 }, { "content": "/// Invoked when calling `pueue pause`.\n\n/// Forward the pause message to the task handler, which then pauses groups/tasks/everything.\n\npub fn pause(message: PauseMessage, sender: &Sender<Message>, state: &SharedState) -> Message {\n\n let mut state = state.lock().unwrap();\n\n // If a group is selected, make sure it exists.\n\n if let TaskSelection::Group(group) = &message.tasks {\n\n if let Err(message) = ensure_group_exists(&mut state, group) {\n\n return message;\n\n }\n\n }\n\n\n\n // Forward the message to the task handler.\n\n sender\n\n .send(Message::Pause(message.clone()))\n\n .expect(SENDER_ERR);\n\n\n\n // Return a response depending on the selected tasks.\n\n match message.tasks {\n\n TaskSelection::TaskIds(task_ids) => task_action_response_helper(\n\n \"Tasks are being paused\",\n\n task_ids,\n\n |task| matches!(task.status, TaskStatus::Running),\n\n &state,\n\n ),\n\n TaskSelection::Group(group) => {\n\n create_success_message(format!(\"Group \\\"{group}\\\" is being paused.\"))\n\n }\n\n TaskSelection::All => create_success_message(\"All queues are being paused.\"),\n\n }\n\n}\n", "file_path": "daemon/network/message_handler/pause.rs", "rank": 50, "score": 194776.45039604622 }, { "content": "/// Invoked when calling `pueue kill`.\n\n/// Forward the kill message to the task handler, which then kills the process.\n\npub fn kill(message: KillMessage, sender: &Sender<Message>, state: &SharedState) -> Message {\n\n let mut state = 
state.lock().unwrap();\n\n\n\n // If a group is selected, make sure it exists.\n\n if let TaskSelection::Group(group) = &message.tasks {\n\n if let Err(message) = ensure_group_exists(&mut state, group) {\n\n return message;\n\n }\n\n }\n\n\n\n sender\n\n .send(Message::Kill(message.clone()))\n\n .expect(SENDER_ERR);\n\n\n\n if let Some(signal) = message.signal {\n\n match message.tasks {\n\n TaskSelection::TaskIds(task_ids) => task_action_response_helper(\n\n \"Tasks are being killed\",\n\n task_ids,\n\n |task| task.is_running(),\n", "file_path": "daemon/network/message_handler/kill.rs", "rank": 51, "score": 194772.30379583867 }, { "content": "pub fn handle_message(message: Message, sender: &Sender<Message>, state: &SharedState) -> Message {\n\n match message {\n\n Message::Add(message) => add::add_task(message, sender, state),\n\n Message::Clean(message) => clean::clean(message, state),\n\n Message::Edit(message) => edit::edit(message, state),\n\n Message::EditRequest(task_id) => edit::edit_request(task_id, state),\n\n Message::EditRestore(task_id) => edit::edit_restore(task_id, state),\n\n Message::Enqueue(message) => enqueue::enqueue(message, state),\n\n Message::Group(message) => group::group(message, sender, state),\n\n Message::Kill(message) => kill::kill(message, sender, state),\n\n Message::Log(message) => log::get_log(message, state),\n\n Message::Parallel(message) => parallel::set_parallel_tasks(message, state),\n\n Message::Pause(message) => pause::pause(message, sender, state),\n\n Message::Remove(task_ids) => remove::remove(task_ids, state),\n\n Message::Reset(message) => reset(message, sender),\n\n Message::Restart(message) => restart::restart_multiple(message, sender, state),\n\n Message::Send(message) => send::send(message, sender, state),\n\n Message::Start(message) => start::start(message, sender, state),\n\n Message::Stash(task_ids) => stash::stash(task_ids, state),\n\n Message::Switch(message) => switch::switch(message, state),\n\n 
Message::Status => get_status(state),\n\n _ => create_failure_message(\"Not yet implemented\"),\n\n }\n\n}\n\n\n", "file_path": "daemon/network/message_handler/mod.rs", "rank": 52, "score": 194772.30379583867 }, { "content": "/// Invoked when calling `pueue send`.\n\n/// The message will be forwarded to the task handler, which then sends the user input to the process.\n\n/// In here we only do some error handling.\n\npub fn send(message: SendMessage, sender: &Sender<Message>, state: &SharedState) -> Message {\n\n // Check whether the task exists and is running. Abort if that's not the case.\n\n {\n\n let state = state.lock().unwrap();\n\n match state.tasks.get(&message.task_id) {\n\n Some(task) => {\n\n if task.status != TaskStatus::Running {\n\n return create_failure_message(\"You can only send input to a running task\");\n\n }\n\n }\n\n None => return create_failure_message(\"No task with this id.\"),\n\n }\n\n }\n\n\n\n // Check whether the task exists and is running, abort if that's not the case.\n\n sender.send(Message::Send(message)).expect(SENDER_ERR);\n\n\n\n create_success_message(\"Message is being send to the process.\")\n\n}\n", "file_path": "daemon/network/message_handler/send.rs", "rank": 53, "score": 194772.30379583867 }, { "content": "/// Invoked when calling `pueue start`.\n\n/// Forward the start message to the task handler, which then starts the process(es).\n\npub fn start(message: StartMessage, sender: &Sender<Message>, state: &SharedState) -> Message {\n\n let mut state = state.lock().unwrap();\n\n // If a group is selected, make sure it exists.\n\n if let TaskSelection::Group(group) = &message.tasks {\n\n if let Err(message) = ensure_group_exists(&mut state, group) {\n\n return message;\n\n }\n\n }\n\n\n\n // Forward the message to the task handler.\n\n sender\n\n .send(Message::Start(message.clone()))\n\n .expect(SENDER_ERR);\n\n\n\n // Return a response depending on the selected tasks.\n\n match message.tasks {\n\n 
TaskSelection::TaskIds(task_ids) => task_action_response_helper(\n\n \"Tasks are being started\",\n\n task_ids,\n\n |task| {\n", "file_path": "daemon/network/message_handler/start.rs", "rank": 54, "score": 194772.30379583867 }, { "content": "/// Print the current state of the daemon in a nicely formatted table.\n\n/// We pass the tasks as a separate parameter and as a list.\n\n/// This allows us to print the tasks in any user-defined order.\n\npub fn print_state(\n\n state: State,\n\n tasks: Vec<Task>,\n\n cli_command: &SubCommand,\n\n colors: &Colors,\n\n settings: &Settings,\n\n) {\n\n let (json, group_only) = match cli_command {\n\n SubCommand::Status { json, group } => (*json, group.clone()),\n\n SubCommand::FormatStatus { group } => (false, group.clone()),\n\n _ => panic!(\"Got wrong Subcommand {cli_command:?} in print_state. This shouldn't happen!\"),\n\n };\n\n\n\n // If the json flag is specified, print the state as json and exit.\n\n if json {\n\n println!(\"{}\", serde_json::to_string(&state).unwrap());\n\n return;\n\n }\n\n\n\n if let Some(group) = group_only {\n\n print_single_group(state, tasks, settings, colors, group);\n\n return;\n\n }\n\n\n\n print_all_groups(state, tasks, settings, colors);\n\n}\n\n\n", "file_path": "client/display/state.rs", "rank": 55, "score": 194432.85421491947 }, { "content": "/// Return the default config directory for pueue.\n\n/// This follows the XDG specification and uses `XDG_CONFIG_HOME` if it's set.\n\npub fn default_config_directory() -> PathBuf {\n\n if let Ok(path) = std::env::var(\"XDG_CONFIG_HOME\") {\n\n PathBuf::from(path).join(\"pueue\")\n\n } else {\n\n get_home_dir().join(\".config/pueue\")\n\n }\n\n}\n", "file_path": "lib/src/platform/linux/directories.rs", "rank": 56, "score": 194344.47096321563 }, { "content": "pub fn default_config_directory() -> PathBuf {\n\n get_home_dir().join(\"Library/Preferences/pueue\")\n\n}\n", "file_path": "lib/src/platform/apple/directories.rs", "rank": 57, "score": 
194331.78095186126 }, { "content": "// Use local data directory since this data doesn't need to be synced.\n\npub fn data_local_dir() -> PathBuf {\n\n dirs::data_local_dir().unwrap_or_else(|| PathBuf::from(\"\\\\\"))\n\n}\n\n\n", "file_path": "lib/src/platform/windows/directories.rs", "rank": 58, "score": 194150.906525113 }, { "content": "fn write_file(blob: String, name: &str, path: &Path) -> Result<(), Error> {\n\n info!(\"Generate {name}.\");\n\n let mut file = File::create(path)\n\n .map_err(|err| Error::IoPathError(path.to_path_buf(), \"creating certificate\", err))?;\n\n\n\n file.write_all(&blob.into_bytes())\n\n .map_err(|err| Error::IoPathError(path.to_path_buf(), \"writing certificate\", err))?;\n\n\n\n #[cfg(not(target_os = \"windows\"))]\n\n {\n\n use std::os::unix::fs::PermissionsExt;\n\n let mut permissions = file\n\n .metadata()\n\n .map_err(|_| Error::CertificateFailure(\"Failed to certificate permission.\".into()))?\n\n .permissions();\n\n permissions.set_mode(0o640);\n\n std::fs::set_permissions(path, permissions)\n\n .map_err(|_| Error::CertificateFailure(\"Failed to certificate permission.\".into()))?;\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "lib/src/network/certificate.rs", "rank": 59, "score": 193440.13279179588 }, { "content": "/// Invoked when calling `pueue add`.\n\n/// Queues a new task to the state.\n\n/// If the start_immediately flag is set, send a StartMessage to the task handler.\n\npub fn add_task(message: AddMessage, sender: &Sender<Message>, state: &SharedState) -> Message {\n\n let mut state = state.lock().unwrap();\n\n if let Err(message) = ensure_group_exists(&mut state, &message.group) {\n\n return message;\n\n }\n\n\n\n let starting_status = if message.stashed || message.enqueue_at.is_some() {\n\n TaskStatus::Stashed {\n\n enqueue_at: message.enqueue_at,\n\n }\n\n } else {\n\n TaskStatus::Queued\n\n };\n\n\n\n // Ensure that specified dependencies actually exist.\n\n let not_found: Vec<_> = message\n\n .dependencies\n\n 
.iter()\n\n .filter(|id| !state.tasks.contains_key(id))\n\n .collect();\n", "file_path": "daemon/network/message_handler/add.rs", "rank": 60, "score": 191679.36937399616 }, { "content": "/// Sort given tasks by their groups\n\n/// This is needed to print a table for each group\n\npub fn sort_tasks_by_group(tasks: Vec<Task>) -> BTreeMap<String, Vec<Task>> {\n\n // We use a BTreeMap, since groups should be ordered alphabetically by their name\n\n let mut sorted_task_groups = BTreeMap::new();\n\n for task in tasks.into_iter() {\n\n if !sorted_task_groups.contains_key(&task.group) {\n\n sorted_task_groups.insert(task.group.clone(), Vec::new());\n\n }\n\n sorted_task_groups.get_mut(&task.group).unwrap().push(task);\n\n }\n\n\n\n sorted_task_groups\n\n}\n", "file_path": "client/display/helper.rs", "rank": 61, "score": 190640.68522437493 }, { "content": "/// Read the shared secret from a file.\n\npub fn read_shared_secret(path: &Path) -> Result<Vec<u8>, Error> {\n\n let mut file = File::open(path).map_err(|err| {\n\n Error::IoPathError(\n\n path.to_path_buf(),\n\n \"opening secret file. 
Did you start the daemon at least once?\",\n\n err,\n\n )\n\n })?;\n\n let mut buffer = Vec::new();\n\n file.read_to_end(&mut buffer)\n\n .map_err(|err| Error::IoPathError(path.to_path_buf(), \"reading secret file\", err))?;\n\n\n\n Ok(buffer)\n\n}\n\n\n", "file_path": "lib/src/network/secret.rs", "rank": 62, "score": 190258.31499879202 }, { "content": "/// Read a PID file and throw an error, if another daemon instance is still running.\n\nfn check_for_running_daemon(pid_path: &Path) -> Result<()> {\n\n info!(\"Placing pid file at {pid_path:?}\");\n\n let mut file =\n\n File::open(&pid_path).map_err(|err| Error::IoError(\"opening pid file\".to_string(), err))?;\n\n let mut pid = String::new();\n\n file.read_to_string(&mut pid)\n\n .map_err(|err| Error::IoError(\"reading pid file\".to_string(), err))?;\n\n\n\n let pid: u32 = pid\n\n .parse()\n\n .context(format!(\"Failed to parse PID from file: {pid_path:?}\"))?;\n\n\n\n if process_exists(pid) {\n\n bail!(\n\n \"Pid file already exists and another daemon seems to be running.\\n\\\n\n Please stop the daemon beforehand or delete the file manually: {pid_path:?}\",\n\n );\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "daemon/pid.rs", "rank": 63, "score": 189396.7313934595 }, { "content": "/// Seek the cursor of the current file to the beginning of the line that's located `amount` newlines\n\n/// from the back of the file.\n\npub fn seek_to_last_lines(file: &mut File, amount: usize) -> Result<(), Error> {\n\n let mut reader = RevBufReader::new(file);\n\n // The position from which the RevBufReader starts reading.\n\n // The file size might change while we're reading the file. 
Hence we have to save it now.\n\n let start_position = reader\n\n .get_mut()\n\n .seek(SeekFrom::Current(0))\n\n .map_err(|err| Error::IoError(\"seeking to start of file\".to_string(), err))?;\n\n let start_position: i64 = start_position.try_into().map_err(|_| {\n\n Error::Generic(\"Failed to convert start cursor position to i64\".to_string())\n\n })?;\n\n\n\n let mut total_read_bytes: i64 = 0;\n\n let mut found_lines = 0;\n\n\n\n // Read in 4KB chunks until there's either nothing left or we found `amount` newline characters.\n\n 'outer: loop {\n\n let mut buffer = vec![0; 4096];\n\n let read_bytes = reader\n\n .read(&mut buffer)\n", "file_path": "lib/src/log.rs", "rank": 64, "score": 189138.6864897114 }, { "content": "pub fn default_runtime_directory() -> Option<PathBuf> {\n\n None\n\n}\n\n\n", "file_path": "lib/src/platform/apple/directories.rs", "rank": 65, "score": 187675.7249301073 }, { "content": "/// Try to find `XDG_RUNTIME_DIR` in the environment.\n\npub fn default_runtime_directory() -> Option<PathBuf> {\n\n if let Ok(path) = std::env::var(\"XDG_RUNTIME_DIR\") {\n\n Some(expand_home(&PathBuf::from(path)))\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "lib/src/platform/linux/directories.rs", "rank": 66, "score": 187675.7249301073 }, { "content": "pub fn get_config_directories() -> Vec<PathBuf> {\n\n vec![\n\n // Windows Terminal stores its config file in the \"AppData/Local\" directory.\n\n default_config_directory(),\n\n PathBuf::from(\".\"),\n\n ]\n\n}\n", "file_path": "lib/src/platform/windows/directories.rs", "rank": 67, "score": 187491.08313861754 }, { "content": "/// Check, whether a specific process is exists or not\n\npub fn process_exists(pid: u32) -> bool {\n\n Path::new(&format!(\"/proc/{pid}\")).exists()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::thread::sleep;\n\n use std::time::Duration;\n\n\n\n /// Assert that certain process id no longer exists\n\n fn process_is_gone(pid: u32) -> bool {\n\n 
!process_exists(pid)\n\n }\n\n\n\n #[test]\n\n /// Simply check, whether spawning of a shell command works\n\n fn test_spawn_command() {\n\n let mut child = compile_shell_command(\"sleep 0.1\")\n\n .spawn()\n", "file_path": "daemon/platform/apple/process_helper.rs", "rank": 68, "score": 186063.69944790044 }, { "content": "/// Check, whether a specific process is exists or not\n\npub fn process_exists(pid: u32) -> bool {\n\n match Process::new(pid as i32) {\n\n Ok(process) => process.is_alive(),\n\n Err(_) => false,\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::thread::sleep;\n\n use std::time::Duration;\n\n\n\n use pretty_assertions::assert_eq;\n\n\n\n use super::*;\n\n\n\n /// Assert that certain process id no longer exists\n\n fn process_is_gone(pid: i32) -> bool {\n\n !process_exists(pid as u32)\n\n }\n", "file_path": "daemon/platform/linux/process_helper.rs", "rank": 69, "score": 186063.69944790044 }, { "content": "/// This is invoked, whenever a task is actually restarted (in-place) without creating a new task.\n\n/// Update a possibly changed path/command and reset all infos from the previous run.\n\n///\n\n/// The \"not in-place\" restart functionality is actually just a copy the finished task + create a\n\n/// new task, which is completely handled on the client-side.\n\nfn restart(state: &mut MutexGuard<State>, to_restart: &TasksToRestart, stashed: bool) {\n\n // Check if we actually know this task.\n\n let task = if let Some(task) = state.tasks.get_mut(&to_restart.task_id) {\n\n task\n\n } else {\n\n return;\n\n };\n\n\n\n // We cannot restart tasks that haven't finished yet.\n\n if !task.is_done() {\n\n return;\n\n }\n\n\n\n // Either enqueue the task or stash it.\n\n task.status = if stashed {\n\n TaskStatus::Stashed { enqueue_at: None }\n\n } else {\n\n TaskStatus::Queued\n\n };\n\n\n\n // Update command and path.\n\n task.original_command = to_restart.command.clone();\n\n task.command = insert_alias(to_restart.command.clone());\n\n 
task.path = to_restart.path.clone();\n\n\n\n // Reset all variables of any previous run.\n\n task.start = None;\n\n task.end = None;\n\n}\n", "file_path": "daemon/network/message_handler/restart.rs", "rank": 70, "score": 185212.21403624234 }, { "content": "/// Windowsspecific cleanup handling when getting a SIGINT/SIGTERM.\n\npub fn socket_cleanup(_settings: &Shared) -> Result<(), Error> {\n\n Ok(())\n\n}\n\n\n\n/// This is a helper struct for TCP connections.\n\n/// TCP should always be used in conjunction with TLS.\n\n/// That's why this helper exists, which encapsulates the logic of accepting a new\n\n/// connection and initializing the TLS layer on top of it.\n\n/// This way we can expose an `accept` function and implement the GenericListener trait.\n\npub struct TlsTcpListener {\n\n tcp_listener: TcpListener,\n\n tls_acceptor: TlsAcceptor,\n\n}\n\n\n\n/// A new trait, which can be used to represent Unix- and TcpListeners.\n\n/// This is necessary to easily write generic functions where both types can be used.\n", "file_path": "lib/src/network/platform/windows/socket.rs", "rank": 71, "score": 182434.25802053392 }, { "content": "pub fn compile_shell_command(command_string: &str) -> Command {\n\n // Chain two `powershell` commands, one that sets the output encoding to utf8 and then the user provided one.\n\n let mut command = Command::new(\"powershell\");\n\n command.arg(\"-c\").arg(format!(\n\n \"[Console]::OutputEncoding = [Text.UTF8Encoding]::UTF8; {}\",\n\n command_string\n\n ));\n\n\n\n command\n\n}\n\n\n", "file_path": "daemon/platform/windows/process_helper.rs", "rank": 72, "score": 181961.90654453533 }, { "content": "/// The daemon didn't send any log output, thereby we didn't request any.\n\n/// If that's the case, read the log file from the local pueue directory.\n\npub fn print_local_log(task_id: usize, colors: &Colors, settings: &Settings, lines: Option<usize>) {\n\n let mut file = match get_log_file_handle(task_id, &settings.shared.pueue_directory()) 
{\n\n Ok(file) => file,\n\n Err(err) => {\n\n println!(\"Failed to get log file handle: {err}\");\n\n return;\n\n }\n\n };\n\n // Stdout handler to directly write log file output to io::stdout\n\n // without having to load anything into memory.\n\n let mut stdout = io::stdout();\n\n\n\n print_local_file(\n\n &mut stdout,\n\n &mut file,\n\n &lines,\n\n style_text(\"output:\", Some(colors.green()), Some(Attribute::Bold)),\n\n );\n\n}\n\n\n", "file_path": "client/display/log/local.rs", "rank": 73, "score": 180980.4522519639 }, { "content": "/// Check if there exists an alias for a given command.\n\n/// Only the first word will be replaced.\n\npub fn insert_alias(command: String) -> String {\n\n let first = match command.split_whitespace().next() {\n\n Some(first) => first,\n\n None => return command,\n\n };\n\n\n\n let aliases = match get_aliases() {\n\n Err(err) => {\n\n warn!(\"Failed to open aliases file: {err}\");\n\n return command;\n\n }\n\n Ok(aliases) => aliases,\n\n };\n\n\n\n for (original, alias) in aliases.iter() {\n\n if original == first {\n\n return command.replacen(original, alias, 1);\n\n }\n\n }\n\n\n\n command\n\n}\n", "file_path": "lib/src/aliasing.rs", "rank": 74, "score": 180052.0256025708 }, { "content": "pub fn get_shared_settings() -> (Shared, TempDir) {\n\n // Create a temporary directory used for testing.\n\n let tempdir = TempDir::new(\"pueue_lib\").unwrap();\n\n let tempdir_path = tempdir.path();\n\n\n\n std::fs::create_dir(tempdir_path.join(\"certs\")).unwrap();\n\n\n\n let shared_settings = Shared {\n\n pueue_directory: Some(tempdir_path.to_path_buf()),\n\n runtime_directory: Some(tempdir_path.to_path_buf()),\n\n #[cfg(not(target_os = \"windows\"))]\n\n use_unix_socket: true,\n\n #[cfg(not(target_os = \"windows\"))]\n\n unix_socket_path: None,\n\n pid_path: None,\n\n host: \"localhost\".to_string(),\n\n port: pick_unused_port()\n\n .expect(\"There should be a free port\")\n\n .to_string(),\n\n daemon_cert: 
Some(tempdir_path.join(\"certs\").join(\"daemon.cert\")),\n\n daemon_key: Some(tempdir_path.join(\"certs\").join(\"daemon.key\")),\n\n shared_secret_path: Some(tempdir_path.join(\"secret\")),\n\n };\n\n\n\n (shared_settings, tempdir)\n\n}\n", "file_path": "lib/tests/helper.rs", "rank": 75, "score": 176146.02004279138 }, { "content": "/// Invoked when calling `pueue status`.\n\n/// Return the current state.\n\nfn get_status(state: &SharedState) -> Message {\n\n let state = state.lock().unwrap().clone();\n\n Message::StatusResponse(Box::new(state))\n\n}\n\n\n", "file_path": "daemon/network/message_handler/mod.rs", "rank": 76, "score": 175918.2120700522 }, { "content": "/// Send a signal to one of Pueue's child process handles.\n\n///\n\n/// There are two scenarios:\n\n///\n\n/// **Normal case**\n\n///\n\n/// A task, such as `sleep 60` get's spawned by the posix shell `sh`.\n\n/// This results in the process `sh -c 'sleep 60'`.\n\n/// Since the posix shell doesn't propagate any process signals to its children, we have to:\n\n/// 1. Send the signal to the shell.\n\n/// 2. Send the signal directly to the children.\n\n/// In our case this would be the `sleep 60` child process.\n\n///\n\n/// If the user also want's to send the signal to all child processes of the task,\n\n/// we have to get all child-processes of the child process.\n\n///\n\n/// **Special case**\n\n///\n\n/// The posix shell `sh` has some some inconsistent behavior.\n\n/// In some circumstances and environments, the `sh -c $command` doesn't spawn a `sh` process with a\n\n/// `$command` child-process, but rather spawns the `$command` as a top-level process directly.\n\n///\n\n/// This makes things a bit more complicated, since we have to find out whether a shell is spawned\n\n/// or not. 
If a shell is spawned, we do the **Normal case** handling.\n\n///\n\n/// If **no** shell is spawned, we have to send the signal to the top-level process only.\n\n///\n\n/// If the user also want's to send the signal to all child processes of the task,\n\n/// we have to get all child-processes of that `$command` process. and send them the signal.\n\n///\n\n/// Returns `Ok(true)`, if everything went alright\n\n/// Returns `Ok(false)`, if the process went away while we tried to send the signal.\n\npub fn send_signal_to_child(child: &Child, signal: Signal, send_to_children: bool) -> Result<bool> {\n\n let pid: i32 = child.id().try_into().unwrap();\n\n // Check whether this process actually spawned a shell.\n\n let is_shell = if let Ok(is_shell) = did_process_spawn_shell(pid) {\n\n is_shell\n\n } else {\n\n return Ok(false);\n\n };\n\n\n\n if is_shell {\n\n // If it's a shell, we have to send the signal to the actual shell and to all it's children.\n\n // There might be multiple children, for instance, when users use the `&` operator.\n\n // If the `send_to_children` flag is given, the\n\n\n\n // Get all children before sending the signal to the parent process.\n\n // Otherwise the parent might go away and we'll no longer be able to access the children.\n\n let shell_children = get_child_processes(pid);\n\n\n\n // Send the signal to the shell, don't propagate to its children yet.\n\n send_signal_to_process(pid, signal, false)?;\n", "file_path": "daemon/platform/linux/process_helper.rs", "rank": 77, "score": 174170.28068838466 }, { "content": "/// This is a simple small helper function with the purpose of easily styling text,\n\n/// while also prevent styling if we're printing to a non-tty output.\n\n/// If there's any kind of styling in the code, it should be done with the help of this function.\n\npub fn style_text<T: ToString>(\n\n text: T,\n\n color: Option<Color>,\n\n attribute: Option<Attribute>,\n\n) -> String {\n\n let text = text.to_string();\n\n // No tty, we 
aren't allowed to do any styling\n\n if !stdout().is_tty() {\n\n return text;\n\n }\n\n\n\n let mut styled = style(text);\n\n if let Some(color) = color {\n\n styled = styled.with(color);\n\n }\n\n if let Some(attribute) = attribute {\n\n styled = styled.attribute(attribute);\n\n }\n\n\n\n styled.to_string()\n\n}\n\n\n", "file_path": "client/display/helper.rs", "rank": 78, "score": 172275.58181042256 }, { "content": "pub fn print_groups(message: GroupResponseMessage, colors: &Colors) {\n\n let mut text = String::new();\n\n let mut group_iter = message.groups.iter().peekable();\n\n while let Some((name, group)) = group_iter.next() {\n\n let styled = get_group_headline(name, group, colors);\n\n\n\n text.push_str(&styled);\n\n if group_iter.peek().is_some() {\n\n text.push('\\n');\n\n }\n\n }\n\n println!(\"{}\", text);\n\n}\n", "file_path": "client/display/group.rs", "rank": 79, "score": 171057.50831717544 }, { "content": "/// Remove all files in the log directory.\n\npub fn reset_task_log_directory(path: &Path) -> Result<(), Error> {\n\n let task_log_dir = path.join(\"task_logs\");\n\n\n\n let files = read_dir(&task_log_dir)\n\n .map_err(|err| Error::IoPathError(task_log_dir, \"reading task log files\", err))?;\n\n\n\n for file in files.flatten() {\n\n if let Err(err) = remove_file(file.path()) {\n\n error!(\"Failed to delete log file: {err}\");\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n/// Read the last `amount` lines of a file to a string.\n\n///\n\n/// Only use this for logic that doesn't stream from daemon to client!\n\n/// For streaming logic use the `seek_to_last_lines` and compress any data.\n\n// We allow this clippy check.\n\n// The iterators cannot be chained, as RevBufReader.lines doesn't implement the necessary traits.\n", "file_path": "lib/src/log.rs", "rank": 80, "score": 169599.32166987943 }, { "content": "/// This is the base setup for all daemon test setups.\n\npub fn daemon_base_setup() -> Result<(Settings, TempDir)> {\n\n // Create a temporary 
directory used for testing.\n\n let tempdir = TempDir::new().unwrap();\n\n let tempdir_path = tempdir.path();\n\n\n\n std::fs::create_dir(tempdir_path.join(\"certs\")).unwrap();\n\n\n\n let shared = Shared {\n\n pueue_directory: Some(tempdir_path.to_path_buf()),\n\n runtime_directory: Some(tempdir_path.to_path_buf()),\n\n #[cfg(not(target_os = \"windows\"))]\n\n use_unix_socket: true,\n\n #[cfg(not(target_os = \"windows\"))]\n\n unix_socket_path: None,\n\n pid_path: None,\n\n host: \"localhost\".to_string(),\n\n port: \"51230\".to_string(),\n\n daemon_cert: Some(tempdir_path.join(\"certs\").join(\"daemon.cert\")),\n\n daemon_key: Some(tempdir_path.join(\"certs\").join(\"daemon.key\")),\n\n shared_secret_path: Some(tempdir_path.join(\"secret\")),\n", "file_path": "tests/fixtures/daemon.rs", "rank": 81, "score": 168190.55419800428 }, { "content": "/// Return the contents of the alias file, if it exists and can be parsed. \\\n\n/// The file has to be located in `pueue_directory` and named `pueue_aliases.yml`.\n\npub fn get_aliases() -> Result<HashMap<String, String>, Error> {\n\n // Go through all config directories and check for a alias file.\n\n let mut alias_file_path = None;\n\n for directory in get_config_directories() {\n\n let path = directory.join(\"pueue_aliases.yml\");\n\n if path.exists() {\n\n alias_file_path = Some(path);\n\n }\n\n }\n\n\n\n // Return early if we cannot find the file\n\n let path = match alias_file_path {\n\n None => {\n\n info!(\"Didn't find pueue alias file.\");\n\n return Ok(HashMap::new());\n\n }\n\n Some(alias_file_path) => alias_file_path,\n\n };\n\n\n\n // Read the file content\n", "file_path": "lib/src/aliasing.rs", "rank": 82, "score": 167489.26120514737 }, { "content": "pub fn send_internal_signal_to_child(\n\n _child: &Child,\n\n _signal: InternalSignal,\n\n _send_to_children: bool,\n\n) -> Result<bool> {\n\n bail!(\"Trying to send unix signal on a windows machine. 
This isn't supported.\");\n\n}\n\n\n", "file_path": "daemon/platform/windows/process_helper.rs", "rank": 83, "score": 166872.04574741307 }, { "content": "/// This the default certificates at the default `pueue_dir/certs` location.\n\npub fn create_certificates(shared_settings: &Shared) -> Result<(), Error> {\n\n let daemon_cert_path = shared_settings.daemon_cert();\n\n let daemon_key_path = shared_settings.daemon_key();\n\n\n\n if daemon_key_path.exists() || daemon_cert_path.exists() {\n\n if !(daemon_key_path.exists() && daemon_cert_path.exists()) {\n\n return Err(Error::CertificateFailure(\n\n \"Not all default certificates exist, some are missing. \\\n\n Please fix your cert/key paths.\\n \\\n\n You can also remove the `$pueue_directory/certs` directory \\\n\n and restart the daemon to create new certificates/keys.\"\n\n .into(),\n\n ));\n\n }\n\n info!(\"All default keys do exist.\");\n\n return Ok(());\n\n }\n\n\n\n let subject_alt_names = vec![\"pueue.local\".to_string(), \"localhost\".to_string()];\n\n\n", "file_path": "lib/src/network/certificate.rs", "rank": 84, "score": 166262.2965632227 }, { "content": "pub fn get_home_dir() -> PathBuf {\n\n dirs::home_dir().unwrap_or_else(|| PathBuf::from(\"/\"))\n\n}\n\n\n", "file_path": "lib/src/platform/apple/directories.rs", "rank": 85, "score": 162077.69043469956 }, { "content": "/// This is a small helper which determines the selection depending on given commandline\n\n/// parameters.\n\n/// If no parameters are given, it returns to the default group.\n\npub fn selection_from_params(\n\n all: bool,\n\n group: &Option<String>,\n\n task_ids: &[usize],\n\n) -> TaskSelection {\n\n if all {\n\n TaskSelection::All\n\n } else if let Some(group) = group {\n\n TaskSelection::Group(group.clone())\n\n } else if !task_ids.is_empty() {\n\n TaskSelection::TaskIds(task_ids.to_owned())\n\n } else {\n\n TaskSelection::Group(PUEUE_DEFAULT_GROUP.into())\n\n }\n\n}\n\n\n\nimpl Client {\n\n /// Connect to the daemon, authorize via 
secret and return a new initialized Client.\n\n pub async fn new(settings: Settings, opt: CliArguments) -> Result<Self> {\n\n // Connect to daemon and get stream used for communication.\n", "file_path": "client/client.rs", "rank": 86, "score": 160701.12961593014 }, { "content": "fn print_single_group(\n\n state: State,\n\n tasks: Vec<Task>,\n\n settings: &Settings,\n\n colors: &Colors,\n\n group_name: String,\n\n) {\n\n // Sort all tasks by their respective group;\n\n let mut sorted_tasks = sort_tasks_by_group(tasks);\n\n\n\n let group = if let Some(group) = state.groups.get(&group_name) {\n\n group\n\n } else {\n\n eprintln!(\"There exists no group \\\"{group_name}\\\"\");\n\n return;\n\n };\n\n\n\n // Only a single group is requested. Print that group and return.\n\n let tasks = sorted_tasks.entry(group_name.clone()).or_default();\n\n let headline = get_group_headline(&group_name, group, colors);\n\n println!(\"{headline}\");\n\n\n\n // Show a message if the requested group doesn't have any tasks.\n\n if tasks.is_empty() {\n\n println!(\"Task list is empty. 
Add tasks with `pueue add -g {group_name} -- [cmd]`\");\n\n return;\n\n }\n\n print_table(tasks, colors, settings);\n\n}\n\n\n", "file_path": "client/display/state.rs", "rank": 87, "score": 159740.5980051558 }, { "content": "/// Remove old logs that aren't needed any longer.\n\nfn rotate_state(state: &LockedState) -> Result<()> {\n\n let path = state.settings.shared.pueue_directory().join(\"log\");\n\n\n\n // Get all log files in the directory with their respective system time.\n\n let mut entries: BTreeMap<SystemTime, PathBuf> = BTreeMap::new();\n\n let mut directory_list = fs::read_dir(path)?;\n\n while let Some(Ok(entry)) = directory_list.next() {\n\n let path = entry.path();\n\n\n\n let metadata = entry.metadata()?;\n\n let time = metadata.modified()?;\n\n entries.insert(time, path);\n\n }\n\n\n\n // Remove all files above the threshold.\n\n // Old files are removed first (implictly by the BTree order).\n\n let mut number_entries = entries.len();\n\n let mut iter = entries.iter();\n\n while number_entries > 10 {\n\n if let Some((_, path)) = iter.next() {\n\n fs::remove_file(path)?;\n\n number_entries -= 1;\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "daemon/state_helper.rs", "rank": 88, "score": 157202.30165790676 }, { "content": "pub fn get_config_directories() -> Vec<PathBuf> {\n\n vec![default_config_directory(), PathBuf::from(\".\")]\n\n}\n\n\n", "file_path": "lib/src/platform/apple/directories.rs", "rank": 89, "score": 156287.3389679237 }, { "content": "pub fn get_config_directories() -> Vec<PathBuf> {\n\n vec![default_config_directory(), PathBuf::from(\".\")]\n\n}\n\n\n", "file_path": "lib/src/platform/linux/directories.rs", "rank": 90, "score": 156287.3389679237 }, { "content": "pub fn compile_shell_command(command_string: &str) -> Command {\n\n let mut command = Command::new(\"sh\");\n\n command.arg(\"-c\").arg(command_string);\n\n\n\n command\n\n}\n\n\n", "file_path": "daemon/platform/apple/process_helper.rs", "rank": 91, "score": 
151573.78407083877 }, { "content": "pub fn compile_shell_command(command_string: &str) -> Command {\n\n let mut command = Command::new(\"sh\");\n\n command.arg(\"-c\").arg(command_string);\n\n\n\n command\n\n}\n\n\n", "file_path": "daemon/platform/linux/process_helper.rs", "rank": 92, "score": 151573.78407083877 }, { "content": "// Log output is send in a compressed form from the daemon.\n\n// We have to unpack it first.\n\npub fn decompress_log(bytes: Vec<u8>) -> Result<String> {\n\n let mut decoder = FrameDecoder::new(&bytes[..]);\n\n let mut output = String::new();\n\n decoder\n\n .read_to_string(&mut output)\n\n .context(\"Failed to decompress remote log output\")?;\n\n\n\n Ok(output)\n\n}\n\n\n\n/// Convenience function to get the log of a specific task.\n\n/// `lines: None` requests all log lines.\n\npub async fn get_task_log(shared: &Shared, task_id: usize, lines: Option<usize>) -> Result<String> {\n\n let message = Message::Log(LogRequestMessage {\n\n task_ids: vec![task_id],\n\n send_logs: true,\n\n lines,\n\n });\n\n let response = send_message(shared, message).await?;\n\n\n", "file_path": "tests/helper/log.rs", "rank": 93, "score": 150770.03960105428 }, { "content": "/// Configure the server using rusttls. \\\n\n/// A TLS server needs a certificate and a fitting private key.\n\npub fn get_tls_listener(settings: &Shared) -> Result<TlsAcceptor, Error> {\n\n // Set the server-side key and certificate that should be used for all communication.\n\n let certs = load_certs(&settings.daemon_cert())?;\n\n let key = load_key(&settings.daemon_key())?;\n\n\n\n let config = ServerConfig::builder()\n\n .with_safe_default_cipher_suites()\n\n .with_safe_default_kx_groups()\n\n .with_safe_default_protocol_versions()\n\n .expect(\"Couldn't enforce TLS1.2 and TLS 1.3. 
This is a bug.\")\n\n .with_no_client_auth()\n\n .with_single_cert(certs, key)\n\n .map_err(|err| Error::CertificateFailure(format!(\"Failed to build TLS Acceptor: {err}\")))?;\n\n\n\n Ok(TlsAcceptor::from(Arc::new(config)))\n\n}\n\n\n", "file_path": "lib/src/network/tls.rs", "rank": 94, "score": 147651.22441367648 }, { "content": "/// Print some tasks into a nicely formatted table\n\nfn print_table(tasks: &[Task], colors: &Colors, settings: &Settings) {\n\n let (has_delayed_tasks, has_dependencies, has_labels) = has_special_columns(tasks);\n\n\n\n // Create table header row\n\n let mut headers = vec![Cell::new(\"Id\"), Cell::new(\"Status\")];\n\n\n\n if has_delayed_tasks {\n\n headers.push(Cell::new(\"Enqueue At\"));\n\n }\n\n if has_dependencies {\n\n headers.push(Cell::new(\"Deps\"));\n\n }\n\n if has_labels {\n\n headers.push(Cell::new(\"Label\"));\n\n }\n\n\n\n headers.append(&mut vec![\n\n Cell::new(\"Command\"),\n\n Cell::new(\"Path\"),\n\n Cell::new(\"Start\"),\n", "file_path": "client/display/state.rs", "rank": 95, "score": 146978.2277224411 }, { "content": "/// Print the log ouput of finished tasks.\n\n/// Either print the logs of every task\n\n/// or only print the logs of the specified tasks.\n\npub fn print_logs(\n\n mut task_logs: BTreeMap<usize, TaskLogMessage>,\n\n cli_command: &SubCommand,\n\n colors: &Colors,\n\n settings: &Settings,\n\n) {\n\n // Get actual commandline options.\n\n // This is necessary to know how we should display/return the log information.\n\n let (json, task_ids, lines, full) = match cli_command {\n\n SubCommand::Log {\n\n json,\n\n task_ids,\n\n lines,\n\n full,\n\n } => (*json, task_ids.clone(), *lines, *full),\n\n _ => panic!(\"Got wrong Subcommand {cli_command:?} in print_log. 
This shouldn't happen\"),\n\n };\n\n\n\n let lines = determine_log_line_amount(full, &lines);\n\n\n", "file_path": "client/display/log/mod.rs", "rank": 96, "score": 144114.53289938293 }, { "content": "/// Unix specific cleanup handling when getting a SIGINT/SIGTERM.\n\npub fn socket_cleanup(settings: &Shared) -> Result<(), std::io::Error> {\n\n // Clean up the unix socket if we're using it and it exists.\n\n if settings.use_unix_socket && PathBuf::from(&settings.unix_socket_path()).exists() {\n\n std::fs::remove_file(&settings.unix_socket_path())?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n\n/// A new trait, which can be used to represent Unix- and TcpListeners. \\\n\n/// This is necessary to easily write generic functions where both types can be used.\n", "file_path": "lib/src/network/platform/unix/socket.rs", "rank": 97, "score": 143499.49653780053 }, { "content": "/// Return the output of a task. \\\n\n/// Task output is compressed using [snap] to save some memory and bandwidth.\n\npub fn read_and_compress_log_file(\n\n task_id: usize,\n\n path: &Path,\n\n lines: Option<usize>,\n\n) -> Result<Vec<u8>, Error> {\n\n let mut file = get_log_file_handle(task_id, path)?;\n\n\n\n let mut content = Vec::new();\n\n\n\n // Move the cursor to the last few lines of both files.\n\n if let Some(lines) = lines {\n\n seek_to_last_lines(&mut file, lines)?;\n\n }\n\n\n\n // Compress the full log input and pipe it into the snappy compressor\n\n {\n\n let mut compressor = FrameEncoder::new(&mut content);\n\n io::copy(&mut file, &mut compressor)\n\n .map_err(|err| Error::IoError(\"compressing log output\".to_string(), err))?;\n\n }\n\n\n\n Ok(content)\n\n}\n\n\n", "file_path": "lib/src/log.rs", "rank": 98, "score": 142064.5973552272 }, { "content": "/// Initialize all directories needed for normal operation.\n\nfn init_directories(pueue_dir: &Path) -> Result<()> {\n\n // Pueue base path\n\n if !pueue_dir.exists() {\n\n create_dir_all(&pueue_dir).map_err(|err| {\n\n 
Error::IoPathError(pueue_dir.to_path_buf(), \"creating main directory\", err)\n\n })?;\n\n }\n\n\n\n // Task log dir\n\n let log_dir = pueue_dir.join(\"log\");\n\n if !log_dir.exists() {\n\n create_dir_all(&log_dir)\n\n .map_err(|err| Error::IoPathError(log_dir, \"creating log directory\", err))?;\n\n }\n\n\n\n // Task certs dir\n\n let certs_dir = pueue_dir.join(\"certs\");\n\n if !certs_dir.exists() {\n\n create_dir_all(&certs_dir)\n\n .map_err(|err| Error::IoPathError(certs_dir, \"creating certificate directory\", err))?;\n", "file_path": "daemon/lib.rs", "rank": 99, "score": 142018.42868512106 } ]
Rust
capi/src/optimizer.rs
shinolab/acoustic-field-calculator
620c363c5003cdff085fe41db10ebfbddd25153d
/* * File: optimizer.rs * Project: src * Created Date: 22/09/2020 * Author: Shun Suzuki * ----- * Last Modified: 17/06/2021 * Modified By: Shun Suzuki (suzuki@hapis.k.u-tokyo.ac.jp) * ----- * Copyright (c) 2020 Hapis Lab. All rights reserved. * */ use libc::c_char; use std::ffi::c_void; use std::ffi::CStr; use std::mem::forget; use super::type_inference_aux::*; use acoustic_field_calculator::prelude::*; use acoustic_field_optimizer::multiple_foci::*; use acoustic_field_optimizer::*; macro_rules! gen_match_src_type { ([$($src_type:ident),*], $st: ident, $handle: ident, $expr: expr) => { match SourceType::from_i32($st) { $(SourceType::$src_type => { let mut system: Box<UniformSystem<$src_type>> = Box::from_raw($handle as *mut _); $expr.optimize(&mut system); forget(system); },)* } }; ($st: ident, $handle: ident, $expr: expr) => { sources!(gen_match_src_type; $st, $handle, $expr) } } #[no_mangle] #[allow(improper_ctypes_definitions)] pub unsafe extern "C" fn AFO_FocalPoint(handle: *mut c_void, point: Vector3, source_type: i32) { gen_match_src_type!(source_type, handle, FocalPoint::new(point)); } #[no_mangle] #[allow(improper_ctypes_definitions)] pub unsafe extern "C" fn AFO_BesselBeam( handle: *mut c_void, point: Vector3, dir: Vector3, theta: f32, source_type: i32, ) { gen_match_src_type!(source_type, handle, BesselBeam::new(point, dir, theta)); } #[no_mangle] #[allow(improper_ctypes_definitions)] pub unsafe extern "C" fn AFO_IFFT( handle: *mut c_void, path: *const c_char, bottom_left: Vector3, top_left: Vector3, bottom_right: Vector3, spacing: f32, z: f32, source_type: i32, ) { let path = CStr::from_ptr(path).to_str().unwrap(); gen_match_src_type!( source_type, handle, Ifft::new(path, bottom_left, top_left, bottom_right, spacing, z) ); } #[no_mangle] pub unsafe extern "C" fn AFO_GSPAT( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, 
len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!( source_type, handle, Gspat::new(foci.to_vec(), amps.to_vec()) ); } #[no_mangle] pub unsafe extern "C" fn AFO_GS( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!(source_type, handle, GS::new(foci.to_vec(), amps.to_vec())); } #[no_mangle] pub unsafe extern "C" fn AFO_Naive( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!( source_type, handle, Naive::new(foci.to_vec(), amps.to_vec()) ); } #[no_mangle] pub unsafe extern "C" fn AFO_Horn( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!(source_type, handle, Horn::new(foci.to_vec(), amps.to_vec())); } #[no_mangle] pub unsafe extern "C" fn AFO_Long( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!(source_type, handle, Long::new(foci.to_vec(), amps.to_vec())); } #[no_mangle] pub unsafe extern "C" fn AFO_APO( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, lambda: f32, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!( source_type, handle, Apo::new(foci.to_vec(), amps.to_vec(), 
lambda) ); } #[no_mangle] pub unsafe extern "C" fn AFO_GaussNewton( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!( source_type, handle, GaussNewton::new(foci.to_vec(), amps.to_vec()) ); } #[no_mangle] pub unsafe extern "C" fn AFO_GradientDescent( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!( source_type, handle, GradientDescent::new(foci.to_vec(), amps.to_vec()) ); } #[no_mangle] pub unsafe extern "C" fn AFO_LM( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!(source_type, handle, LM::new(foci.to_vec(), amps.to_vec())); } #[no_mangle] pub unsafe extern "C" fn AFO_Greedy( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, phase_div: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!( source_type, handle, Greedy::new(foci.to_vec(), amps.to_vec(), phase_div as usize) ); }
/* * File: optimizer.rs * Project: src * Created Date: 22/09/2020 * Author: Shun Suzuki * ----- * Last Modified: 17/06/2021 * Modified By: Shun Suzuki (suzuki@hapis.k.u-tokyo.ac.jp) * ----- * Copyright (c) 2020 Hapis Lab. All rights reserved. * */ use libc::c_char; use std::ffi::c_void; use std::ffi::CStr; use std::mem::forget; use super::type_inference_aux::*; use acoustic_field_calculator::prelude::*; use acoustic_field_optimizer::multiple_foci::*; use acoustic_field_optimizer::*; macro_rules! gen_match_src_type { ([$($src_type:ident),*], $st: ident, $handle: ident, $expr: expr) => { match SourceType::from_i32($st) { $(SourceType::$src_type => { let mut system: Box<UniformSystem<$src_type>> = Box::from_raw($handle as *mut _); $expr.optimize(&mut system); forget(system); },)* } }; ($st: ident, $handle: ident, $expr: expr) => { sources!(gen_match_src_type; $st, $handle, $expr) } } #[no_mangle] #[allow(improper_ctypes_definitions)] pub unsafe extern "C" fn AFO_FocalPoint(handle: *mut c_void, point: Vector3, source_type: i32) { gen_match_src_type!(source_type, handle, FocalPoint::new(point)); } #[no_mangle] #[allow(improper_ctypes_definitions)] pub unsafe extern "C" fn AFO_BesselBeam( handle: *mut c_void, point: Vector3, dir: Vector3, theta: f32, source_type: i32, ) { gen_match_src_type!(source_type, handle, BesselBeam::new(point, dir, theta)); } #[no_mangle] #[allow(improper_ctypes_definitions)] pub unsafe extern "C" fn AFO_IFFT( handle: *mut c_void, path: *const c_char, bottom_left: Vector3, top_left: Vector3, bottom_right: Vector3, spacing: f32, z: f32, source_type: i32, ) { let path = CStr::from_ptr(path).to_str().unwrap(); gen_match_src_type!( source_type, handle, Ifft::new(path, bottom_left, top_left, bottom_right, spacing, z) ); } #[no_mangle] pub unsafe extern "C" fn AFO_GSPAT( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foc
#[no_mangle] pub unsafe extern "C" fn AFO_GS( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!(source_type, handle, GS::new(foci.to_vec(), amps.to_vec())); } #[no_mangle] pub unsafe extern "C" fn AFO_Naive( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!( source_type, handle, Naive::new(foci.to_vec(), amps.to_vec()) ); } #[no_mangle] pub unsafe extern "C" fn AFO_Horn( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!(source_type, handle, Horn::new(foci.to_vec(), amps.to_vec())); } #[no_mangle] pub unsafe extern "C" fn AFO_Long( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!(source_type, handle, Long::new(foci.to_vec(), amps.to_vec())); } #[no_mangle] pub unsafe extern "C" fn AFO_APO( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, lambda: f32, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!( source_type, handle, Apo::new(foci.to_vec(), amps.to_vec(), lambda) ); } #[no_mangle] pub unsafe extern "C" fn AFO_GaussNewton( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, 
source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!( source_type, handle, GaussNewton::new(foci.to_vec(), amps.to_vec()) ); } #[no_mangle] pub unsafe extern "C" fn AFO_GradientDescent( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!( source_type, handle, GradientDescent::new(foci.to_vec(), amps.to_vec()) ); } #[no_mangle] pub unsafe extern "C" fn AFO_LM( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!(source_type, handle, LM::new(foci.to_vec(), amps.to_vec())); } #[no_mangle] pub unsafe extern "C" fn AFO_Greedy( handle: *mut c_void, foci: *const c_void, amps: *const f32, size: u64, phase_div: u64, source_type: i32, ) { let len = size as usize; let foci = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!( source_type, handle, Greedy::new(foci.to_vec(), amps.to_vec(), phase_div as usize) ); }
i = std::slice::from_raw_parts(foci as *const Vector3, len); let amps = std::slice::from_raw_parts(amps, len); gen_match_src_type!( source_type, handle, Gspat::new(foci.to_vec(), amps.to_vec()) ); }
function_block-function_prefixed
[ { "content": "pub fn to_vec4(v: Vector3) -> [f32; 4] {\n\n [v[0] as f32, v[1] as f32, v[2] as f32, 0.]\n\n}\n", "file_path": "acoustic-field-calculator/src/gpu/gpu_prelude.rs", "rank": 0, "score": 210100.9783508046 }, { "content": "#[inline(always)]\n\npub fn sub(a: Vector3, b: Vector3) -> Vector3 {\n\n #[cfg(all(feature = \"fmath\", target_arch = \"x86_64\", not(feature = \"accurate\")))]\n\n {\n\n fmath::sub(a, b)\n\n }\n\n #[cfg(not(all(feature = \"fmath\", target_arch = \"x86_64\", not(feature = \"accurate\"))))]\n\n {\n\n a - b\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn sub_test() {\n\n let a = Vector3::new(10., 20., 30.);\n\n let b = Vector3::new(5., 6., 7.);\n\n assert_eq!(sub(a, b), a - b);\n\n }\n\n}\n", "file_path": "acoustic-field-calculator/src/fmath/simd_vec.rs", "rank": 1, "score": 185694.2244007185 }, { "content": "fn fft2d(array: &mut MatrixXcf, w: usize, h: usize) -> MatrixXcf {\n\n for i in 0..h {\n\n let mut planner = FftPlanner::new();\n\n let fft = planner.plan_fft(w, FftDirection::Forward);\n\n fft.process(&mut array.as_mut_slice()[(i * w)..(i * w + w)]);\n\n }\n\n\n\n let mut result = array.transpose();\n\n for i in 0..w {\n\n let mut planner = FftPlanner::new();\n\n let fft = planner.plan_fft(h, FftDirection::Forward);\n\n fft.process(&mut result.as_mut_slice()[(i * h)..(i * h + h)]);\n\n }\n\n\n\n result.transpose()\n\n}\n\n\n\nimpl Optimizer for Ifft {\n\n #[allow(clippy::many_single_char_names)]\n\n #[allow(non_snake_case)]\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/fft/ifft.rs", "rank": 2, "score": 164474.25245616573 }, { "content": "#[allow(non_snake_case)]\n\npub fn make_T(x: &VectorXf, n: usize, m: usize) -> VectorXcf {\n\n VectorXcf::from_iterator(n + m, x.iter().map(|x| Complex::new(0., -x).exp()))\n\n}\n\n\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/nls/macros.rs", "rank": 3, "score": 163890.41410172684 }, { "content": "pub fn to_four_multiple(x: 
usize) -> usize {\n\n ((x - 1) / 4 + 1) * 4\n\n}\n\n\n", "file_path": "acoustic-field-calculator/src/gpu/gpu_prelude.rs", "rank": 4, "score": 159141.20939171556 }, { "content": "#[allow(non_snake_case)]\n\npub fn calc_Fx(BhB: &MatrixXcf, x: &VectorXf, n: usize, m: usize) -> Float {\n\n let t = VectorXcf::from_iterator(n + m, x.iter().map(|&x| Complex::new(0., x).exp()));\n\n (t.adjoint() * BhB * t)[(0, 0)].re\n\n}\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/nls/macros.rs", "rank": 5, "score": 152948.95287782664 }, { "content": "pub fn propagate<S: WaveSource>(\n\n source: &S,\n\n atten: Float,\n\n wavenum: Float,\n\n target: Vector3,\n\n) -> Complex {\n\n let diff = fmath::sub(target, source.position());\n\n let dist = diff.norm();\n\n let theta = fmath::acos(source.direction().dot(&diff) / dist);\n\n let d = S::directivity(theta);\n\n let r = source.amp() * d * (-dist * atten).exp() / dist;\n\n let phi = source.phase() + wavenum * dist;\n\n Complex::from_polar(r, phi)\n\n}\n\n\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/macros.rs", "rank": 6, "score": 151427.52530565456 }, { "content": "pub fn append_matrix_col(to: MatrixXcf, src: &MatrixXcf) -> MatrixXcf {\n\n assert_eq!(to.nrows(), src.nrows());\n\n\n\n let new_rows = to.nrows();\n\n let to_cols = to.ncols();\n\n let new_cols = to.ncols() + src.ncols();\n\n\n\n let mut new_mat = to.resize(new_rows, new_cols, Default::default());\n\n new_mat\n\n .slice_mut((0, to_cols), (src.nrows(), src.ncols()))\n\n .copy_from(src);\n\n\n\n new_mat\n\n}\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/macros.rs", "rank": 7, "score": 149834.31769574335 }, { "content": "pub fn generate_propagation_matrix<S: WaveSource>(\n\n system: &UniformSystem<S>,\n\n foci: &[Vector3],\n\n) -> MatrixXcf {\n\n let sources = system.wave_sources();\n\n let wavenums = system.wavenums();\n\n let attens = system.attens();\n\n\n\n let m = foci.len();\n\n let n = sources.len();\n\n 
MatrixXcf::from_iterator(\n\n m,\n\n n,\n\n (0..n)\n\n .map(|i| {\n\n foci.iter()\n\n .map(|&fp| propagate(&sources[i], attens[i], wavenums[i], fp))\n\n .collect::<Vec<_>>()\n\n })\n\n .flatten(),\n\n )\n\n}\n\n\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/macros.rs", "rank": 8, "score": 145885.60063957944 }, { "content": "#[allow(non_snake_case)]\n\npub fn make_BhB<S: WaveSource>(\n\n system: &UniformSystem<S>,\n\n amps: &[Float],\n\n foci: &[Vector3],\n\n m: usize,\n\n) -> MatrixXcf {\n\n let P = MatrixXcf::from_diagonal(&VectorXcf::from_iterator(\n\n m,\n\n amps.iter().map(|a| Complex::new(-a, 0.)),\n\n ));\n\n let G = generate_propagation_matrix(system, foci);\n\n\n\n let B = append_matrix_col(G, &P);\n\n B.adjoint() * B\n\n}\n\n\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/nls/macros.rs", "rank": 9, "score": 143307.90600727202 }, { "content": "fn fftshift(array: &MatrixXcf, w: usize, h: usize) -> MatrixXcf {\n\n let mut result = MatrixXcf::zeros(w, h);\n\n\n\n let half_w = (w + 1) >> 1;\n\n let half_h = (h + 1) >> 1;\n\n\n\n // 2rd to 4th\n\n result\n\n .slice_mut((w - half_w, h - half_h), (half_w, half_h))\n\n .copy_from(&array.slice((0, 0), (half_w, half_h)));\n\n\n\n // 1st to 3rd\n\n result\n\n .slice_mut((0, h - half_h), (w - half_w, half_h))\n\n .copy_from(&array.slice((half_w, 0), (w - half_w, half_h)));\n\n\n\n // 3rd to 1st\n\n result\n\n .slice_mut((w - half_w, 0), (half_w, h - half_h))\n\n .copy_from(&array.slice((0, half_h), (half_w, h - half_h)));\n\n\n\n // 2rd to 4th\n\n result\n\n .slice_mut((0, 0), (w - half_w, h - half_h))\n\n .copy_from(&array.slice((half_w, half_h), (w - half_w, h - half_h)));\n\n\n\n result\n\n}\n\n\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/fft/ifft.rs", "rank": 10, "score": 128706.3641294174 }, { "content": "#[allow(non_snake_case)]\n\npub fn calc_Jtf(BhB: &MatrixXcf, T: &VectorXcf) -> VectorXf {\n\n let TTh = T * T.adjoint();\n\n let BhB_TTh = 
BhB.component_mul(&TTh);\n\n BhB_TTh.map(|c| c.im).column_sum()\n\n}\n\n\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/nls/macros.rs", "rank": 11, "score": 123343.84206012059 }, { "content": "#[inline(always)]\n\n#[allow(clippy::many_single_char_names)]\n\npub fn exp(x: Float) -> Float {\n\n #[cfg(all(feature = \"fmath\", not(feature = \"accurate\")))]\n\n {\n\n fmath::exp(x)\n\n }\n\n #[cfg(not(all(feature = \"fmath\", not(feature = \"accurate\"))))]\n\n {\n\n x.exp()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use float_cmp::approx_eq;\n\n\n\n const MIN: Float = -104.0;\n\n const MAX: Float = 89.0;\n\n const STEP: Float = 0.01;\n\n const SIZE: usize = ((MAX - MIN) / STEP) as usize;\n\n\n\n #[test]\n\n fn exp_test() {\n\n let _ = (0..SIZE)\n\n .map(|n| MIN + n as Float * STEP)\n\n .map(|x| approx_eq!(Float, x.exp(), exp(x)))\n\n .collect::<Vec<_>>();\n\n }\n\n}\n", "file_path": "acoustic-field-calculator/src/fmath/exp.rs", "rank": 12, "score": 122732.16847851852 }, { "content": "#[inline(always)]\n\npub fn acos(x: Float) -> Float {\n\n #[cfg(all(feature = \"fmath\", not(feature = \"accurate\")))]\n\n {\n\n fmath::acos(x)\n\n }\n\n #[cfg(not(all(feature = \"fmath\", not(feature = \"accurate\"))))]\n\n {\n\n x.acos()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use float_cmp::approx_eq;\n\n\n\n const MIN: Float = -1.0;\n\n const MAX: Float = 1.0;\n\n const STEP: Float = 0.01;\n\n const SIZE: usize = ((MAX - MIN) / STEP) as usize;\n\n\n\n #[test]\n\n fn acos_test() {\n\n let _ = (0..SIZE)\n\n .map(|n| MIN + n as Float * STEP)\n\n .map(|x| approx_eq!(Float, x.acos(), acos(x)))\n\n .collect::<Vec<_>>();\n\n }\n\n}\n", "file_path": "acoustic-field-calculator/src/fmath/trigonometric.rs", "rank": 13, "score": 122732.16847851852 }, { "content": "/// Calculates sound speed from temperature\n\n///\n\n/// # Arguments\n\n///\n\n/// * `t` - Temperature [K]\n\n///\n\npub fn calc_sound_speed(t: Float) -> Float {\n\n 
331.3 * (t / 273.15).sqrt() * 1000.0\n\n}\n", "file_path": "acoustic-field-calculator/src/core/sound_speed.rs", "rank": 14, "score": 116061.85329016953 }, { "content": "#[allow(non_snake_case)]\n\npub fn calc_JtJ_Jtf(BhB: &MatrixXcf, T: &VectorXcf) -> (MatrixXf, VectorXf) {\n\n let TTh = T * T.adjoint();\n\n let BhB_TTh = BhB.component_mul(&TTh);\n\n let JtJ = BhB_TTh.map(|c| c.re);\n\n let Jtf = BhB_TTh.map(|c| c.im).column_sum();\n\n (JtJ, Jtf)\n\n}\n\n\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/nls/macros.rs", "rank": 15, "score": 113871.94011671391 }, { "content": "fn append_matrix_row(to: MatrixXcf, src: &MatrixXcf) -> MatrixXcf {\n\n assert_eq!(to.ncols(), src.ncols());\n\n\n\n let new_cols = to.ncols();\n\n let to_rows = to.nrows();\n\n let new_rows = to.nrows() + src.nrows();\n\n\n\n let mut new_mat = to.resize(new_rows, new_cols, Default::default());\n\n new_mat\n\n .slice_mut((to_rows, 0), (src.nrows(), src.ncols()))\n\n .copy_from(src);\n\n\n\n new_mat\n\n}\n\n\n\nimpl Optimizer for Long {\n\n #[allow(non_snake_case)]\n\n #[allow(clippy::many_single_char_names)]\n\n fn optimize<S: WaveSource>(&self, system: &mut UniformSystem<S>) {\n\n for source in system.wave_sources_mut() {\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/matrix/long.rs", "rank": 16, "score": 112592.49256382573 }, { "content": "pub trait SizedArea: ObserveArea {\n\n fn size(&self) -> (u32, u32, u32);\n\n}\n\n\n\nimpl<D> SizedArea for GridArea<D> {\n\n fn size(&self) -> (u32, u32, u32) {\n\n let b = self.bounds();\n\n (b[0] as u32, b[1] as u32, b[2] as u32)\n\n }\n\n}\n\n\n\nimpl SizedArea for ScatterArea {\n\n fn size(&self) -> (u32, u32, u32) {\n\n (self.points().len() as u32, 1, 1)\n\n }\n\n}\n", "file_path": "acoustic-field-calculator/src/gpu/observe_area/traits.rs", "rank": 17, "score": 104341.65246889196 }, { "content": "pub trait WaveSourceContainer<S: WaveSource> {\n\n /// Returns all of the wave sources\n\n fn wave_sources(&self) -> &[S];\n\n\n\n 
/// Returns all of the wave sources as mutable\n\n fn wave_sources_mut(&mut self) -> &mut Vec<S>;\n\n\n\n /// Add new wave source\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `source` - Wave source\n\n ///\n\n fn add_wave_source(&mut self, source: S);\n\n\n\n /// Add new wave sources\n\n ///\n\n /// # Arguments\n\n ///\n\n /// * `sources` - A vector of wave sources\n\n ///\n\n fn add_wave_sources(&mut self, sources: Vec<S>) {\n\n for source in sources {\n\n self.add_wave_source(source)\n\n }\n\n }\n\n}\n\n\n", "file_path": "acoustic-field-calculator/src/system/traits.rs", "rank": 18, "score": 101903.65778733464 }, { "content": "fn main() {\n\n let array_center = Vector3::new(\n\n TRANS_SIZE * (NUM_TRANS_X - 1) as Float / 2.0,\n\n TRANS_SIZE * (NUM_TRANS_Y - 1) as Float / 2.0,\n\n 0.,\n\n );\n\n let z = 150.0;\n\n let focal_pos = array_center + z * Vector3::z();\n\n\n\n let mut system = UniformSystem::new(TEMPERATURE);\n\n for y in 0..NUM_TRANS_Y {\n\n for x in 0..NUM_TRANS_X {\n\n let pos = Vector3::new(TRANS_SIZE * x as Float, TRANS_SIZE * y as Float, 0.);\n\n system.add_wave_source(T4010A1::new(pos, Vector3::z(), 1.0, 0.0, FREQUENCY));\n\n }\n\n }\n\n\n\n let r = 200.0;\n\n let area = GridAreaBuilder::new()\n\n .x_range(array_center[0] - r / 2.0, array_center[0] + r / 2.0)\n", "file_path": "acoustic-field-optimizer/examples/foci.rs", "rank": 19, "score": 97158.9013743735 }, { "content": "/// Returns an attenuation coefficients due to atmospheric absorption in a unit of [Np/mm].\n\n///\n\n/// Bass, Henry E., et al. 
\"Atmospheric absorption of sound: Further developments.\" The Journal of the Acoustical Society of America 97.1 (1995): 680-683.\n\n///\n\n/// # Arguments\n\n///\n\n/// * `freq` - A frequency of sound [Hz]\n\n/// * `hr` - A relative humidity [%]\n\n/// * `ps` - An atmospheric pressure [atm]\n\n/// * `ps0` - A reference atmospheric pressure [atm]\n\n/// * `t` - An atmospheric temperature [K]\n\n///\n\npub fn attenuation_coef(freq: Float, hr: Float, ps: Float, ps0: Float, t: Float) -> Float {\n\n let ten: Float = 10.0;\n\n let psat = ps0 * ten.powf(-6.8346 * (T01 / t).powf(1.261) + 4.6151);\n\n let h = ps0 * (hr / ps) * (psat / ps0);\n\n let f_ro = (24. + 4.04e4 * h * (0.02 + h) / (0.391 + h)) / ps0;\n\n let f_rn = (1. / ps0)\n\n * (9. + 280. * h * (-4.17 * ((T0 / t).powf(1. / 3.) - 1.)).exp())\n\n * (T0 / t).powf(1. / 2.);\n\n let f = freq / ps;\n\n\n\n let alpha = (f * f) / ps0\n\n * ps\n\n * (1.84 * (t / T0).powf(1. / 2.) * 1e-11\n\n + (t / T0).powf(-5. / 2.)\n\n * (0.01278 * (-2239.1 / t).exp() / (f_ro + f * f / f_ro)\n\n + 0.1068 * (-3352. 
/ t).exp() / (f_rn + f * f / f_rn)));\n\n alpha * 1e-3\n\n}\n", "file_path": "acoustic-field-calculator/src/core/attenuation.rs", "rank": 20, "score": 91327.39751891798 }, { "content": "fn pseudo_inverse_with_reg(m: &MatrixXcf, alpha: Float) -> MatrixXcf {\n\n let svd = m.clone().svd(true, true);\n\n let s_inv = MatrixXcf::from_diagonal(\n\n &svd.singular_values\n\n .map(|s| Complex::new(s / (s * s + alpha * alpha), 0.)),\n\n );\n\n match (&svd.v_t, &svd.u) {\n\n (Some(v_t), Some(u)) => v_t.adjoint() * s_inv * u.adjoint(),\n\n _ => unreachable!(),\n\n }\n\n}\n\n\n\nimpl Optimizer for Horn {\n\n #[allow(clippy::many_single_char_names)]\n\n #[allow(non_snake_case)]\n\n fn optimize<S: WaveSource>(&self, system: &mut UniformSystem<S>) {\n\n for source in system.wave_sources_mut() {\n\n source.set_phase(0.);\n\n }\n\n\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/matrix/horn.rs", "rank": 21, "score": 90761.19042791225 }, { "content": "pub trait PropagationMedium<S: WaveSource>: Sync + WaveSourceContainer<S> {\n\n fn propagate(&self, idx: usize, target: Vector3) -> Complex;\n\n fn propagate_all(&self, target: Vector3) -> Complex;\n\n}\n", "file_path": "acoustic-field-calculator/src/system/traits.rs", "rank": 22, "score": 90067.64821698761 }, { "content": "pub trait GpuPropagationMedium<S: WaveSource>: WaveSourceContainer<S> {\n\n fn wavenums(&self) -> &[f32];\n\n fn attenuations(&self) -> &[f32];\n\n fn directivities(&self) -> Vec<f32>;\n\n}\n", "file_path": "acoustic-field-calculator/src/gpu/system/traits.rs", "rank": 23, "score": 89579.23421667342 }, { "content": "pub trait AccPropagationMedium<S: WaveSource>: Sync + WaveSourceContainer<S> {\n\n fn propagate(&self, target: Vector3) -> Complex;\n\n}\n\n\n\nimpl<S: WaveSource> AccPropagationMedium<S> for UniformSystem<S> {\n\n fn propagate(&self, target: Vector3) -> Complex {\n\n let mut re_heap = BinaryHeap::new_min();\n\n let mut im_heap = BinaryHeap::new_min();\n\n\n\n let sources = 
self.wave_sources();\n\n let wavenums = self.wavenums();\n\n let attens = self.attens();\n\n\n\n re_heap.reserve(sources.len());\n\n im_heap.reserve(sources.len());\n\n\n\n for i in 0..sources.len() {\n\n let source = &sources[i];\n\n let diff = crate::fmath::sub(target, source.position());\n\n let dist = diff.norm();\n", "file_path": "acoustic-field-calculator/src/accurate/system_acc.rs", "rank": 24, "score": 86794.57093226018 }, { "content": "pub trait Optimizer {\n\n fn optimize<S: WaveSource>(&self, system: &mut UniformSystem<S>);\n\n}\n", "file_path": "acoustic-field-optimizer/src/traits.rs", "rank": 25, "score": 86486.40609282785 }, { "content": "pub trait ObserveArea {\n\n /// Returns all observation points\n\n fn points(&self) -> &[Vector3];\n\n}\n", "file_path": "acoustic-field-calculator/src/observe_area/traits.rs", "rank": 26, "score": 81427.52378409094 }, { "content": "pub trait FieldBuffer<T> {\n\n fn buffer(&self) -> &[T];\n\n fn buffer_mut(&mut self) -> &mut Vec<T>;\n\n fn calc_from_complex_pressure(cp: Complex) -> T;\n\n}\n\n\n", "file_path": "acoustic-field-calculator/src/field_buffer/traits.rs", "rank": 27, "score": 77147.83318855411 }, { "content": "/*\n\n * File: system.rs\n\n * Project: src\n\n * Created Date: 17/11/2020\n\n * Author: Shun Suzuki\n\n * -----\n\n * Last Modified: 19/11/2020\n\n * Modified By: Shun Suzuki (suzuki@hapis.k.u-tokyo.ac.jp)\n\n * -----\n\n * Copyright (c) 2020 Hapis Lab. 
All rights reserved.\n\n *\n\n */\n\n\n\nuse super::type_inference_aux::SourceType;\n\nuse acoustic_field_calculator::prelude::*;\n\n\n\nuse std::ffi::{c_void, CString};\n\nuse std::mem::forget;\n\nuse std::os::raw::c_char;\n\n\n", "file_path": "capi/src/system.rs", "rank": 28, "score": 72375.5204978647 }, { "content": "}\n\n\n\n#[no_mangle]\n\n#[allow(improper_ctypes_definitions)]\n\npub unsafe extern \"C\" fn AFC_AddT4010A1(handle: *mut c_void, source: T4010A1) {\n\n let mut system: Box<UniformSystem<T4010A1>> = Box::from_raw(handle as *mut _);\n\n system.add_wave_source(source);\n\n forget(system);\n\n}\n\n\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn AFC_GetWaveSources(\n\n handle: *mut c_void,\n\n out: *mut *mut c_void,\n\n source_type: i32,\n\n) -> u64 {\n\n macro_rules! match_src_type {\n\n ($( $src_type:ident),*) => {\n\n match SourceType::from_i32(source_type) {\n\n $(SourceType::$src_type => {\n", "file_path": "capi/src/system.rs", "rank": 29, "score": 72362.9687817943 }, { "content": "}\n\n\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn AFC_FreeUniformSystem(handle: *mut c_void, source_type: i32) {\n\n macro_rules! gen_system {\n\n ($($t:ident ),*) => {\n\n match SourceType::from_i32(source_type) {\n\n $(SourceType::$t => {\n\n let _system: Box<UniformSystem<$t>> = Box::from_raw(handle as *mut _);\n\n },)*\n\n }\n\n }\n\n }\n\n sources!(gen_system);\n\n}\n\n\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn AFC_UniformSystemSoundSpeed(handle: *mut c_void, source_type: i32) -> f32 {\n\n macro_rules! gen_system {\n\n ($($t:ident ),*) => {\n", "file_path": "capi/src/system.rs", "rank": 30, "score": 72361.76348322589 }, { "content": "#[no_mangle]\n\npub unsafe extern \"C\" fn AFC_CreateUniformSystem(\n\n out: *mut *mut c_void,\n\n temperature: f32,\n\n source_type: i32,\n\n) {\n\n macro_rules! 
gen_system {\n\n ($($t:ident),*) => {\n\n match SourceType::from_i32(source_type) {\n\n $(SourceType::$t => {\n\n let system = UniformSystem::<$t>::new(temperature);\n\n let mut system = Box::new(system);\n\n let ptr = system.as_mut() as *mut _;\n\n forget(system);\n\n *out = ptr as *mut _;\n\n },)*\n\n }\n\n }\n\n }\n\n sources!(gen_system);\n", "file_path": "capi/src/system.rs", "rank": 31, "score": 72357.5208050996 }, { "content": " match SourceType::from_i32(source_type) {\n\n $(SourceType::$t => {\n\n let system: Box<UniformSystem<$t>> = Box::from_raw(handle as *mut _);\n\n let info = system.info();\n\n forget(system);\n\n CString::new(info).unwrap().into_raw()\n\n },)*\n\n }\n\n }\n\n }\n\n sources!(gen_system)\n\n}\n\n\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn AFC_UniformSystemSourceInfo(\n\n handle: *mut c_void,\n\n source_idx: u64,\n\n source_type: i32,\n\n) -> *mut c_char {\n\n macro_rules! gen_system {\n", "file_path": "capi/src/system.rs", "rank": 32, "score": 72355.4641959269 }, { "content": " match SourceType::from_i32(source_type) {\n\n $(SourceType::$t => {\n\n let system: Box<UniformSystem<$t>> = Box::from_raw(handle as *mut _);\n\n let sound_speed = system.sound_speed();\n\n forget(system);\n\n sound_speed\n\n },)*\n\n }\n\n }\n\n }\n\n sources!(gen_system)\n\n}\n\n\n\n#[no_mangle]\n\npub unsafe extern \"C\" fn AFC_UniformSystemInfo(\n\n handle: *mut c_void,\n\n source_type: i32,\n\n) -> *mut c_char {\n\n macro_rules! 
gen_system {\n\n ($($t:ident ),*) => {\n", "file_path": "capi/src/system.rs", "rank": 33, "score": 72355.39778086869 }, { "content": " ($($t:ident ),*) => {\n\n match SourceType::from_i32(source_type) {\n\n $(SourceType::$t => {\n\n let system: Box<UniformSystem<$t>> = Box::from_raw(handle as *mut _);\n\n let info = system.info_of_source(source_idx as usize);\n\n forget(system);\n\n CString::new(info).unwrap().into_raw()\n\n },)*\n\n }\n\n }\n\n }\n\n sources!(gen_system)\n\n}\n\n\n\n#[no_mangle]\n\n#[allow(improper_ctypes_definitions)]\n\npub unsafe extern \"C\" fn AFC_AddSphereWaveSource(handle: *mut c_void, source: SphereWaveSource) {\n\n let mut system: Box<UniformSystem<SphereWaveSource>> = Box::from_raw(handle as *mut _);\n\n system.add_wave_source(source);\n\n forget(system);\n", "file_path": "capi/src/system.rs", "rank": 34, "score": 72354.53664338621 }, { "content": " let mut system: Box<UniformSystem<$src_type>> = Box::from_raw(handle as *mut _);\n\n let sources = (*system).wave_sources_mut();\n\n let ptr = sources.as_ptr() as *mut $src_type;\n\n let len = sources.len();\n\n forget(system);\n\n *out = ptr as *mut c_void;\n\n len as u64\n\n },)*\n\n }\n\n }\n\n }\n\n sources!(match_src_type)\n\n}\n", "file_path": "capi/src/system.rs", "rank": 35, "score": 72350.62722443826 }, { "content": "pub trait ScalarFieldBuffer<T>: FieldBuffer<T> {\n\n fn max_result(&self) -> T;\n\n}\n\n\n\nimpl<T> ScalarFieldBuffer<Float> for T\n\nwhere\n\n T: FieldBuffer<Float>,\n\n{\n\n fn max_result(&self) -> Float {\n\n self.buffer().iter().fold(Float::NAN, |m, v| v.max(m))\n\n }\n\n}\n\n\n\nimpl<T> ScalarFieldBuffer<Complex> for T\n\nwhere\n\n T: FieldBuffer<Complex>,\n\n{\n\n fn max_result(&self) -> Complex {\n\n self.buffer()\n\n .iter()\n\n .fold(Complex::new(Float::NAN, Float::NAN), |m, &v| -> Complex {\n\n if v.abs() < m.abs() {\n\n m\n\n } else {\n\n v\n\n }\n\n })\n\n }\n\n}\n", "file_path": "acoustic-field-calculator/src/field_buffer/traits.rs", "rank": 36, "score": 
67909.91474271116 }, { "content": "/// Calculate field at observe area\n\n///\n\n/// # Arguments\n\n///\n\n/// * `medium` - Propagation medium\n\n/// * `observe_area` - Observation area\n\n/// * `buffer` - Buffer which contains results and define result field type\n\n///\n\npub trait FieldCalculator<S, M, A, O, F> {\n\n fn calculate(&self, medium: &M, observe_area: &A, buffer: &mut F);\n\n}\n", "file_path": "acoustic-field-calculator/src/calculator/traits.rs", "rank": 37, "score": 67852.45730349753 }, { "content": "/// Calculate field by gpu calculator\n\npub trait GpuFieldBuffer<T>: FieldBuffer<T> {\n\n fn calculate_field<\n\n S: WaveSource,\n\n M: GpuPropagationMedium<S>,\n\n F: GpuFieldBuffer<T>,\n\n A: SizedArea,\n\n >(\n\n medium: &M,\n\n observe_area: &A,\n\n buffer: &mut F,\n\n device: GpuDevice,\n\n queue: GpuQueue,\n\n );\n\n}\n", "file_path": "acoustic-field-calculator/src/gpu/field_buffer/traits.rs", "rank": 38, "score": 66724.38800537972 }, { "content": "/*\n\n * File: focal_point.rs\n\n * Project: multiple_foci\n\n * Created Date: 27/05/2020\n\n * Author: Shun Suzuki\n\n * -----\n\n * Last Modified: 19/11/2020\n\n * Modified By: Shun Suzuki (suzuki@hapis.k.u-tokyo.ac.jp)\n\n * -----\n\n * Copyright (c) 2020 Hapis Lab. 
All rights reserved.\n\n *\n\n */\n\n\n\nuse crate::*;\n\n\n\n/// Producing a single focal point\n\npub struct FocalPoint {\n\n point: Vector3,\n\n}\n\n\n", "file_path": "acoustic-field-optimizer/src/focal_point.rs", "rank": 39, "score": 66341.71826681482 }, { "content": "impl FocalPoint {\n\n pub fn new(point: Vector3) -> Self {\n\n Self { point }\n\n }\n\n}\n\n\n\nimpl Optimizer for FocalPoint {\n\n fn optimize<S: WaveSource>(&self, system: &mut UniformSystem<S>) {\n\n let focal_point = self.point;\n\n let sound_speed = system.sound_speed();\n\n for source in system.wave_sources_mut() {\n\n let pos = source.position();\n\n let d = (pos - focal_point).norm();\n\n let wave_length = sound_speed / source.frequency();\n\n let phase = (d % wave_length) / wave_length;\n\n let phase = -2.0 * PI * phase;\n\n source.set_phase(phase);\n\n }\n\n }\n\n}\n", "file_path": "acoustic-field-optimizer/src/focal_point.rs", "rank": 40, "score": 66299.2779885275 }, { "content": "/*\n\n * File: mod.rs\n\n * Project: system\n\n * Created Date: 18/11/2020\n\n * Author: Shun Suzuki\n\n * -----\n\n * Last Modified: 18/11/2020\n\n * Modified By: Shun Suzuki (suzuki@hapis.k.u-tokyo.ac.jp)\n\n * -----\n\n * Copyright (c) 2020 Hapis Lab. All rights reserved.\n\n *\n\n */\n\n\n\nmod traits;\n\nmod uniform_medium;\n\n\n\npub use traits::*;\n\npub use uniform_medium::UniformSystem;\n", "file_path": "acoustic-field-calculator/src/system/mod.rs", "rank": 41, "score": 65998.76488603394 }, { "content": "/*\n\n * File: traits.rs\n\n * Project: system\n\n * Created Date: 18/11/2020\n\n * Author: Shun Suzuki\n\n * -----\n\n * Last Modified: 17/06/2021\n\n * Modified By: Shun Suzuki (suzuki@hapis.k.u-tokyo.ac.jp)\n\n * -----\n\n * Copyright (c) 2020 Hapis Lab. 
All rights reserved.\n\n *\n\n */\n\n\n\nuse crate::core::wave_sources::WaveSource;\n\nuse crate::core::{Complex, Vector3};\n\n\n", "file_path": "acoustic-field-calculator/src/system/traits.rs", "rank": 42, "score": 65997.85653142234 }, { "content": " def dir(self):\n", "file_path": "python/afc/nativemethods.py", "rank": 43, "score": 65379.873066066015 }, { "content": "class Vector3(Structure):\n\n _fields_ = [(\"x\", c_float), (\"y\", c_float), (\"z\", c_float)]\n\n\n\n def __init__(self, position):\n\n super().__init__()\n\n self.x = position[0]\n\n self.y = position[1]\n", "file_path": "python/afc/nativemethods.py", "rank": 44, "score": 65279.08700734285 }, { "content": "/*\n\n * File: mod.rs\n\n * Project: system\n\n * Created Date: 18/11/2020\n\n * Author: Shun Suzuki\n\n * -----\n\n * Last Modified: 18/11/2020\n\n * Modified By: Shun Suzuki (suzuki@hapis.k.u-tokyo.ac.jp)\n\n * -----\n\n * Copyright (c) 2020 Hapis Lab. All rights reserved.\n\n *\n\n */\n\n\n\nmod traits;\n\nmod uniform_medium;\n\n\n\npub use traits::*;\n", "file_path": "acoustic-field-calculator/src/gpu/system/mod.rs", "rank": 45, "score": 64115.11151842043 }, { "content": "/*\n\n* File: traits.rs\n\n* Project: system\n\n* Created Date: 18/11/2020\n\n* Author: Shun Suzuki\n\n* -----\n\n* Last Modified: 19/11/2020\n\n* Modified By: Shun Suzuki (suzuki@hapis.k.u-tokyo.ac.jp)\n\n* -----\n\n* Copyright (c) 2020 Hapis Lab. All rights reserved.\n\n*\n\n*/\n\n\n\nuse crate::{core::wave_sources::WaveSource, system::WaveSourceContainer};\n\n\n", "file_path": "acoustic-field-calculator/src/gpu/system/traits.rs", "rank": 46, "score": 64111.95118914218 }, { "content": "/*\n\n * File: acc_system.rs\n\n * Project: accurate\n\n * Created Date: 18/11/2020\n\n * Author: Shun Suzuki\n\n * -----\n\n * Last Modified: 19/11/2020\n\n * Modified By: Shun Suzuki (suzuki@hapis.k.u-tokyo.ac.jp)\n\n * -----\n\n * Copyright (c) 2020 Hapis Lab. 
All rights reserved.\n\n *\n\n */\n\n\n\nuse crate::{\n\n core::{wave_sources::WaveSource, Complex, Vector3},\n\n system::{UniformSystem, WaveSourceContainer},\n\n Float,\n\n};\n\nuse binary_heap_plus::*;\n\nuse ordered_float::OrderedFloat;\n\n\n", "file_path": "acoustic-field-calculator/src/accurate/system_acc.rs", "rank": 47, "score": 64111.10595863982 }, { "content": "/*\n\n * File: uniform_medium.rs\n\n * Project: system\n\n * Created Date: 18/11/2020\n\n * Author: Shun Suzuki\n\n * -----\n\n * Last Modified: 17/06/2021\n\n * Modified By: Shun Suzuki (suzuki@hapis.k.u-tokyo.ac.jp)\n\n * -----\n\n * Copyright (c) 2020 Hapis Lab. All rights reserved.\n\n *\n\n */\n\n\n\nuse super::traits::{PropagationMedium, WaveSourceContainer};\n\nuse crate::core::{\n\n attenuation::attenuation_coef, sound_speed::calc_sound_speed, wave_sources::*, Complex, Float,\n\n Vector3, PI,\n\n};\n\nuse num::Zero;\n\n\n", "file_path": "acoustic-field-calculator/src/system/uniform_medium.rs", "rank": 48, "score": 64108.675234903996 }, { "content": "\n\n fn add_wave_source(&mut self, source: S) {\n\n let sound_speed = calc_sound_speed(self.temperature);\n\n let wavenum = 2.0 * PI * source.frequency() / sound_speed;\n\n let atten = attenuation_coef(source.frequency(), 30., 1., 1., self.temperature);\n\n self.add_wave_source_with_wavenum_and_atten(source, wavenum, atten);\n\n }\n\n}\n\n\n\nimpl<S: WaveSource> PropagationMedium<S> for UniformSystem<S> {\n\n fn propagate(&self, idx: usize, target: Vector3) -> Complex {\n\n let source = &self.wave_sources[idx];\n\n let diff = crate::fmath::sub(target, source.position());\n\n let dist = diff.norm();\n\n let theta = crate::fmath::acos(source.direction().dot(&diff) / dist);\n\n let d = S::directivity(theta);\n\n let r = source.amp() * d * (-dist * self.attens[idx]).exp() / dist;\n\n let phi = source.phase() + self.wavenums[idx] * dist;\n\n Complex::from_polar(r, phi)\n\n }\n", "file_path": "acoustic-field-calculator/src/system/uniform_medium.rs", 
"rank": 49, "score": 64073.94700145522 }, { "content": "\n\n pub fn info_of_source(&self, idx: usize) -> String {\n\n format!(\n\n \"{}-th wave source:\\n Wavelength: {} mm\\n Wavenumber: {} mm^-1\\n Attenuation: {} Np/mm\",\n\n idx,\n\n 2.0 * PI / self.wavenums[idx],\n\n self.wavenums[idx],\n\n self.attens[idx]\n\n )\n\n }\n\n}\n\n\n\nimpl<S: WaveSource> WaveSourceContainer<S> for UniformSystem<S> {\n\n fn wave_sources(&self) -> &[S] {\n\n &self.wave_sources\n\n }\n\n\n\n fn wave_sources_mut(&mut self) -> &mut Vec<S> {\n\n &mut self.wave_sources\n\n }\n", "file_path": "acoustic-field-calculator/src/system/uniform_medium.rs", "rank": 50, "score": 64072.95949853797 }, { "content": "\n\n fn propagate_all(&self, target: Vector3) -> Complex {\n\n let mut c = Complex::zero();\n\n for i in 0..self.wave_sources.len() {\n\n c += self.propagate(i, target);\n\n }\n\n c\n\n }\n\n}\n", "file_path": "acoustic-field-calculator/src/system/uniform_medium.rs", "rank": 51, "score": 64072.23090888871 }, { "content": "pub struct UniformSystem<S: WaveSource> {\n\n wave_sources: Vec<S>,\n\n wavenums: Vec<Float>,\n\n attens: Vec<Float>,\n\n temperature: Float,\n\n sound_speed: Float,\n\n}\n\n\n\nimpl<S: WaveSource> UniformSystem<S> {\n\n pub fn new(temperature: Float) -> Self {\n\n Self {\n\n wave_sources: vec![],\n\n wavenums: vec![],\n\n attens: vec![],\n\n temperature,\n\n sound_speed: calc_sound_speed(temperature),\n\n }\n\n }\n\n\n\n pub fn add_wave_source_with_wavenum(&mut self, source: S, wavenum: Float) {\n", "file_path": "acoustic-field-calculator/src/system/uniform_medium.rs", "rank": 52, "score": 64070.65619607477 }, { "content": " let atten = attenuation_coef(source.frequency(), 30., 1., 1., self.temperature);\n\n self.add_wave_source_with_wavenum_and_atten(source, wavenum, atten);\n\n }\n\n\n\n pub fn add_wave_source_with_atten(&mut self, source: S, atten: Float) {\n\n let wavenum = 2.0 * PI * source.frequency() / self.sound_speed;\n\n 
self.add_wave_source_with_wavenum_and_atten(source, wavenum, atten);\n\n }\n\n\n\n pub fn add_wave_source_with_wavenum_and_atten(\n\n &mut self,\n\n source: S,\n\n wavenum: Float,\n\n atten: Float,\n\n ) {\n\n self.wave_sources.push(source);\n\n self.wavenums.push(wavenum);\n\n self.attens.push(atten);\n\n }\n\n\n", "file_path": "acoustic-field-calculator/src/system/uniform_medium.rs", "rank": 53, "score": 64067.615807379305 }, { "content": " let theta = crate::fmath::acos(source.direction().dot(&diff) / dist);\n\n let d = S::directivity(theta);\n\n let r = source.amp() * d * (-dist * attens[i]).exp() / dist;\n\n let phi = source.phase() + wavenums[i] * dist;\n\n re_heap.push(OrderedFloat(r * phi.cos()));\n\n im_heap.push(OrderedFloat(r * phi.sin()));\n\n }\n\n\n\n let re: Float = re_heap.into_iter_sorted().map(|v| v.into_inner()).sum();\n\n let im: Float = im_heap.into_iter_sorted().map(|v| v.into_inner()).sum();\n\n\n\n Complex::new(re, im)\n\n }\n\n}\n", "file_path": "acoustic-field-calculator/src/accurate/system_acc.rs", "rank": 54, "score": 64067.1448281007 }, { "content": " pub fn wavenums(&self) -> &[Float] {\n\n &self.wavenums\n\n }\n\n\n\n pub fn attens(&self) -> &[Float] {\n\n &self.attens\n\n }\n\n\n\n pub fn sound_speed(&self) -> Float {\n\n self.sound_speed\n\n }\n\n\n\n pub fn info(&self) -> String {\n\n format!(\n\n \"Uniform Medium:\\n Temperature: {} K = {} ℃\\n Sound Speed: {} mm/s\",\n\n self.temperature,\n\n self.temperature - 273.15,\n\n calc_sound_speed(self.temperature)\n\n )\n\n }\n", "file_path": "acoustic-field-calculator/src/system/uniform_medium.rs", "rank": 55, "score": 64065.65683520814 }, { "content": "/*\n\n * File: macros.rs\n\n * Project: multiple_foci\n\n * Created Date: 18/11/2020\n\n * Author: Shun Suzuki\n\n * -----\n\n * Last Modified: 18/11/2020\n\n * Modified By: Shun Suzuki (suzuki@hapis.k.u-tokyo.ac.jp)\n\n * -----\n\n * Copyright (c) 2020 Hapis Lab. 
All rights reserved.\n\n *\n\n */\n\n\n\nuse crate::*;\n\nuse acoustic_field_calculator::fmath;\n\n\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/macros.rs", "rank": 56, "score": 63338.814968176666 }, { "content": "/*\n\n * File: mod.rs\n\n * Project: multiple_foci\n\n * Created Date: 27/05/2020\n\n * Author: Shun Suzuki\n\n * -----\n\n * Last Modified: 17/06/2021\n\n * Modified By: Shun Suzuki (suzuki@hapis.k.u-tokyo.ac.jp)\n\n * -----\n\n * Copyright (c) 2020 Hapis Lab. All rights reserved.\n\n *\n\n */\n\n\n\nmod combination;\n\nmod fft;\n\nmod linear_synthesis;\n\npub mod macros;\n\nmod matrix;\n\nmod nls;\n\n\n\npub use combination::*;\n\npub use fft::*;\n\npub use linear_synthesis::*;\n\npub use matrix::*;\n\npub use nls::*;\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/mod.rs", "rank": 57, "score": 63336.22230278294 }, { "content": "/*\n\n * File: uniform_medium.rs\n\n * Project: system\n\n * Created Date: 18/11/2020\n\n * Author: Shun Suzuki\n\n * -----\n\n * Last Modified: 19/11/2020\n\n * Modified By: Shun Suzuki (suzuki@hapis.k.u-tokyo.ac.jp)\n\n * -----\n\n * Copyright (c) 2020 Hapis Lab. 
All rights reserved.\n\n *\n\n */\n\n\n\nuse crate::{core::wave_sources::WaveSource, system::UniformSystem};\n\n\n\nuse super::traits::GpuPropagationMedium;\n\n\n\npub const GPU_DIRECTIVITY_CACHE_SIZE: usize = 1800;\n\n\n\nimpl<S: WaveSource> GpuPropagationMedium<S> for UniformSystem<S> {\n", "file_path": "acoustic-field-calculator/src/gpu/system/uniform_medium.rs", "rank": 58, "score": 62334.34753973439 }, { "content": " fn wavenums(&self) -> &[f32] {\n\n self.wavenums()\n\n }\n\n\n\n fn attenuations(&self) -> &[f32] {\n\n self.attens()\n\n }\n\n\n\n fn directivities(&self) -> Vec<f32> {\n\n let mut directivities = Vec::with_capacity(GPU_DIRECTIVITY_CACHE_SIZE);\n\n for i in 0..GPU_DIRECTIVITY_CACHE_SIZE {\n\n let theta = std::f32::consts::PI * (i as f32 / GPU_DIRECTIVITY_CACHE_SIZE as f32);\n\n let d = S::directivity(theta);\n\n directivities.push(d);\n\n }\n\n directivities\n\n }\n\n}\n", "file_path": "acoustic-field-calculator/src/gpu/system/uniform_medium.rs", "rank": 59, "score": 62298.3432449689 }, { "content": "fn main() {\n\n let array_center = Vector3::new(\n\n TRANS_SIZE * (NUM_TRANS_X - 1) as Float / 2.0,\n\n TRANS_SIZE * (NUM_TRANS_Y - 1) as Float / 2.0,\n\n 0.,\n\n );\n\n let z = 150.0;\n\n let focal_pos = array_center + z * Vector3::z();\n\n\n\n let mut system = UniformSystem::new(TEMPERATURE);\n\n for y in 0..NUM_TRANS_Y {\n\n for x in 0..NUM_TRANS_X {\n\n let pos = Vector3::new(TRANS_SIZE * x as Float, TRANS_SIZE * y as Float, 0.);\n\n system.add_wave_source(T4010A1::new(pos, Vector3::z(), 1.0, 0.0, FREQUENCY));\n\n }\n\n }\n\n FocalPoint::new(focal_pos).optimize(&mut system);\n\n\n\n let r = 100.0;\n\n let area = GridAreaBuilder::new()\n", "file_path": "acoustic-field-optimizer/examples/simple.rs", "rank": 60, "score": 62120.578611816585 }, { "content": "fn main() {\n\n let array_center = Vector3::new(\n\n TRANS_SIZE * (NUM_TRANS_X - 1) as Float / 2.0,\n\n TRANS_SIZE * (NUM_TRANS_Y - 1) as Float / 2.0,\n\n 0.,\n\n );\n\n let z = 150.0;\n\n let 
focal_pos = array_center + z * Vector3::z();\n\n\n\n // UniformSystem is a uniform medium of sound\n\n let mut system = UniformSystem::new(TEMPERATURE);\n\n println!(\"{}\", system.info());\n\n\n\n // Placing sound sources which produce focus at 'focal_pos'\n\n let sound_speed = system.sound_speed();\n\n let amp = 1.0;\n\n let dir = Vector3::z();\n\n for y in 0..NUM_TRANS_Y {\n\n for x in 0..NUM_TRANS_X {\n\n let pos = Vector3::new(TRANS_SIZE * x as Float, TRANS_SIZE * y as Float, 0.);\n", "file_path": "acoustic-field-calculator/examples/main.rs", "rank": 61, "score": 62120.578611816585 }, { "content": "/*\n\n * File: greedy.rs\n\n * Project: combination\n\n * Created Date: 17/06/2021\n\n * Author: Shun Suzuki\n\n * -----\n\n * Last Modified: 17/06/2021\n\n * Modified By: Shun Suzuki (suzuki@hapis.k.u-tokyo.ac.jp)\n\n * -----\n\n * Copyright (c) 2021 Hapis Lab. All rights reserved.\n\n *\n\n */\n\n\n\nuse crate::*;\n\n\n\npub struct Greedy {\n\n foci: Vec<Vector3>,\n\n amps: Vec<Float>,\n\n phases: Vec<Complex>,\n\n}\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/combination/greedy.rs", "rank": 62, "score": 61586.962839907836 }, { "content": "/*\n\n * File: long.rs\n\n * Project: multiple_foci\n\n * Created Date: 22/09/2020\n\n * Author: Shun Suzuki\n\n * -----\n\n * Last Modified: 19/11/2020\n\n * Modified By: Shun Suzuki (suzuki@hapis.k.u-tokyo.ac.jp)\n\n * -----\n\n * Copyright (c) 2020 Hapis Lab. 
All rights reserved.\n\n *\n\n */\n\n\n\nuse crate::multiple_foci::macros::*;\n\nuse crate::*;\n\n\n\n/// Long et al., 2014\n\npub struct Long {\n\n foci: Vec<Vector3>,\n\n amps: Vec<Float>,\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/matrix/long.rs", "rank": 63, "score": 61586.67886974381 }, { "content": "/*\n\n * File: apo.rs\n\n * Project: nls\n\n * Created Date: 03/10/2020\n\n * Author: Shun Suzuki\n\n * -----\n\n * Last Modified: 17/06/2021\n\n * Modified By: Shun Suzuki (suzuki@hapis.k.u-tokyo.ac.jp)\n\n * -----\n\n * Copyright (c) 2020 Hapis Lab. All rights reserved.\n\n *\n\n */\n\n\n\nuse crate::multiple_foci::macros::*;\n\nuse crate::*;\n\n\n\nconst EPS: Float = 1e-8;\n\nconst K_MAX: usize = 200;\n\nconst LINE_SEARCH_MAX: usize = 100;\n\n\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/nls/apo.rs", "rank": 64, "score": 61582.07704187589 }, { "content": "/*\n\n * File: lm.rs\n\n * Project: multiple_foci\n\n * Created Date: 21/09/2020\n\n * Author: Shun Suzuki\n\n * -----\n\n * Last Modified: 17/02/2021\n\n * Modified By: Shun Suzuki (suzuki@hapis.k.u-tokyo.ac.jp)\n\n * -----\n\n * Copyright (c) 2020 Hapis Lab. All rights reserved.\n\n *\n\n */\n\n\n\nuse super::macros::*;\n\nuse crate::*;\n\n\n\nconst EPS_1: Float = 1e-8;\n\nconst EPS_2: Float = 1e-8;\n\nconst TAU: Float = 1e-3;\n\nconst K_MAX: usize = 200;\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/nls/lm.rs", "rank": 65, "score": 61581.52177800511 }, { "content": "/*\n\n * File: horn.rs\n\n * Project: multiple_foci\n\n * Created Date: 27/05/2020\n\n * Author: Shun Suzuki\n\n * -----\n\n * Last Modified: 18/02/2021\n\n * Modified By: Shun Suzuki (suzuki@hapis.k.u-tokyo.ac.jp)\n\n * -----\n\n * Copyright (c) 2020 Hapis Lab. 
All rights reserved.\n\n *\n\n */\n\n\n\nuse crate::multiple_foci::macros::*;\n\nuse crate::*;\n\n\n\nuse rand::{thread_rng, Rng};\n\n\n\nconst REPEAT_SDP: usize = 100;\n\nconst LAMBDA_SDP: Float = 0.8;\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/matrix/horn.rs", "rank": 66, "score": 61581.40511146781 }, { "content": "/*\n\n * File: mod.rs\n\n * Project: fft\n\n * Created Date: 03/10/2020\n\n * Author: Shun Suzuki\n\n * -----\n\n * Last Modified: 17/06/2021\n\n * Modified By: Shun Suzuki (suzuki@hapis.k.u-tokyo.ac.jp)\n\n * -----\n\n * Copyright (c) 2020 Hapis Lab. All rights reserved.\n\n *\n\n */\n\n\n\nmod ifft;\n\n\n\npub use ifft::Ifft;\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/fft/mod.rs", "rank": 67, "score": 61581.2630204773 }, { "content": "/*\n\n * File: mod.rs\n\n * Project: combination\n\n * Created Date: 17/06/2021\n\n * Author: Shun Suzuki\n\n * -----\n\n * Last Modified: 17/06/2021\n\n * Modified By: Shun Suzuki (suzuki@hapis.k.u-tokyo.ac.jp)\n\n * -----\n\n * Copyright (c) 2021 Hapis Lab. All rights reserved.\n\n *\n\n */\n\n\n\nmod greedy;\n\n\n\npub use greedy::Greedy;\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/combination/mod.rs", "rank": 68, "score": 61581.2630204773 }, { "content": "/*\n\n * File: mod.rs\n\n * Project: matrix\n\n * Created Date: 03/10/2020\n\n * Author: Shun Suzuki\n\n * -----\n\n * Last Modified: 18/11/2020\n\n * Modified By: Shun Suzuki (suzuki@hapis.k.u-tokyo.ac.jp)\n\n * -----\n\n * Copyright (c) 2020 Hapis Lab. 
All rights reserved.\n\n *\n\n */\n\n\n\nmod horn;\n\nmod long;\n\n\n\npub use horn::Horn;\n\npub use long::Long;\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/matrix/mod.rs", "rank": 69, "score": 61580.57653726336 }, { "content": "/*\n\n * File: ifft.rs\n\n * Project: multiple_foci\n\n * Created Date: 02/10/2020\n\n * Author: Shun Suzuki\n\n * -----\n\n * Last Modified: 17/06/2021\n\n * Modified By: Shun Suzuki (suzuki@hapis.k.u-tokyo.ac.jp)\n\n * -----\n\n * Copyright (c) 2020 Hapis Lab. All rights reserved.\n\n *\n\n */\n\n\n\nuse crate::*;\n\n\n\nuse na::{ComplexField, Dynamic, Matrix, VecStorage};\n\n\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/fft/ifft.rs", "rank": 70, "score": 61580.22826007117 }, { "content": "/*\n\n * File: macros.rs\n\n * Project: nls\n\n * Created Date: 18/11/2020\n\n * Author: Shun Suzuki\n\n * -----\n\n * Last Modified: 18/11/2020\n\n * Modified By: Shun Suzuki (suzuki@hapis.k.u-tokyo.ac.jp)\n\n * -----\n\n * Copyright (c) 2020 Hapis Lab. All rights reserved.\n\n *\n\n */\n\n\n\nuse crate::{multiple_foci::macros::*, *};\n\n\n\n#[allow(non_snake_case)]\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/nls/macros.rs", "rank": 71, "score": 61580.14552890191 }, { "content": "/*\n\n * File: mod.rs\n\n * Project: nls\n\n * Created Date: 03/10/2020\n\n * Author: Shun Suzuki\n\n * -----\n\n * Last Modified: 17/06/2021\n\n * Modified By: Shun Suzuki (suzuki@hapis.k.u-tokyo.ac.jp)\n\n * -----\n\n * Copyright (c) 2020 Hapis Lab. 
All rights reserved.\n\n *\n\n */\n\n\n\nmod apo;\n\nmod gauss_newton;\n\nmod gradient_descent;\n\nmod lm;\n\nmod macros;\n\n\n\npub use apo::Apo;\n\npub use gauss_newton::GaussNewton;\n\npub use gradient_descent::GradientDescent;\n\npub use lm::LM;\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/nls/mod.rs", "rank": 72, "score": 61575.529714597345 }, { "content": "\n\nimpl Greedy {\n\n pub fn new(foci: Vec<Vector3>, amps: Vec<Float>, phase_div: usize) -> Self {\n\n let mut phases = Vec::with_capacity(phase_div);\n\n for i in 0..phase_div {\n\n phases.push(Complex::new(0., 2.0 * PI * i as Float / phase_div as Float).exp());\n\n }\n\n Self { foci, amps, phases }\n\n }\n\n}\n\n\n\nimpl Optimizer for Greedy {\n\n #[allow(clippy::many_single_char_names)]\n\n #[allow(non_snake_case)]\n\n fn optimize<S: WaveSource>(&self, system: &mut UniformSystem<S>) {\n\n for source in system.wave_sources_mut() {\n\n source.set_phase(0.);\n\n }\n\n\n\n let m = self.foci.len();\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/combination/greedy.rs", "rank": 73, "score": 61552.75229102463 }, { "content": "\n\n let mut tmp = Vec::with_capacity(self.phases.len());\n\n tmp.resize(self.phases.len(), vec![Complex::new(0., 0.); m]);\n\n\n\n let mut cache = Vec::with_capacity(m);\n\n cache.resize(m, Complex::new(0., 0.));\n\n\n\n fn transfer_foci<S: WaveSource>(\n\n system: &mut UniformSystem<S>,\n\n source_idx: usize,\n\n phase: Complex,\n\n foci: &[Vector3],\n\n res: &mut [Complex],\n\n ) {\n\n for i in 0..foci.len() {\n\n res[i] = system.propagate(source_idx, foci[i]) * phase;\n\n }\n\n }\n\n\n\n for i in 0..system.wave_sources().len() {\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/combination/greedy.rs", "rank": 74, "score": 61551.49577114945 }, { "content": " for source in system.wave_sources_mut() {\n\n source.set_phase(0.);\n\n }\n\n\n\n let m = self.foci.len();\n\n let n = system.wave_sources().len();\n\n\n\n let n_param = n + m;\n\n\n\n let x0 
= VectorXf::zeros(n_param);\n\n\n\n let I = MatrixXf::identity(n_param, n_param);\n\n\n\n let BhB = make_BhB(system, &self.amps, &self.foci, m);\n\n\n\n let mut x = x0;\n\n let mut nu = 2.0;\n\n\n\n let T = make_T(&x, n, m);\n\n let (mut A, mut g) = calc_JtJ_Jtf(&BhB, &T);\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/nls/lm.rs", "rank": 75, "score": 61550.566577785154 }, { "content": " let m = self.foci.len();\n\n\n\n let G = generate_propagation_matrix(system, &self.foci);\n\n let P = MatrixXcf::from_diagonal(&VectorXcf::from_iterator(\n\n m,\n\n self.amps.iter().map(|&a| Complex::new(a, 0.)),\n\n ));\n\n\n\n let G_pinv = pseudo_inverse_with_reg(&G, self.tikhonov_parameter);\n\n let MM = &P * (MatrixXcf::identity(m, m) - G * &G_pinv) * &P;\n\n let mut X = MatrixXcf::identity(m, m);\n\n\n\n let mut rng = thread_rng();\n\n let lambda = self.lambda;\n\n for _ in 0..(m * self.repeat) {\n\n let ii = (m as f32 * rng.gen_range(0f32..1f32)) as usize;\n\n let Xc = X.clone().remove_row(ii).remove_column(ii);\n\n let MMc = MM.column(ii).remove_row(ii);\n\n let Xb = Xc * &MMc;\n\n let gamma = (Xb.adjoint() * MMc)[(0, 0)];\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/matrix/horn.rs", "rank": 76, "score": 61550.212222468166 }, { "content": "/// Acoustic Power Optimization\n\npub struct Apo {\n\n foci: Vec<Vector3>,\n\n amps: Vec<Float>,\n\n lambda: Float,\n\n}\n\n\n\n/// References\n\n/// * Keisuke Hasegawa, Hiroyuki Shinoda, and Takaaki Nara. 
Volumetric acoustic holography andits application to self-positioning by single channel measurement.Journal of Applied Physics,127(24):244904, 2020.7\n\nimpl Apo {\n\n pub fn new(foci: Vec<Vector3>, amps: Vec<Float>, lambda: Float) -> Self {\n\n Self { foci, amps, lambda }\n\n }\n\n}\n\n\n\nimpl Apo {\n\n #[allow(non_snake_case)]\n\n fn make_Ri(G: &MatrixXcf, i: usize, m: usize) -> MatrixXcf {\n\n let mut Di = MatrixXcf::zeros(m, m);\n\n Di[(i, i)] = Complex::new(1., 0.);\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/nls/apo.rs", "rank": 77, "score": 61548.42040765668 }, { "content": " let mut tmp = fftshift(&grat_mat, w, h);\n\n let tmp = fft2d(&mut tmp, w, h);\n\n let tmp = fftshift(&tmp, w, h);\n\n\n\n let right = self.bottom_right - self.bottom_left;\n\n let up = self.top_left - self.bottom_left;\n\n let center = (right + up) / 2.0;\n\n\n\n let array_w = (right.norm() / self.spacing).ceil() as usize;\n\n let array_h = (up.norm() / self.spacing).ceil() as usize;\n\n\n\n let right = right.normalize();\n\n let up = up.normalize();\n\n\n\n let max = tmp.iter().fold(Float::NAN, |m, v| v.abs().max(m));\n\n let sound_speed = system.sound_speed();\n\n for source in system.wave_sources_mut() {\n\n let pos = source.position() - self.bottom_left;\n\n\n\n let x = (right.dot(&pos) / self.spacing).ceil() as isize;\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/fft/ifft.rs", "rank": 78, "score": 61548.12472611789 }, { "content": " #[allow(non_snake_case, clippy::many_single_char_names)]\n\n fn optimize<S: WaveSource>(&self, system: &mut UniformSystem<S>) {\n\n for source in system.wave_sources_mut() {\n\n source.set_phase(0.);\n\n }\n\n\n\n let m = self.foci.len();\n\n let n = system.wave_sources().len();\n\n\n\n let G = generate_propagation_matrix(system, &self.foci);\n\n\n\n let p = VectorXcf::from_iterator(m, self.amps.iter().map(|&a| Complex::new(a, 0.)));\n\n let p2 = p.map(|v| v.norm_squared());\n\n\n\n let I = MatrixXcf::identity(n, n);\n\n 
let q0 = (G.adjoint() * &G + I.scale(self.lambda))\n\n .qr()\n\n .solve(&(G.adjoint() * &p))\n\n .unwrap();\n\n let Ris: Vec<_> = (0..m).map(|i| Self::make_Ri(&G, i, m)).collect();\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/nls/apo.rs", "rank": 79, "score": 61547.65460666937 }, { "content": "\n\n/// Levenberg-Marquardt\n\npub struct LM {\n\n foci: Vec<Vector3>,\n\n amps: Vec<Float>,\n\n}\n\n\n\n/// References\n\n/// * K.Levenberg, “A method for the solution of certain non-linear problems in least squares,” Quarterly of applied mathematics, vol.2, no.2, pp.164–168, 1944.\n\n/// * D.W.Marquardt, “An algorithm for least-squares estimation of non-linear parameters,” Journal of the society for Industrial and AppliedMathematics, vol.11, no.2, pp.431–441, 1963.\n\n/// * K.Madsen, H.Nielsen, and O.Tingleff, “Methods for non-linear least squares problems (2nd ed.),” 2004.\n\nimpl LM {\n\n pub fn new(foci: Vec<Vector3>, amps: Vec<Float>) -> Self {\n\n Self { foci, amps }\n\n }\n\n}\n\n\n\nimpl Optimizer for LM {\n\n #[allow(non_snake_case, clippy::many_single_char_names)]\n\n fn optimize<S: WaveSource>(&self, system: &mut UniformSystem<S>) {\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/nls/lm.rs", "rank": 80, "score": 61546.67414623239 }, { "content": "}\n\n\n\n/// Reference\n\n/// * Long, Benjamin, et al. 
\"Rendering volumetric haptic shapes in mid-air using ultrasound.\" ACM Transactions on Graphics (TOG) 33.6 (2014): 1-10.\n\nimpl Long {\n\n pub fn new(foci: Vec<Vector3>, amps: Vec<Float>) -> Self {\n\n Self { foci, amps }\n\n }\n\n}\n\n\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/matrix/long.rs", "rank": 81, "score": 61546.30565945784 }, { "content": "const TIKHONOV_DEFAULT: Float = 1e-5;\n\n\n\n/// Inoue et al., 2015\n\npub struct Horn {\n\n foci: Vec<Vector3>,\n\n amps: Vec<Float>,\n\n repeat: usize,\n\n lambda: Float,\n\n tikhonov_parameter: Float,\n\n}\n\n\n\n/// Reference\n\n/// * Inoue, Seki, Yasutoshi Makino, and Hiroyuki Shinoda. \"Active touch perception produced by airborne ultrasonic haptic hologram.\" 2015 IEEE World Haptics Conference (WHC). IEEE, 2015.\n\nimpl Horn {\n\n pub fn new(foci: Vec<Vector3>, amps: Vec<Float>) -> Self {\n\n Self {\n\n foci,\n\n amps,\n\n repeat: REPEAT_SDP,\n\n lambda: LAMBDA_SDP,\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/matrix/horn.rs", "rank": 82, "score": 61546.03117997008 }, { "content": " bottom_right: Vector3,\n\n spacing: Float,\n\n z: Float,\n\n ) -> Self {\n\n Self {\n\n image_path: path.to_owned(),\n\n bottom_left,\n\n top_left,\n\n bottom_right,\n\n spacing,\n\n z,\n\n }\n\n }\n\n}\n\n\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/fft/ifft.rs", "rank": 83, "score": 61545.91475931834 }, { "content": " let mut min_idx = 0;\n\n let mut min_v = Float::INFINITY;\n\n for (idx, &phase) in self.phases.iter().enumerate() {\n\n transfer_foci(system, i, phase, &self.foci, &mut tmp[idx]);\n\n let mut v = 0.0;\n\n for (j, c) in cache.iter().enumerate() {\n\n v += (self.amps[j] - (tmp[idx][j] + c).abs()).abs();\n\n }\n\n\n\n if v < min_v {\n\n min_v = v;\n\n min_idx = idx;\n\n }\n\n }\n\n\n\n for (j, c) in cache.iter_mut().enumerate() {\n\n *c += tmp[min_idx][j];\n\n }\n\n\n\n let phase = self.phases[min_idx].argument();\n\n system.wave_sources_mut()[i].set_phase(phase);\n\n 
}\n\n }\n\n}\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/combination/greedy.rs", "rank": 84, "score": 61543.477870184994 }, { "content": " source.set_phase(0.);\n\n }\n\n\n\n let m = self.foci.len();\n\n let n = system.wave_sources().len();\n\n\n\n let G = generate_propagation_matrix(system, &self.foci);\n\n\n\n let denomi = G.column_sum();\n\n let X = G\n\n .map_with_location(|i, _, a| Complex::new(self.amps[i], 0.0) * a.conj() / denomi[i])\n\n .transpose();\n\n\n\n let R = &G * X;\n\n\n\n let eig = R.symmetric_eigen();\n\n let e_arg = eig\n\n .eigenvectors\n\n .row(eig.eigenvalues.imax())\n\n .map(|e| e.argument());\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/matrix/long.rs", "rank": 85, "score": 61543.127970399204 }, { "content": " fn optimize<S: WaveSource>(&self, system: &mut UniformSystem<S>) {\n\n for source in system.wave_sources_mut() {\n\n source.set_phase(0.);\n\n }\n\n\n\n let img = image::open(&self.image_path).unwrap();\n\n let gray = img.grayscale();\n\n\n\n let w = gray.width() as usize;\n\n let h = gray.height() as usize;\n\n\n\n let max = *gray.to_bytes().iter().max().unwrap() as Float;\n\n let grat_mat = MatrixXcf::from_iterator(\n\n w,\n\n h,\n\n gray.to_bytes()\n\n .iter()\n\n .map(|&v| Complex::new(v as Float / max, 0.)),\n\n );\n\n\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/fft/ifft.rs", "rank": 86, "score": 61542.99668844575 }, { "content": " let y = (up.dot(&pos) / self.spacing).ceil() as isize;\n\n let x = (w / 2) as isize - (array_w / 2) as isize + x;\n\n let y = (h / 2) as isize - (array_h / 2) as isize + y;\n\n if x < 0 || x >= w as isize {\n\n continue;\n\n }\n\n if y < 0 || y >= h as isize {\n\n continue;\n\n }\n\n let x = x as usize;\n\n let y = y as usize;\n\n\n\n let r = (pos - center + Vector3::new(0., 0., self.z)).norm();\n\n\n\n source.set_amp(source.amp() * tmp[(x, y)].abs() / max);\n\n source.set_phase(tmp[(x, y)].arg() - 2.0 * PI * source.frequency() / sound_speed * r);\n\n 
}\n\n }\n\n}\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/fft/ifft.rs", "rank": 87, "score": 61542.798116743856 }, { "content": " tikhonov_parameter: TIKHONOV_DEFAULT,\n\n }\n\n }\n\n\n\n pub fn set_repeat(&mut self, repeat: usize) {\n\n self.repeat = repeat;\n\n }\n\n\n\n pub fn set_lambda(&mut self, lambda: Float) {\n\n self.lambda = lambda;\n\n }\n\n\n\n pub fn set_tikhonov_param(&mut self, a: Float) {\n\n self.tikhonov_parameter = a;\n\n }\n\n}\n\n\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/matrix/horn.rs", "rank": 88, "score": 61541.191951868626 }, { "content": " .chain((0..n).map(|_| Complex::new(0., 0.))),\n\n );\n\n\n\n let gt = g.adjoint();\n\n let gtg = &gt * g;\n\n let gtf = gt * f;\n\n let q = gtg.qr().solve(&gtf).unwrap();\n\n\n\n let max_coeff = q.camax();\n\n for (wave_source, qe) in system.wave_sources_mut().iter_mut().zip(q.iter()) {\n\n let amp = wave_source.amp() * qe.abs() / max_coeff;\n\n let phase = qe.argument();\n\n wave_source.set_amp(amp);\n\n wave_source.set_phase(phase);\n\n }\n\n }\n\n}\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/matrix/long.rs", "rank": 89, "score": 61539.82626183091 }, { "content": "\n\n let eig = na::SymmetricEigen::new(X);\n\n let u = eig.eigenvectors.column(eig.eigenvalues.imax());\n\n let q = G_pinv * P * u;\n\n let max_coeff = q.camax();\n\n for (wave_source, qe) in system.wave_sources_mut().iter_mut().zip(q.iter()) {\n\n let amp = wave_source.amp() * qe.abs() / max_coeff;\n\n let phase = qe.argument();\n\n wave_source.set_amp(amp);\n\n wave_source.set_phase(phase);\n\n }\n\n }\n\n}\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/matrix/horn.rs", "rank": 90, "score": 61539.82626183091 }, { "content": " let s = &nabla_J_new - nabla_J;\n\n let y = d;\n\n\n\n H = &H + &y * y.transpose() / y.dot(&s)\n\n - (&H * &s * s.transpose() * H.transpose()) / ((s.transpose() * &H * s)[0]);\n\n\n\n q = q_new;\n\n nabla_J = nabla_J_new;\n\n }\n\n\n\n let max_coeff = 
q.camax();\n\n for (wave_source, qe) in system.wave_sources_mut().iter_mut().zip(q.iter()) {\n\n let amp = wave_source.amp() * qe.abs() / max_coeff;\n\n let phase = qe.argument();\n\n wave_source.set_amp(amp);\n\n wave_source.set_phase(phase);\n\n }\n\n }\n\n}\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/nls/apo.rs", "rank": 91, "score": 61539.561932909666 }, { "content": "\n\n let A_max = A.diagonal().max();\n\n let mut mu = TAU * A_max;\n\n let mut found = g.max() <= EPS_1;\n\n let mut Fx = calc_Fx(&BhB, &x, n, m);\n\n const ONE: Float = 1.0;\n\n for _ in 0..K_MAX {\n\n if found {\n\n break;\n\n }\n\n\n\n let h_lm = match (&A + &I.scale(mu)).qr().solve(&g) {\n\n Some(v) => -v,\n\n None => {\n\n break;\n\n }\n\n };\n\n if h_lm.norm() <= EPS_2 * (x.norm() + EPS_2) {\n\n found = true;\n\n } else {\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/nls/lm.rs", "rank": 92, "score": 61536.49060448583 }, { "content": " m: usize,\n\n lambda: Float,\n\n ) -> Float {\n\n let mut alpha = 0.;\n\n let mut min = Float::INFINITY;\n\n\n\n for i in 0..LINE_SEARCH_MAX {\n\n let a = i as Float / LINE_SEARCH_MAX as Float;\n\n let v = Self::calc_J(p2, &(q + d.scale(a)), Ris, m, lambda);\n\n if v < min {\n\n alpha = a;\n\n min = v;\n\n }\n\n }\n\n\n\n alpha\n\n }\n\n}\n\n\n\nimpl Optimizer for Apo {\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/nls/apo.rs", "rank": 93, "score": 61536.3241697748 }, { "content": " let x_new = &x + &h_lm;\n\n let Fx_new = calc_Fx(&BhB, &x_new, n, m);\n\n let L0_Lhlm = 0.5 * h_lm.dot(&(mu * &h_lm - &g));\n\n let rho = (Fx - Fx_new) / L0_Lhlm;\n\n Fx = Fx_new;\n\n if rho > 0.0 {\n\n x = x_new;\n\n let T = make_T(&x, n, m);\n\n let (A_new, g_new) = calc_JtJ_Jtf(&BhB, &T);\n\n A = A_new;\n\n g = g_new;\n\n found = g.max() <= EPS_1;\n\n mu *= (ONE / 3.).max(1. - (2. 
* rho - 1.).powf(3.));\n\n nu = 2.0;\n\n } else {\n\n mu *= nu;\n\n nu *= 2.0;\n\n }\n\n }\n\n }\n\n\n\n for (wave_source, &xe) in system.wave_sources_mut().iter_mut().zip(x.iter().take(n)) {\n\n wave_source.set_phase(xe);\n\n }\n\n }\n\n}\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/nls/lm.rs", "rank": 94, "score": 61534.870155314544 }, { "content": "\n\n let sigma = MatrixXcf::from_diagonal(&VectorXcf::from_iterator(\n\n n,\n\n G.column_iter()\n\n .map(|col| {\n\n col.iter()\n\n .zip(self.amps.iter())\n\n .map(|(a, &amp)| a.abs() * amp)\n\n .sum()\n\n })\n\n .map(|s: Float| Complex::new((s / m as Float).sqrt(), 0.0)),\n\n ));\n\n\n\n let g = append_matrix_row(G, &sigma);\n\n let f = VectorXcf::from_iterator(\n\n m + n,\n\n self.amps\n\n .iter()\n\n .zip(e_arg.iter())\n\n .map(|(amp, &e)| amp * (Complex::new(0., e)).exp())\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/matrix/long.rs", "rank": 95, "score": 61534.246198431196 }, { "content": " if gamma.re > 0. 
{\n\n let Xb = Xb.scale(-(lambda / gamma.re).sqrt());\n\n X.slice_mut((ii, 0), (1, ii))\n\n .copy_from(&Xb.slice((0, 0), (ii, 1)).adjoint());\n\n X.slice_mut((ii, ii + 1), (1, m - ii - 1))\n\n .copy_from(&Xb.slice((ii, 0), (m - 1 - ii, 1)).adjoint());\n\n X.slice_mut((0, ii), (ii, 1))\n\n .copy_from(&Xb.slice((0, 0), (ii, 1)));\n\n X.slice_mut((ii + 1, ii), (m - ii - 1, 1))\n\n .copy_from(&Xb.slice((ii, 0), (m - 1 - ii, 1)));\n\n } else {\n\n let z1 = VectorXcf::zeros(ii);\n\n let z2 = VectorXcf::zeros(m - ii - 1);\n\n X.slice_mut((ii, 0), (1, ii)).copy_from(&z1.adjoint());\n\n X.slice_mut((ii, ii + 1), (1, m - ii - 1))\n\n .copy_from(&z2.adjoint());\n\n X.slice_mut((0, ii), (ii, 1)).copy_from(&z1);\n\n X.slice_mut((ii + 1, ii), (m - ii - 1, 1)).copy_from(&z2);\n\n }\n\n }\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/matrix/horn.rs", "rank": 96, "score": 61533.85974139268 }, { "content": "\n\n let mut H = I;\n\n let mut q = q0;\n\n\n\n let mut nabla_J = Self::calc_nabla_J(&p2, &q, &Ris, m, self.lambda);\n\n for _ in 0..K_MAX {\n\n let d = -(&H * &nabla_J);\n\n\n\n // let alpha: Float = 0.01;\n\n let alpha = Self::line_search(&q, &d, &p2, &Ris, m, self.lambda);\n\n\n\n let d = d.scale(alpha);\n\n\n\n if d.norm() < EPS {\n\n break;\n\n }\n\n\n\n let q_new = &q + &d;\n\n let nabla_J_new = Self::calc_nabla_J(&p2, &q_new, &Ris, m, self.lambda);\n\n\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/nls/apo.rs", "rank": 97, "score": 61533.469970797756 }, { "content": " G.adjoint() * Di * G\n\n }\n\n\n\n #[allow(non_snake_case)]\n\n fn calc_J(p2: &VectorXf, q: &VectorXcf, Ris: &[MatrixXcf], m: usize, lambda: Float) -> Float {\n\n (0..m)\n\n .map(|i| {\n\n let s = (q.adjoint() * &Ris[i] * q)[0] - p2[i];\n\n s.norm_squared()\n\n })\n\n .sum::<Float>()\n\n + q.dot(&q).abs() * lambda\n\n }\n\n\n\n #[allow(non_snake_case)]\n\n fn calc_nabla_J(\n\n p2: &VectorXf,\n\n q: &VectorXcf,\n\n Ris: &[MatrixXcf],\n\n m: usize,\n", "file_path": 
"acoustic-field-optimizer/src/multiple_foci/nls/apo.rs", "rank": 98, "score": 61533.10479271085 }, { "content": " lambda: Float,\n\n ) -> VectorXcf {\n\n (0..m)\n\n .map(|i| {\n\n let s = p2[i] - (q.adjoint() * &Ris[i] * q)[0].abs();\n\n (&Ris[i] * q).scale(s)\n\n })\n\n .sum::<VectorXcf>()\n\n + q.scale(lambda)\n\n }\n\n\n\n // Does not consider Wolfe-Powell condition\n\n // Only search alpha in [0,1)\n\n #[allow(non_snake_case)]\n\n #[allow(clippy::many_single_char_names)]\n\n fn line_search(\n\n q: &VectorXcf,\n\n d: &VectorXcf,\n\n p2: &VectorXf,\n\n Ris: &[MatrixXcf],\n", "file_path": "acoustic-field-optimizer/src/multiple_foci/nls/apo.rs", "rank": 99, "score": 61529.50041721324 } ]
Rust
src/test/util/matchable.rs
PhotonQuantum/mongo-rust-driver
fd0f75c1af911d02d55659876456838d5f2b8bf0
use std::{any::Any, fmt::Debug, time::Duration}; use crate::{ bson::{Bson, Document}, bson_util, options::{AuthMechanism, Credential}, }; pub trait Matchable: Sized + 'static { fn is_placeholder(&self) -> bool { false } fn content_matches(&self, expected: &Self) -> Result<(), String>; fn matches<T: Matchable + Any>(&self, expected: &T) -> Result<(), String> { if expected.is_placeholder() { return Ok(()); } if let Some(expected) = <dyn Any>::downcast_ref::<Self>(expected) { self.content_matches(expected) } else { Err(format!( "Couldn't down downcast expected ({:?}) to self ({:?})", expected.type_id(), self.type_id() )) } } } pub trait MatchErrExt { fn prefix(self, name: &str) -> Self; } impl MatchErrExt for Result<(), String> { fn prefix(self, name: &str) -> Self { self.map_err(|s| format!("{}: {}", name, s)) } } pub fn eq_matches<T: PartialEq + Debug>( name: &str, actual: &T, expected: &T, ) -> Result<(), String> { if actual != expected { return Err(format!( "expected {} {:?}, got {:?}", name, expected, actual )); } Ok(()) } impl Matchable for Bson { fn is_placeholder(&self) -> bool { if let Bson::String(string) = self { string.as_str() == "42" || string.as_str() == "" } else { get_int(self) == Some(42) } } fn content_matches(&self, expected: &Bson) -> Result<(), String> { match (self, expected) { (Bson::Document(actual_doc), Bson::Document(expected_doc)) => { actual_doc.matches(expected_doc) } (Bson::Array(actual_array), Bson::Array(expected_array)) => { if actual_array.len() < expected_array.len() { return Err(format!( "expected {} array elements, got {}", expected_array.len(), actual_array.len() )); } for (actual, expected) in actual_array.iter().zip(expected_array.iter()) { actual.matches(expected)?; } Ok(()) } _ => { match (bson_util::get_int(self), get_int(expected)) { (Some(actual_int), Some(expected_int)) => { eq_matches("int", &actual_int, &expected_int)? 
} (None, Some(expected_int)) => { return Err(format!("expected int {}, got none", expected_int)) } _ => eq_matches("bson", self, expected)?, } Ok(()) } } } } impl Matchable for Document { fn content_matches(&self, expected: &Document) -> Result<(), String> { for (k, v) in expected.iter() { if k == "upsertedCount" { continue; } if k == "afterClusterTime" { continue; } if k == "recoveryToken" && v.is_placeholder() && self.get_document(k).is_ok() { continue; } if k == "readConcern" { if let Some(doc) = v.as_document() { if doc.len() == 1 && doc.get_i32("afterClusterTime") == Ok(42) { continue; } } } match self.get(k) { Some(actual_v) => actual_v.matches(v).prefix(k)?, None => { if v != &Bson::Null { return Err(format!("{:?}: expected value {:?}, got null", k, v)); } } } } Ok(()) } } impl Matchable for Credential { fn content_matches(&self, expected: &Credential) -> Result<(), String> { self.username .content_matches(&expected.username) .prefix("username")?; self.source .content_matches(&expected.source) .prefix("source")?; self.password .content_matches(&expected.password) .prefix("password")?; self.mechanism .content_matches(&expected.mechanism) .prefix("mechanism")?; self.mechanism_properties .content_matches(&expected.mechanism_properties) .prefix("mechanism_properties")?; Ok(()) } } impl Matchable for AuthMechanism { fn content_matches(&self, expected: &AuthMechanism) -> Result<(), String> { eq_matches("AuthMechanism", self, expected) } } impl Matchable for bool { fn content_matches(&self, expected: &bool) -> Result<(), String> { eq_matches("bool", self, expected) } } impl Matchable for u32 { fn is_placeholder(&self) -> bool { self == &42 } fn content_matches(&self, expected: &u32) -> Result<(), String> { eq_matches("u32", self, expected) } } impl Matchable for String { fn is_placeholder(&self) -> bool { self.as_str() == "42" } fn content_matches(&self, expected: &String) -> Result<(), String> { eq_matches("String", self, expected) } } impl Matchable for Duration 
{ fn content_matches(&self, expected: &Duration) -> Result<(), String> { eq_matches("Duration", self, expected) } } impl<T: Matchable> Matchable for Option<T> { fn is_placeholder(&self) -> bool { match self { Some(ref v) => v.is_placeholder(), None => true, } } fn content_matches(&self, expected: &Option<T>) -> Result<(), String> { if let Some(expected_value) = expected { return match self { Some(actual_value) => actual_value.content_matches(expected_value), None => Err("expected Some(_), got None".to_string()), }; } Ok(()) } } pub fn assert_matches<A: Matchable + Debug, E: Matchable + Debug>( actual: &A, expected: &E, description: Option<&str>, ) { let result = actual.matches(expected); assert!( result.is_ok(), "{}\n{:#?}\n did not MATCH \n{:#?}\n MATCH failure: {}", description.unwrap_or(""), actual, expected, result.unwrap_err(), ); } fn parse_i64_ext_json(doc: &Document) -> Option<i64> { let number_string = doc.get("$numberLong").and_then(Bson::as_str)?; number_string.parse::<i64>().ok() } fn get_int(value: &Bson) -> Option<i64> { bson_util::get_int(value).or_else(|| value.as_document().and_then(parse_i64_ext_json)) }
use std::{any::Any, fmt::Debug, time::Duration}; use crate::{ bson::{Bson, Document}, bson_util, options::{AuthMechanism, Credential}, }; pub trait Matchable: Sized + 'static { fn is_placeholder(&self) -> bool { false } fn content_matches(&self, expected: &Self) -> Result<(), String>; fn matches<T: Matchable + Any>(&self, expected: &T) -> Result<(), String> { if expected.is_placeholder() { return Ok(()); } if let Some(expected) = <dyn Any>::downcast_ref::<Self>(expected) { self.content_matches(expected) } else { Err(format!( "Couldn't down downcast expected ({:?}) to self ({:?})", expected.type_id(), self.type_id() )) } } } pub trait MatchErrExt { fn prefix(self, name: &str) -> Self; } impl MatchErrExt for Result<(), String> { fn prefix(self, name: &str) -> Self { self.map_err(|s| format!("{}: {}", name, s)) } } pub fn eq_matches<T: PartialEq + Debug>( name: &str, actual: &T, expected: &T, ) -> Result<(), String> { if actual != expected { return Err(format!( "expected {} {:?}, got {:?}", name, expected, actual )); } Ok(()) } impl Matchable for Bson { fn is_placeholder(&self) -> bool { if let Bson::String(string) = self { string.as_str() == "42" || string.as_str() == "" } else { get_int(self) == Some(42) } } fn content_matches(&self, expected: &Bson) -> Result<(), String> { match (self, expected) { (Bson::Document(actual_doc), Bson::Document(expected_doc)) => { actual_doc.matches(expected_doc) } (Bson::Array(actual_array), Bson::Array(expected_array)) => { if actual_array.len() < expected_array.len() { return Err(format!( "expected {} array elements, got {}", expected_array.len(), actual_array.len() )); } for (actual, expected) in actual_array.iter().zip(expected_array.iter()) { actual.matches(expected)?; } Ok(()) } _ => { match (bson_util::get_int(self), get_int(expected)) { (Some(actual_int), Some(expected_int)) => { eq_matches("int", &actual_int, &expected_int)? 
} (None, Some(expected_int)) => { return Err(format!("expected int {}, got none", expected_int)) } _ => eq_matches("bson", self, expected)?, } Ok(()) } } } } impl Matchable for Document { fn content_matches(&self, expected: &Document) -> Result<(), String> { for (k, v) in expected.iter() { if k == "upsertedCount" { continue; } if k == "afterClusterTime" { continue; } if k == "recoveryToken" && v.is_placeholder() && self.get_document(k).is_ok() { continue; } if k == "readConcern" { if let Some(doc) = v.as_document() { if doc.len() == 1 && doc.get_i32("afterClusterTime") == Ok(42) { continue; } } } match self.get(k) { Some(actual_v) => actual_v.matches(v).prefix(
.prefix("username")?; self.source .content_matches(&expected.source) .prefix("source")?; self.password .content_matches(&expected.password) .prefix("password")?; self.mechanism .content_matches(&expected.mechanism) .prefix("mechanism")?; self.mechanism_properties .content_matches(&expected.mechanism_properties) .prefix("mechanism_properties")?; Ok(()) } } impl Matchable for AuthMechanism { fn content_matches(&self, expected: &AuthMechanism) -> Result<(), String> { eq_matches("AuthMechanism", self, expected) } } impl Matchable for bool { fn content_matches(&self, expected: &bool) -> Result<(), String> { eq_matches("bool", self, expected) } } impl Matchable for u32 { fn is_placeholder(&self) -> bool { self == &42 } fn content_matches(&self, expected: &u32) -> Result<(), String> { eq_matches("u32", self, expected) } } impl Matchable for String { fn is_placeholder(&self) -> bool { self.as_str() == "42" } fn content_matches(&self, expected: &String) -> Result<(), String> { eq_matches("String", self, expected) } } impl Matchable for Duration { fn content_matches(&self, expected: &Duration) -> Result<(), String> { eq_matches("Duration", self, expected) } } impl<T: Matchable> Matchable for Option<T> { fn is_placeholder(&self) -> bool { match self { Some(ref v) => v.is_placeholder(), None => true, } } fn content_matches(&self, expected: &Option<T>) -> Result<(), String> { if let Some(expected_value) = expected { return match self { Some(actual_value) => actual_value.content_matches(expected_value), None => Err("expected Some(_), got None".to_string()), }; } Ok(()) } } pub fn assert_matches<A: Matchable + Debug, E: Matchable + Debug>( actual: &A, expected: &E, description: Option<&str>, ) { let result = actual.matches(expected); assert!( result.is_ok(), "{}\n{:#?}\n did not MATCH \n{:#?}\n MATCH failure: {}", description.unwrap_or(""), actual, expected, result.unwrap_err(), ); } fn parse_i64_ext_json(doc: &Document) -> Option<i64> { let number_string = 
doc.get("$numberLong").and_then(Bson::as_str)?; number_string.parse::<i64>().ok() } fn get_int(value: &Bson) -> Option<i64> { bson_util::get_int(value).or_else(|| value.as_document().and_then(parse_i64_ext_json)) }
k)?, None => { if v != &Bson::Null { return Err(format!("{:?}: expected value {:?}, got null", k, v)); } } } } Ok(()) } } impl Matchable for Credential { fn content_matches(&self, expected: &Credential) -> Result<(), String> { self.username .content_matches(&expected.username)
random
[ { "content": "fn numbers_match(actual: &Bson, expected: &Bson) -> bool {\n\n if actual.element_type() == expected.element_type() {\n\n return actual == expected;\n\n }\n\n\n\n match (get_int(actual), get_int(expected)) {\n\n (Some(actual), Some(expected)) => actual == expected,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "src/test/spec/unified_runner/matcher.rs", "rank": 0, "score": 356398.5618442062 }, { "content": "fn entity_matches(id: &str, actual: Option<&Bson>, entities: &EntityMap) -> bool {\n\n let bson = entities.get(id).unwrap().as_bson();\n\n results_match_inner(actual, bson, false, false, Some(entities))\n\n}\n\n\n", "file_path": "src/test/spec/unified_runner/matcher.rs", "rank": 2, "score": 318224.2988825569 }, { "content": "fn type_matches(types: &Bson, actual: &Bson) -> bool {\n\n match types {\n\n Bson::Array(types) => types.iter().any(|t| type_matches(t, actual)),\n\n Bson::String(str) => match str.as_ref() {\n\n \"double\" => actual.element_type() == ElementType::Double,\n\n \"string\" => actual.element_type() == ElementType::String,\n\n \"object\" => actual.element_type() == ElementType::EmbeddedDocument,\n\n \"array\" => actual.element_type() == ElementType::Array,\n\n \"binData\" => actual.element_type() == ElementType::Binary,\n\n \"undefined\" => actual.element_type() == ElementType::Undefined,\n\n \"objectId\" => actual.element_type() == ElementType::ObjectId,\n\n \"bool\" => actual.element_type() == ElementType::Boolean,\n\n \"date\" => actual.element_type() == ElementType::DateTime,\n\n \"null\" => actual.element_type() == ElementType::Null,\n\n \"regex\" => actual.element_type() == ElementType::RegularExpression,\n\n \"dbPointer\" => actual.element_type() == ElementType::DbPointer,\n\n \"javascript\" => actual.element_type() == ElementType::JavaScriptCode,\n\n \"symbol\" => actual.element_type() == ElementType::Symbol,\n\n \"javascriptWithScope\" => actual.element_type() == ElementType::JavaScriptCodeWithScope,\n\n \"int\" => 
actual.element_type() == ElementType::Int32,\n", "file_path": "src/test/spec/unified_runner/matcher.rs", "rank": 3, "score": 314701.4136685454 }, { "content": "/// Parses a string slice of the form \"<expected_key>=<body>\" into \"<body>\", if possible.\n\nfn parse_kvp(str: &str, expected_key: char) -> Result<String> {\n\n if !str.starts_with(expected_key) || str.chars().nth(1) != Some('=') {\n\n Err(Error::invalid_authentication_response(\"SCRAM\"))\n\n } else {\n\n Ok(str.chars().skip(2).collect())\n\n }\n\n}\n\n\n\n/// Model of the first message sent by the client.\n\n#[derive(Debug)]\n\npub(crate) struct ClientFirst {\n\n source: String,\n\n\n\n message: String,\n\n\n\n gs2_header: Range<usize>,\n\n\n\n bare: Range<usize>,\n\n\n\n nonce: String,\n", "file_path": "src/client/auth/scram.rs", "rank": 4, "score": 308910.0430940224 }, { "content": "pub fn get_default_name(description: &str) -> String {\n\n let mut db_name = description\n\n .replace('$', \"%\")\n\n .replace(' ', \"_\")\n\n .replace('.', \"_\");\n\n // database names must have fewer than 64 characters\n\n db_name.truncate(63);\n\n db_name\n\n}\n", "file_path": "src/test/util/mod.rs", "rank": 7, "score": 307554.5885490599 }, { "content": "fn lists_eq(actual: &Option<Vec<String>>, expected: &[String]) -> bool {\n\n if let Some(actual) = actual {\n\n actual.as_slice() == expected\n\n } else {\n\n expected.is_empty()\n\n }\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\", deny_unknown_fields)]\n\npub struct TestTopologyDescription {\n\n topology_type: String,\n\n set_name: Option<String>,\n\n servers: Vec<TestServerDescription>,\n\n}\n\n\n\nimpl PartialEq<TestTopologyDescription> for TopologyDescription {\n\n fn eq(&self, other: &TestTopologyDescription) -> bool {\n\n if self.topology_type.as_str() != other.topology_type.as_str()\n\n || self.set_name != other.set_name\n", "file_path": "src/sdam/description/topology/test/event.rs", "rank": 8, "score": 296164.6007067143 }, { 
"content": "fn percent_decode(s: &str, err_message: &str) -> Result<String> {\n\n match percent_encoding::percent_decode_str(s).decode_utf8() {\n\n Ok(result) => Ok(result.to_string()),\n\n Err(_) => Err(ErrorKind::InvalidArgument {\n\n message: err_message.to_string(),\n\n }\n\n .into()),\n\n }\n\n}\n\n\n", "file_path": "src/client/options/mod.rs", "rank": 10, "score": 270634.2664038961 }, { "content": "fn return_document_to_bool(return_document: Option<ReturnDocument>) -> Option<bool> {\n\n if let Some(return_document) = return_document {\n\n return match return_document {\n\n ReturnDocument::After => Some(true),\n\n ReturnDocument::Before => Some(false),\n\n };\n\n }\n\n None\n\n}\n", "file_path": "src/operation/find_and_modify/options.rs", "rank": 11, "score": 253159.04018224837 }, { "content": "/// Validates that a `saslStart` or `saslContinue` command response is successful.\n\nfn validate_command_success(auth_mechanism: &str, response: &Document) -> Result<()> {\n\n let ok = match response.get(\"ok\") {\n\n Some(ok) => ok,\n\n None => return Ok(()),\n\n };\n\n\n\n match bson_util::get_int(ok) {\n\n Some(1) => Ok(()),\n\n Some(_) => Err(Error::authentication_error(\n\n auth_mechanism,\n\n response\n\n .get_str(\"errmsg\")\n\n .unwrap_or(\"Authentication failure\"),\n\n )),\n\n _ => Err(Error::invalid_authentication_response(auth_mechanism)),\n\n }\n\n}\n\n\n\n/// Encapsulates the parsing of the response to a `saslStart` or `saslContinue` command.\n\npub(super) struct SaslResponse {\n", "file_path": "src/client/auth/sasl.rs", "rank": 12, "score": 250364.19618510967 }, { "content": "fn init_db_and_coll(client: &Client, db_name: &str, coll_name: &str) -> Collection<Document> {\n\n let coll = client.database(db_name).collection(coll_name);\n\n coll.drop(None).unwrap();\n\n coll\n\n}\n\n\n", "file_path": "src/sync/test.rs", "rank": 13, "score": 243787.65889477904 }, { "content": "pub fn results_match(\n\n actual: Option<&Bson>,\n\n expected: &Bson,\n\n 
returns_root_documents: bool,\n\n entities: Option<&EntityMap>,\n\n) -> bool {\n\n results_match_inner(actual, expected, returns_root_documents, true, entities)\n\n}\n\n\n", "file_path": "src/test/spec/unified_runner/matcher.rs", "rank": 14, "score": 238362.01351018308 }, { "content": "pub trait TestOperation: Debug {\n\n fn execute_test_runner_operation<'a>(\n\n &'a self,\n\n _test_runner: &'a mut TestRunner,\n\n ) -> BoxFuture<'a, ()> {\n\n todo!()\n\n }\n\n\n\n fn execute_entity_operation<'a>(\n\n &'a self,\n\n _id: &'a str,\n\n _test_runner: &'a mut TestRunner,\n\n ) -> BoxFuture<'a, Result<Option<Entity>>> {\n\n todo!()\n\n }\n\n\n\n /// Whether or not this operation returns an array of root documents. This information is\n\n /// necessary to determine how the return value of an operation should be compared to the\n\n /// expected value.\n\n fn returns_root_documents(&self) -> bool {\n", "file_path": "src/test/spec/unified_runner/operation.rs", "rank": 16, "score": 214683.8956574282 }, { "content": "pub trait TestOperation: Debug {\n\n fn execute_on_collection<'a>(\n\n &'a self,\n\n _collection: &'a Collection<Document>,\n\n _session: Option<&'a mut ClientSession>,\n\n ) -> BoxFuture<'a, Result<Option<Bson>>> {\n\n todo!()\n\n }\n\n\n\n fn execute_on_database<'a>(\n\n &'a self,\n\n _database: &'a Database,\n\n _session: Option<&'a mut ClientSession>,\n\n ) -> BoxFuture<'a, Result<Option<Bson>>> {\n\n todo!()\n\n }\n\n\n\n fn execute_on_client<'a>(\n\n &'a self,\n\n _client: &'a TestClient,\n", "file_path": "src/test/spec/v2_runner/operation.rs", "rank": 17, "score": 214683.8956574282 }, { "content": "fn validate_userinfo(s: &str, userinfo_type: &str) -> Result<()> {\n\n if s.chars().any(|c| USERINFO_RESERVED_CHARACTERS.contains(&c)) {\n\n return Err(ErrorKind::InvalidArgument {\n\n message: format!(\"{} must be URL encoded\", userinfo_type),\n\n }\n\n .into());\n\n }\n\n\n\n // All instances of '%' in the username must be part of an percent-encoded substring. 
This means\n\n // that there must be two hexidecimal digits following any '%' in the username.\n\n if s.split('%')\n\n .skip(1)\n\n .any(|part| part.len() < 2 || part[0..2].chars().any(|c| !c.is_ascii_hexdigit()))\n\n {\n\n return Err(ErrorKind::InvalidArgument {\n\n message: \"username/password cannot contain unescaped %\".to_string(),\n\n }\n\n .into());\n\n }\n\n\n", "file_path": "src/client/options/mod.rs", "rank": 18, "score": 214648.97299876367 }, { "content": "fn write_concern_to_document(write_concern: &WriteConcern) -> Result<Document> {\n\n match bson::to_bson(&write_concern)? {\n\n Bson::Document(doc) => Ok(doc),\n\n _ => unreachable!(),\n\n }\n\n}\n", "file_path": "src/test/spec/read_write_concern/mod.rs", "rank": 19, "score": 197640.28036775635 }, { "content": "#[async_trait::async_trait]\n\npub trait Benchmark: Sized {\n\n type Options;\n\n\n\n /// execute once before benchmarking\n\n async fn setup(options: Self::Options) -> Result<Self>;\n\n\n\n /// execute at the beginning of every iteration\n\n async fn before_task(&mut self) -> Result<()> {\n\n Ok(())\n\n }\n\n\n\n async fn do_task(&self) -> Result<()>;\n\n\n\n /// execute at the end of every iteration\n\n async fn after_task(&self) -> Result<()> {\n\n Ok(())\n\n }\n\n\n\n /// execute once after benchmarking\n\n async fn teardown(&self) -> Result<()> {\n", "file_path": "benchmarks/src/bench/mod.rs", "rank": 20, "score": 197101.38501041738 }, { "content": "pub fn events_match(\n\n actual: &TestEvent,\n\n expected: &TestEvent,\n\n entities: Option<&EntityMap>,\n\n) -> bool {\n\n match (actual, expected) {\n\n (\n\n TestEvent::Started {\n\n command_name: actual_command_name,\n\n database_name: actual_database_name,\n\n command: actual_command,\n\n },\n\n TestEvent::Started {\n\n command_name: expected_command_name,\n\n database_name: expected_database_name,\n\n command: expected_command,\n\n },\n\n ) => {\n\n if expected_command_name.is_some() && actual_command_name != expected_command_name {\n\n 
return false;\n", "file_path": "src/test/spec/unified_runner/matcher.rs", "rank": 21, "score": 192914.75825798826 }, { "content": "fn verify_max_staleness_inner(max_staleness: Option<Duration>) -> std::result::Result<(), String> {\n\n if max_staleness\n\n .map(|staleness| staleness > Duration::from_secs(0) && staleness < Duration::from_secs(90))\n\n .unwrap_or(false)\n\n {\n\n return Err(\"max staleness cannot be both positive and below 90 seconds\".into());\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "src/sdam/description/topology/mod.rs", "rank": 22, "score": 192541.96402708424 }, { "content": "fn parse_ids(matches: ArgMatches) -> Vec<bool> {\n\n let id_list: Vec<usize> = match matches.value_of(\"ids\") {\n\n Some(\"all\") => vec![1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15],\n\n Some(id_list) => id_list\n\n .split(',')\n\n .map(|str| {\n\n str.parse::<usize>()\n\n .expect(\"invalid test IDs provided, see README\")\n\n })\n\n .collect(),\n\n None => vec![],\n\n };\n\n\n\n let mut ids = vec![false; 15];\n\n for id in id_list {\n\n if id < 1 || id > 15 {\n\n panic!(\"invalid test IDs provided, see README\");\n\n }\n\n ids[id - 1] = true;\n\n }\n", "file_path": "benchmarks/src/main.rs", "rank": 23, "score": 189895.53877577645 }, { "content": "fn command_write_concerns(client: &EventClient, key: &str) -> Vec<Document> {\n\n client\n\n .get_command_started_events(&[key])\n\n .into_iter()\n\n .map(|d| d.command.get_document(\"writeConcern\").unwrap().clone())\n\n .collect()\n\n}\n", "file_path": "src/concern/test.rs", "rank": 24, "score": 183258.38110065938 }, { "content": "fn serialize_true<S: Serializer>(s: S) -> std::result::Result<S::Ok, S::Error> {\n\n s.serialize_bool(true)\n\n}\n\n\n\n#[serde_with::skip_serializing_none]\n\n#[derive(Debug, TypedBuilder, Serialize)]\n\n#[builder(field_defaults(setter(into)))]\n\n#[serde(rename_all = \"camelCase\")]\n\npub(super) struct FindAndModifyOptions {\n\n #[serde(flatten)]\n\n pub(crate) modification: 
Modification,\n\n\n\n #[builder(default)]\n\n pub(crate) sort: Option<Document>,\n\n\n\n #[builder(default)]\n\n pub(crate) new: Option<bool>,\n\n\n\n #[builder(default)]\n\n pub(crate) upsert: Option<bool>,\n", "file_path": "src/operation/find_and_modify/options.rs", "rank": 25, "score": 181357.06562498029 }, { "content": "fn init_db_and_typed_coll<T>(client: &Client, db_name: &str, coll_name: &str) -> Collection<T>\n\nwhere\n\n T: Serialize + DeserializeOwned + Unpin + Debug,\n\n{\n\n let coll = client.database(db_name).collection(coll_name);\n\n coll.drop(None).unwrap();\n\n coll\n\n}\n\n\n", "file_path": "src/sync/test.rs", "rank": 26, "score": 180769.02426546533 }, { "content": "fn normalize_write_concern_doc(mut write_concern_doc: Document) -> Document {\n\n if let Some(w_timeout) = write_concern_doc.remove(\"wtimeout\") {\n\n write_concern_doc.insert(\"wtimeoutMS\", w_timeout);\n\n }\n\n\n\n if let Some(j) = write_concern_doc.remove(\"j\") {\n\n write_concern_doc.insert(\"journal\", j);\n\n }\n\n\n\n write_concern_doc\n\n}\n\n\n\nasync fn run_connection_string_test(test_file: TestFile) {\n\n for test_case in test_file.tests {\n\n match ClientOptions::parse(&test_case.uri).await {\n\n Ok(options) => {\n\n assert!(test_case.valid);\n\n\n\n if let Some(ref expected_read_concern) = test_case.read_concern {\n\n let mut actual_read_concern = Document::new();\n", "file_path": "src/test/spec/read_write_concern/connection_string.rs", "rank": 27, "score": 179593.10794560454 }, { "content": "fn verify_max_staleness(max_staleness: Option<Duration>) -> crate::error::Result<()> {\n\n verify_max_staleness_inner(max_staleness)\n\n .map_err(|s| crate::error::ErrorKind::InvalidArgument { message: s }.into())\n\n}\n\n\n", "file_path": "src/sdam/description/topology/mod.rs", "rank": 28, "score": 178520.61478071482 }, { "content": "pub fn deserialize_server_api_test_format<'de, D>(\n\n deserializer: D,\n\n) -> std::result::Result<Option<ServerApi>, D::Error>\n\nwhere\n\n D: 
Deserializer<'de>,\n\n{\n\n #[derive(Debug, Deserialize)]\n\n #[serde(rename_all = \"camelCase\", deny_unknown_fields)]\n\n struct ApiHelper {\n\n version: ServerApiVersion,\n\n strict: Option<bool>,\n\n deprecation_errors: Option<bool>,\n\n }\n\n\n\n let h = ApiHelper::deserialize(deserializer)?;\n\n Ok(Some(ServerApi {\n\n version: h.version,\n\n strict: h.strict,\n\n deprecation_errors: h.deprecation_errors,\n\n }))\n\n}\n\n\n", "file_path": "src/test/spec/unified_runner/test_file.rs", "rank": 30, "score": 167446.6656766028 }, { "content": "pub fn deserialize_uri_options_to_uri_string<'de, D>(\n\n deserializer: D,\n\n) -> std::result::Result<String, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let uri_options = Document::deserialize(deserializer)?;\n\n\n\n let mut default_uri_parts = DEFAULT_URI.split('?');\n\n\n\n let mut uri = String::from(default_uri_parts.next().unwrap());\n\n // A connection string has two slashes before the host list and one slash before the auth db\n\n // name. 
If an auth db name is not provided the latter slash might not be present, so it needs\n\n // to be added manually.\n\n if uri.chars().filter(|c| *c == '/').count() < 3 {\n\n uri.push('/');\n\n }\n\n uri.push('?');\n\n\n\n if let Some(options) = default_uri_parts.next() {\n", "file_path": "src/test/spec/unified_runner/test_file.rs", "rank": 31, "score": 167419.96820308175 }, { "content": "fn results_match_inner(\n\n actual: Option<&Bson>,\n\n expected: &Bson,\n\n returns_root_documents: bool,\n\n root: bool,\n\n entities: Option<&EntityMap>,\n\n) -> bool {\n\n match expected {\n\n Bson::Document(expected_doc) => {\n\n if let Some((key, value)) = expected_doc.iter().next() {\n\n if key.starts_with(\"$$\") && expected_doc.len() == 1 {\n\n return special_operator_matches((key, value), actual, entities);\n\n }\n\n }\n\n\n\n let actual_doc = match actual {\n\n Some(Bson::Document(actual)) => actual,\n\n // The only case in which None is an acceptable value is if the expected document\n\n // is a special operator; otherwise, the two documents do not match.\n\n _ => return false,\n", "file_path": "src/test/spec/unified_runner/matcher.rs", "rank": 32, "score": 166220.89062157754 }, { "content": "/// Returns a vector of documents that cannot be sent in one batch (35000 documents).\n\n/// Includes duplicate _id's across different batches.\n\nfn multibatch_documents_with_duplicate_keys() -> Vec<Document> {\n\n let large_doc = LARGE_DOC.clone();\n\n\n\n let mut docs: Vec<Document> = Vec::new();\n\n docs.extend(vec![large_doc.clone(); 7498]);\n\n\n\n docs.push(doc! { \"_id\": 1 });\n\n docs.push(doc! { \"_id\": 1 }); // error in first batch, index 7499\n\n\n\n docs.extend(vec![large_doc.clone(); 14999]);\n\n docs.push(doc! { \"_id\": 1 }); // error in second batch, index 22499\n\n\n\n docs.extend(vec![large_doc.clone(); 9999]);\n\n docs.push(doc! 
{ \"_id\": 1 }); // error in third batch, index 32499\n\n\n\n docs.extend(vec![large_doc; 2500]);\n\n\n\n assert_eq!(docs.len(), 35000);\n\n docs\n\n}\n", "file_path": "src/test/coll.rs", "rank": 33, "score": 154687.36545867508 }, { "content": "/// Splits a string into a section before a given index and a section exclusively after the index.\n\n/// Empty portions are returned as `None`.\n\nfn exclusive_split_at(s: &str, i: usize) -> (Option<&str>, Option<&str>) {\n\n let (l, r) = s.split_at(i);\n\n\n\n let lout = if !l.is_empty() { Some(l) } else { None };\n\n let rout = if r.len() > 1 { Some(&r[1..]) } else { None };\n\n\n\n (lout, rout)\n\n}\n\n\n", "file_path": "src/client/options/mod.rs", "rank": 34, "score": 152287.8323656766 }, { "content": "fn get_default_uri() -> String {\n\n if let Ok(uri) = std::env::var(\"MONGODB_URI\") {\n\n return uri;\n\n }\n\n if let Some(mut home) = home_dir() {\n\n home.push(\".mongodb_uri\");\n\n if let Ok(uri) = read_to_string(home) {\n\n return uri;\n\n }\n\n }\n\n \"mongodb://localhost:27017\".to_string()\n\n}\n", "file_path": "src/test/mod.rs", "rank": 35, "score": 151977.33617549977 }, { "content": "fn default_uri() -> String {\n\n DEFAULT_URI.clone()\n\n}\n\n\n", "file_path": "src/test/spec/unified_runner/test_file.rs", "rank": 36, "score": 145590.50128606753 }, { "content": "fn payload_bytes(username: &str, password: &str) -> Vec<u8> {\n\n let mut bytes = vec![0];\n\n bytes.extend(username.as_bytes());\n\n\n\n bytes.push(0);\n\n bytes.extend(password.as_bytes());\n\n\n\n bytes\n\n}\n", "file_path": "src/client/auth/plain.rs", "rank": 37, "score": 145345.3339256724 }, { "content": "/// Applications can implement this trait to specify custom logic to run on each command event sent\n\n/// by the driver.\n\n///\n\n/// ```rust\n\n/// # use std::sync::Arc;\n\n/// #\n\n/// # use mongodb::{\n\n/// # error::Result,\n\n/// # event::command::{\n\n/// # CommandEventHandler,\n\n/// # CommandFailedEvent\n\n/// # },\n\n/// # 
options::ClientOptions,\n\n/// # };\n\n/// # #[cfg(feature = \"sync\")]\n\n/// # use mongodb::sync::Client;\n\n/// # #[cfg(not(feature = \"sync\"))]\n\n/// # use mongodb::Client;\n\n/// #\n\n/// struct FailedCommandLogger;\n\n///\n\n/// impl CommandEventHandler for FailedCommandLogger {\n\n/// fn handle_command_failed_event(&self, event: CommandFailedEvent) {\n\n/// eprintln!(\"Failed command: {:?}\", event);\n\n/// }\n\n/// }\n\n///\n\n/// # fn do_stuff() -> Result<()> {\n\n/// let handler: Arc<dyn CommandEventHandler> = Arc::new(FailedCommandLogger);\n\n/// let options = ClientOptions::builder()\n\n/// .command_event_handler(handler)\n\n/// .build();\n\n/// let client = Client::with_options(options)?;\n\n///\n\n/// // Do things with the client, and failed command events will be logged to stderr.\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub trait CommandEventHandler: Send + Sync {\n\n /// A [`Client`](../../struct.Client.html) will call this method on each registered handler\n\n /// whenever a database command is initiated.\n\n fn handle_command_started_event(&self, _event: CommandStartedEvent) {}\n\n\n\n /// A [`Client`](../../struct.Client.html) will call this method on each registered handler\n\n /// whenever a database command successfully completes.\n\n fn handle_command_succeeded_event(&self, _event: CommandSucceededEvent) {}\n\n\n\n /// A [`Client`](../../struct.Client.html) will call this method on each registered handler\n\n /// whenever a database command fails to complete successfully.\n\n fn handle_command_failed_event(&self, _event: CommandFailedEvent) {}\n\n}\n", "file_path": "src/event/command.rs", "rank": 38, "score": 140758.05876523396 }, { "content": "/// Applications can implement this trait to specify custom logic to run on each CMAP event sent\n\n/// by the driver.\n\n///\n\n/// ```rust\n\n/// # use std::sync::Arc;\n\n/// #\n\n/// # use mongodb::{\n\n/// # error::Result,\n\n/// # event::cmap::{\n\n/// # CmapEventHandler,\n\n/// # 
ConnectionCheckoutFailedEvent\n\n/// # },\n\n/// # options::ClientOptions,\n\n/// # };\n\n/// # #[cfg(feature = \"sync\")]\n\n/// # use mongodb::sync::Client;\n\n/// # #[cfg(not(feature = \"sync\"))]\n\n/// # use mongodb::Client;\n\n/// #\n\n/// struct FailedCheckoutLogger;\n\n///\n\n/// impl CmapEventHandler for FailedCheckoutLogger {\n\n/// fn handle_connection_checkout_failed_event(&self, event: ConnectionCheckoutFailedEvent) {\n\n/// eprintln!(\"Failed connection checkout: {:?}\", event);\n\n/// }\n\n/// }\n\n///\n\n/// # fn do_stuff() -> Result<()> {\n\n/// let handler: Arc<dyn CmapEventHandler> = Arc::new(FailedCheckoutLogger);\n\n/// let options = ClientOptions::builder()\n\n/// .cmap_event_handler(handler)\n\n/// .build();\n\n/// let client = Client::with_options(options)?;\n\n///\n\n/// // Do things with the client, and failed connection pool checkouts will be logged to stderr.\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub trait CmapEventHandler: Send + Sync {\n\n /// A [`Client`](../../struct.Client.html) will call this method on each registered handler\n\n /// whenever a connection pool is created.\n\n fn handle_pool_created_event(&self, _event: PoolCreatedEvent) {}\n\n\n\n /// A [`Client`](../../struct.Client.html) will call this method on each registered handler\n\n /// whenever a connection pool marked as ready for use.\n\n ///\n\n /// Connections may not be created by or checked out from the pool until it has been marked as\n\n /// ready.\n\n fn handle_pool_ready_event(&self, _event: PoolReadyEvent) {}\n\n\n\n /// A [`Client`](../../struct.Client.html) will call this method on each registered handler\n\n /// whenever a connection pool is cleared.\n\n fn handle_pool_cleared_event(&self, _event: PoolClearedEvent) {}\n\n\n\n /// A [`Client`](../../struct.Client.html) will call this method on each registered handler\n\n /// whenever a connection pool is cleared.\n\n fn handle_pool_closed_event(&self, _event: PoolClosedEvent) {}\n\n\n", "file_path": 
"src/event/cmap.rs", "rank": 39, "score": 140757.49623230443 }, { "content": "fn write_concern_from_document(write_concern_doc: Document) -> Option<WriteConcern> {\n\n let mut write_concern = WriteConcern::default();\n\n\n\n for (key, value) in write_concern_doc {\n\n match (&key[..], value) {\n\n (\"w\", Bson::Int32(i)) => {\n\n write_concern.w = Some(Acknowledgment::from(i as u32));\n\n }\n\n (\"w\", Bson::String(s)) => {\n\n write_concern.w = Some(Acknowledgment::from(s));\n\n }\n\n (\"journal\", Bson::Boolean(b)) => {\n\n write_concern.journal = Some(b);\n\n }\n\n (\"wtimeoutMS\", Bson::Int32(i)) if i > 0 => {\n\n write_concern.w_timeout = Some(Duration::from_millis(i as u64));\n\n }\n\n (\"wtimeoutMS\", Bson::Int32(_)) => {\n\n // WriteConcern has an unsigned integer for the wtimeout field, so this is\n\n // impossible to test.\n", "file_path": "src/test/spec/read_write_concern/document.rs", "rank": 40, "score": 140716.1910332501 }, { "content": "fn is_auth_error(error: Error) -> bool {\n\n matches!(*error.kind, ErrorKind::Authentication { .. 
})\n\n}\n\n\n\n/// Performs an operation that requires authentication and verifies that it either succeeded or\n\n/// failed with an authentication error according to the `should_succeed` parameter.\n\nasync fn auth_test(client: Client, should_succeed: bool) {\n\n let result = client.list_database_names(None, None).await;\n\n if should_succeed {\n\n result.expect(\"operation should have succeeded\");\n\n } else {\n\n assert!(is_auth_error(result.unwrap_err()));\n\n }\n\n}\n\n\n\n/// Attempts to authenticate using the given username/password, optionally specifying a mechanism\n\n/// via the `ClientOptions` api.\n\n///\n\n/// Asserts that the authentication's success matches the provided parameter.\n\nasync fn auth_test_options(\n", "file_path": "src/test/client.rs", "rank": 41, "score": 140654.81786312017 }, { "content": "fn server_type_from_str(s: &str) -> Option<ServerType> {\n\n let t = match s {\n\n \"Standalone\" => ServerType::Standalone,\n\n \"Mongos\" => ServerType::Mongos,\n\n \"RSPrimary\" => ServerType::RsPrimary,\n\n \"RSSecondary\" => ServerType::RsSecondary,\n\n \"RSArbiter\" => ServerType::RsArbiter,\n\n \"RSOther\" => ServerType::RsOther,\n\n \"RSGhost\" => ServerType::RsGhost,\n\n \"Unknown\" | \"PossiblePrimary\" => ServerType::Unknown,\n\n _ => return None,\n\n };\n\n\n\n Some(t)\n\n}\n\n\n\nasync fn run_test(test_file: TestFile) {\n\n let test_description = &test_file.description;\n\n\n\n // TODO: RUST-360 unskip tests that rely on topology version\n", "file_path": "src/sdam/description/topology/test/sdam.rs", "rank": 42, "score": 140220.18451078568 }, { "content": "/// Applications can implement this trait to specify custom logic to run on each SDAM event sent\n\n/// by the driver.\n\n///\n\n/// ```rust\n\n/// # use std::sync::Arc;\n\n/// #\n\n/// # use mongodb::{\n\n/// # error::Result,\n\n/// # event::sdam::{\n\n/// # SdamEventHandler,\n\n/// # ServerHeartbeatFailedEvent,\n\n/// # },\n\n/// # options::ClientOptions,\n\n/// # };\n\n/// # 
#[cfg(feature = \"sync\")]\n\n/// # use mongodb::sync::Client;\n\n/// # #[cfg(not(feature = \"sync\"))]\n\n/// # use mongodb::Client;\n\n/// #\n\n/// struct FailedHeartbeatLogger;\n\n///\n\n/// impl SdamEventHandler for FailedHeartbeatLogger {\n\n/// fn handle_server_heartbeat_failed_event(&self, event: ServerHeartbeatFailedEvent) {\n\n/// eprintln!(\"Failed server heartbeat: {:?}\", event);\n\n/// }\n\n/// }\n\n///\n\n/// # fn do_stuff() -> Result<()> {\n\n/// let handler: Arc<dyn SdamEventHandler> = Arc::new(FailedHeartbeatLogger);\n\n/// let options = ClientOptions::builder()\n\n/// .sdam_event_handler(handler)\n\n/// .build();\n\n/// let client = Client::with_options(options)?;\n\n///\n\n/// // Do things with the client, and failed server heartbeats will be logged to stderr.\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\npub trait SdamEventHandler: Send + Sync {\n\n /// A [`Client`](../../struct.Client.html) will call this method on each registered handler when\n\n /// a server description changes.\n\n fn handle_server_description_changed_event(&self, _event: ServerDescriptionChangedEvent) {}\n\n\n\n /// A [`Client`](../../struct.Client.html) will call this method on each registered handler when\n\n /// a server is initialized.\n\n fn handle_server_opening_event(&self, _event: ServerOpeningEvent) {}\n\n\n\n /// A [`Client`](../../struct.Client.html) will call this method on each registered handler when\n\n /// a server is closed.\n\n fn handle_server_closed_event(&self, _event: ServerClosedEvent) {}\n\n\n\n /// A [`Client`](../../struct.Client.html) will call this method on each registered handler when\n\n /// its topology description changes.\n\n fn handle_topology_description_changed_event(&self, _event: TopologyDescriptionChangedEvent) {}\n\n\n\n /// A [`Client`](../../struct.Client.html) will call this method on each registered handler when\n\n /// its topology is initialized.\n\n fn handle_topology_opening_event(&self, _event: TopologyOpeningEvent) {}\n", 
"file_path": "src/event/sdam/mod.rs", "rank": 43, "score": 138722.13390544933 }, { "content": "fn kill_cursors_sent(client: &EventClient) -> bool {\n\n !client\n\n .get_command_started_events(&[\"killCursors\"])\n\n .is_empty()\n\n}\n\n\n\n#[cfg_attr(feature = \"tokio-runtime\", tokio::test)]\n\n#[cfg_attr(feature = \"async-std-runtime\", async_std::test)]\n\n#[function_name::named]\n\nasync fn kill_cursors_on_drop() {\n\n let _guard: RwLockReadGuard<()> = LOCK.run_concurrently().await;\n\n\n\n let client = TestClient::new().await;\n\n let db = client.database(function_name!());\n\n let coll = db.collection(function_name!());\n\n\n\n drop_collection(&coll).await;\n\n\n\n coll.insert_many(vec![doc! { \"x\": 1 }, doc! { \"x\": 2 }], None)\n\n .await\n", "file_path": "src/test/coll.rs", "rank": 44, "score": 136496.05239041144 }, { "content": "fn finished(duration: Duration, iter: usize) -> bool {\n\n let elapsed = duration.as_secs();\n\n elapsed >= *MAX_EXECUTION_TIME\n\n || (iter >= *TARGET_ITERATION_COUNT && elapsed > *MIN_EXECUTION_TIME)\n\n}\n\n\n\npub async fn run_benchmark<B: Benchmark + Send + Sync>(\n\n options: B::Options,\n\n) -> Result<Vec<Duration>> {\n\n let mut test = B::setup(options).await?;\n\n\n\n let mut test_durations = Vec::new();\n\n\n\n let progress_bar = ProgressBar::new(*TARGET_ITERATION_COUNT as u64);\n\n progress_bar.set_style(\n\n ProgressStyle::default_bar()\n\n .template(\n\n \"{spinner:.green} [{elapsed_precise}] [{bar:40.cyan/blue}] {pos:>2}/{len:2} \\\n\n ({eta})\",\n\n )\n", "file_path": "benchmarks/src/bench/mod.rs", "rank": 45, "score": 129931.9685562989 }, { "content": "fn deserialize_schema_version<'de, D>(deserializer: D) -> std::result::Result<Version, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let mut schema_version = String::deserialize(deserializer)?;\n\n // If the schema version does not contain a minor or patch version, append as necessary to\n\n // ensure the String parses correctly into a semver::Version.\n\n 
let count = schema_version.split('.').count();\n\n if count == 1 {\n\n schema_version.push_str(\".0.0\");\n\n } else if count == 2 {\n\n schema_version.push_str(\".0\");\n\n }\n\n Version::parse(&schema_version).map_err(|e| serde::de::Error::custom(format!(\"{}\", e)))\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\", deny_unknown_fields)]\n\npub struct RunOnRequirement {\n\n min_server_version: Option<String>,\n", "file_path": "src/test/spec/unified_runner/test_file.rs", "rank": 46, "score": 125170.16917956763 }, { "content": "fn special_operator_matches(\n\n (key, value): (&String, &Bson),\n\n actual: Option<&Bson>,\n\n entities: Option<&EntityMap>,\n\n) -> bool {\n\n match key.as_ref() {\n\n \"$$exists\" => value.as_bool().unwrap() == actual.is_some(),\n\n \"$$type\" => type_matches(value, actual.unwrap()),\n\n \"$$unsetOrMatches\" => {\n\n if actual.is_some() {\n\n results_match_inner(actual, value, false, false, entities)\n\n } else {\n\n true\n\n }\n\n }\n\n \"$$matchesEntity\" => {\n\n let id = value.as_str().unwrap();\n\n entity_matches(id, actual, entities.unwrap())\n\n }\n\n \"$$matchesHexBytes\" => panic!(\"GridFS not implemented\"),\n", "file_path": "src/test/spec/unified_runner/matcher.rs", "rank": 47, "score": 120773.63536938273 }, { "content": "fn spawn<T>(future: T) -> impl Future<Output = <T as Future>::Output>\n\nwhere\n\n T: Future + Send + 'static,\n\n T::Output: Send + 'static,\n\n{\n\n #[cfg(feature = \"tokio-runtime\")]\n\n {\n\n tokio::task::spawn(future).map(|result| result.unwrap())\n\n }\n\n\n\n #[cfg(feature = \"async-std-runtime\")]\n\n {\n\n async_std::task::spawn(future)\n\n }\n\n}\n\n\n\nmod bench;\n\nmod fs;\n\nmod models;\n\nmod score;\n", "file_path": "benchmarks/src/main.rs", "rank": 48, "score": 113944.23006092304 }, { "content": "fn deserialize_pool_created<'de, D>(deserializer: D) -> Result<PoolCreatedEvent, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let helper = 
PoolCreatedEventHelper::deserialize(deserializer)?;\n\n\n\n // The CMAP spec tests use \"42\" as a placeholder in the expected events to indicate that the\n\n // driver should assert that a value is present without any constraints on the value itself.\n\n // This idiom is used for the connection pool creation options even when no options are\n\n // specified, meaning that there isn't any useful assertion we can do based on this value.\n\n // Because of this, we deserialize the value `42` into `None` for the options, which prevents\n\n // deserialization failure due to an unexpected type. For other integer values, we raise an\n\n // error indicating that we expect `42` instead.\n\n let options = match helper.options {\n\n Some(PoolOptionsHelper::Options(opts)) => Some(opts),\n\n Some(PoolOptionsHelper::Number(42)) | None => None,\n\n Some(PoolOptionsHelper::Number(other)) => {\n\n return Err(serde::de::Error::invalid_value(\n\n Unexpected::Unsigned(other),\n\n &\"42\",\n", "file_path": "src/cmap/test/event.rs", "rank": 49, "score": 110780.70027616782 }, { "content": "/// The number of digits in `n` in base 10.\n\n/// Useful for calculating the size of an array entry in BSON.\n\nfn num_decimal_digits(mut n: usize) -> u64 {\n\n let mut digits = 0;\n\n\n\n loop {\n\n n /= 10;\n\n digits += 1;\n\n\n\n if n == 0 {\n\n return digits;\n\n }\n\n }\n\n}\n\n\n\n/// Read a document's raw BSON bytes from the provided reader.\n\npub(crate) fn read_document_bytes<R: Read>(mut reader: R) -> Result<Vec<u8>> {\n\n let length = reader.read_i32()?;\n\n\n\n let mut bytes = Vec::with_capacity(length as usize);\n\n bytes.write_i32(length)?;\n\n\n", "file_path": "src/bson_util/mod.rs", "rank": 50, "score": 109878.7424624164 }, { "content": "fn mac_verify<M: Mac + NewMac>(key: &[u8], input: &[u8], signature: &[u8]) -> Result<()> {\n\n let mut mac =\n\n M::new_from_slice(key).map_err(|_| Error::unknown_authentication_error(\"SCRAM\"))?;\n\n mac.update(input);\n\n match mac.verify(signature) 
{\n\n Ok(_) => Ok(()),\n\n Err(_) => Err(Error::authentication_error(\n\n \"SCRAM\",\n\n \"Authentication failed.\",\n\n )),\n\n }\n\n}\n\n\n", "file_path": "src/client/auth/scram.rs", "rank": 51, "score": 107294.16759252906 }, { "content": "fn deserialize_uri_options_to_uri_string_option<'de, D>(\n\n deserializer: D,\n\n) -> std::result::Result<Option<String>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n Ok(Some(deserialize_uri_options_to_uri_string(deserializer)?))\n\n}\n\n\n\n#[derive(Debug, Deserialize)]\n\npub struct Outcome {\n\n pub collection: CollectionOutcome,\n\n}\n\n\n\nimpl Outcome {\n\n pub async fn matches_actual(\n\n self,\n\n db_name: String,\n\n coll_name: String,\n\n client: &EventClient,\n", "file_path": "src/test/spec/v2_runner/test_file.rs", "rank": 52, "score": 104608.65350254072 }, { "content": "#[allow(dead_code)]\n\ntype Result<T> = std::result::Result<T, Err>;\n\n\n\n#[cfg(not(feature = \"sync\"))]\n\nasync fn _connecting() -> Result<()> {\n\n use mongodb::{options::ClientOptions, Client};\n\n\n\n // Parse a connection string into an options struct.\n\n let mut client_options = ClientOptions::parse(\"mongodb://localhost:27017\").await?;\n\n\n\n // Manually set an option.\n\n client_options.app_name = Some(\"My App\".to_string());\n\n\n\n // Get a handle to the deployment.\n\n let client = Client::with_options(client_options)?;\n\n\n\n // List the names of the databases in that deployment.\n\n for db_name in client.list_database_names(None, None).await? 
{\n\n println!(\"{}\", db_name);\n\n }\n\n\n", "file_path": "tests/readme_examples.rs", "rank": 53, "score": 97166.65372347792 }, { "content": "/// Data used for creating a BSON array.\n\nstruct DocumentArraySpec {\n\n /// The sum of the lengths of all the documents.\n\n length: i32,\n\n\n\n /// The serialized documents to be inserted.\n\n documents: Vec<Vec<u8>>,\n\n}\n\n\n\n#[derive(Serialize)]\n\npub(crate) struct InsertCommand {\n\n insert: String,\n\n\n\n /// will be serialized in `serialize_command`\n\n #[serde(skip)]\n\n documents: DocumentArraySpec,\n\n\n\n #[serde(flatten)]\n\n options: InsertManyOptions,\n\n}\n\n\n\nimpl CommandBody for InsertCommand {}\n", "file_path": "src/operation/insert/mod.rs", "rank": 54, "score": 95181.27626165542 }, { "content": "#[test]\n\n#[function_name::named]\n\nfn collection() {\n\n let _guard: RwLockReadGuard<()> = RUNTIME.block_on(async { LOCK.run_concurrently().await });\n\n\n\n let options = CLIENT_OPTIONS.clone();\n\n let client = Client::with_options(options).expect(\"client creation should succeed\");\n\n let coll = init_db_and_coll(&client, function_name!(), function_name!());\n\n\n\n coll.insert_one(doc! { \"x\": 1 }, None)\n\n .expect(\"insert should succeed\");\n\n\n\n let find_options = FindOptions::builder().projection(doc! { \"_id\": 0 }).build();\n\n let cursor = coll\n\n .find(doc! { \"x\": 1 }, find_options)\n\n .expect(\"find should succeed\");\n\n let results = cursor\n\n .collect::<Result<Vec<Document>>>()\n\n .expect(\"cursor iteration should succeed\");\n\n assert_eq!(results, vec![doc! 
{ \"x\": 1 }]);\n\n\n\n let pipeline = vec![\n", "file_path": "src/sync/test.rs", "rank": 55, "score": 81139.38889876519 }, { "content": "#[test]\n\n#[function_name::named]\n\nfn client() {\n\n let _guard: RwLockReadGuard<()> = RUNTIME.block_on(async { LOCK.run_concurrently().await });\n\n\n\n let options = CLIENT_OPTIONS.clone();\n\n let client = Client::with_options(options).expect(\"client creation should succeed\");\n\n\n\n client\n\n .database(function_name!())\n\n .collection(function_name!())\n\n .insert_one(Document::new(), None)\n\n .expect(\"insert should succeed\");\n\n\n\n let db_names = client\n\n .list_database_names(None, None)\n\n .expect(\"list_database_names should succeed\");\n\n assert!(db_names.contains(&function_name!().to_string()));\n\n}\n\n\n", "file_path": "src/sync/test.rs", "rank": 56, "score": 81139.38889876519 }, { "content": "#[test]\n\n#[function_name::named]\n\nfn database() {\n\n let _guard: RwLockReadGuard<()> = RUNTIME.block_on(async { LOCK.run_concurrently().await });\n\n\n\n let options = CLIENT_OPTIONS.clone();\n\n let client = Client::with_options(options).expect(\"client creation should succeed\");\n\n let db = client.database(function_name!());\n\n\n\n let coll = init_db_and_coll(&client, function_name!(), function_name!());\n\n\n\n coll.insert_one(doc! { \"x\": 1 }, None)\n\n .expect(\"insert should succeed\");\n\n\n\n let coll_names = db\n\n .list_collection_names(None)\n\n .expect(\"list_database_names should succeed\");\n\n assert!(coll_names.contains(&function_name!().to_string()));\n\n\n\n let admin_db = client.database(\"admin\");\n\n let pipeline = vec![\n\n doc! 
{ \"$currentOp\": {} },\n", "file_path": "src/sync/test.rs", "rank": 57, "score": 81139.38889876519 }, { "content": "#[test]\n\n#[function_name::named]\n\nfn transactions() {\n\n let _guard: RwLockReadGuard<()> = RUNTIME.block_on(async { LOCK.run_concurrently().await });\n\n\n\n let should_skip = RUNTIME.block_on(async {\n\n let test_client = AsyncTestClient::new().await;\n\n !test_client.supports_transactions()\n\n });\n\n if should_skip {\n\n return;\n\n }\n\n\n\n let options = CLIENT_OPTIONS.clone();\n\n let client = Client::with_options(options).expect(\"client creation should succeed\");\n\n let mut session = client\n\n .start_session(None)\n\n .expect(\"session creation should succeed\");\n\n let coll = init_db_and_typed_coll(&client, function_name!(), function_name!());\n\n\n\n client\n\n .database(function_name!())\n", "file_path": "src/sync/test.rs", "rank": 58, "score": 81139.38889876519 }, { "content": "#[test]\n\n#[function_name::named]\n\nfn typed_collection() {\n\n let _guard: RwLockReadGuard<()> = RUNTIME.block_on(async { LOCK.run_concurrently().await });\n\n\n\n let options = CLIENT_OPTIONS.clone();\n\n let client = Client::with_options(options).expect(\"client creation should succeed\");\n\n let coll = init_db_and_typed_coll(&client, function_name!(), function_name!());\n\n\n\n #[derive(Serialize, Deserialize, Debug)]\n\n struct MyType {\n\n x: i32,\n\n str: String,\n\n }\n\n let my_type = MyType {\n\n x: 1,\n\n str: \"hello\".into(),\n\n };\n\n\n\n assert!(coll.insert_one(my_type, None).is_ok());\n\n}\n\n\n", "file_path": "src/sync/test.rs", "rank": 59, "score": 79850.26028906417 }, { "content": "#[test]\n\nfn client_options() {\n\n let _guard: RwLockReadGuard<()> = RUNTIME.block_on(async { LOCK.run_concurrently().await });\n\n\n\n let mut options = ClientOptions::parse(\"mongodb://localhost:27017/\").unwrap();\n\n\n\n options.original_uri.take();\n\n\n\n assert_eq!(\n\n options,\n\n ClientOptions::builder()\n\n .hosts(vec![ServerAddress::Tcp 
{\n\n host: \"localhost\".into(),\n\n port: Some(27017)\n\n }])\n\n .build()\n\n );\n\n}\n\n\n", "file_path": "src/sync/test.rs", "rank": 60, "score": 79843.77872652843 }, { "content": "#[test]\n\n#[function_name::named]\n\nfn collection_generic_bounds() {\n\n let _guard: RwLockReadGuard<()> = RUNTIME.block_on(async { LOCK.run_concurrently().await });\n\n\n\n #[derive(Deserialize)]\n\n struct Foo;\n\n\n\n let options = CLIENT_OPTIONS.clone();\n\n let client = Client::with_options(options).expect(\"client creation should succeed\");\n\n\n\n // ensure this code successfully compiles\n\n let coll: Collection<Foo> = client\n\n .database(function_name!())\n\n .collection(function_name!());\n\n let _result: Result<Option<Foo>> = coll.find_one(None, None);\n\n\n\n #[derive(Serialize)]\n\n struct Bar;\n\n\n\n // ensure this code successfully compiles\n\n let coll: Collection<Bar> = client\n\n .database(function_name!())\n\n .collection(function_name!());\n\n let _result = coll.insert_one(Bar {}, None);\n\n}\n", "file_path": "src/sync/test.rs", "rank": 61, "score": 78624.47566699453 }, { "content": "#[test]\n\nfn write_concern_deserialize() {\n\n let w_1 = doc! { \"w\": 1 };\n\n let wc: WriteConcern = bson::from_bson(Bson::Document(w_1)).unwrap();\n\n assert_eq!(\n\n wc,\n\n WriteConcern {\n\n w: Acknowledgment::Nodes(1).into(),\n\n w_timeout: None,\n\n journal: None\n\n }\n\n );\n\n\n\n let w_majority = doc! 
{ \"w\": \"majority\" };\n\n let wc: WriteConcern = bson::from_bson(Bson::Document(w_majority)).unwrap();\n\n assert_eq!(\n\n wc,\n\n WriteConcern {\n\n w: Acknowledgment::Majority.into(),\n\n w_timeout: None,\n\n journal: None\n", "file_path": "src/concern/test.rs", "rank": 62, "score": 78617.99410445879 }, { "content": "#[test]\n\nfn write_concern_is_acknowledged() {\n\n let w_1 = WriteConcern::builder()\n\n .w(Acknowledgment::Nodes(1))\n\n .journal(false)\n\n .build();\n\n assert!(w_1.is_acknowledged());\n\n\n\n let w_majority = WriteConcern::builder()\n\n .w(Acknowledgment::Majority)\n\n .journal(false)\n\n .build();\n\n assert!(w_majority.is_acknowledged());\n\n\n\n let w_0 = WriteConcern::builder()\n\n .w(Acknowledgment::Nodes(0))\n\n .journal(false)\n\n .build();\n\n assert!(!w_0.is_acknowledged());\n\n\n\n let w_0 = WriteConcern::builder().w(Acknowledgment::Nodes(0)).build();\n", "file_path": "src/concern/test.rs", "rank": 63, "score": 78617.99410445879 }, { "content": "fn build_test(\n\n ns: Namespace,\n\n filter: Option<Document>,\n\n options: Option<FindOptions>,\n\n mut expected_body: Document,\n\n) {\n\n let mut find = Find::<Document>::new(ns.clone(), filter, options);\n\n\n\n let mut cmd = find.build(&StreamDescription::new_testing()).unwrap();\n\n\n\n assert_eq!(cmd.name.as_str(), \"find\");\n\n assert_eq!(cmd.target_db.as_str(), ns.db.as_str());\n\n\n\n bson_util::sort_document(&mut expected_body);\n\n bson_util::sort_document(&mut cmd.body);\n\n\n\n assert_eq!(cmd.body, expected_body);\n\n}\n\n\n\n#[cfg_attr(feature = \"tokio-runtime\", tokio::test)]\n", "file_path": "src/operation/find/test.rs", "rank": 64, "score": 78617.99410445879 }, { "content": "fn build_test(\n\n ns: Namespace,\n\n cursor_id: i64,\n\n address: ServerAddress,\n\n batch_size: Option<u32>,\n\n max_time: Option<Duration>,\n\n mut expected_body: Document,\n\n) {\n\n let info = CursorInformation {\n\n ns: ns.clone(),\n\n id: cursor_id,\n\n address,\n\n batch_size,\n\n 
max_time,\n\n };\n\n let mut get_more = GetMore::<Document>::new(info, None);\n\n\n\n let build_result = get_more.build(&StreamDescription::new_testing());\n\n assert!(build_result.is_ok());\n\n\n", "file_path": "src/operation/get_more/test.rs", "rank": 65, "score": 78617.99410445879 }, { "content": "fn build_test(\n\n target: impl Into<AggregateTarget>,\n\n pipeline: Vec<Document>,\n\n options: Option<AggregateOptions>,\n\n mut expected_body: Document,\n\n) {\n\n let target = target.into();\n\n\n\n let mut aggregate = Aggregate::new(target.clone(), pipeline, options);\n\n\n\n let mut cmd = aggregate.build(&StreamDescription::new_testing()).unwrap();\n\n\n\n assert_eq!(cmd.name.as_str(), \"aggregate\");\n\n assert_eq!(cmd.target_db.as_str(), target.db_name());\n\n\n\n bson_util::sort_document(&mut expected_body);\n\n bson_util::sort_document(&mut cmd.body);\n\n\n\n assert_eq!(cmd.body, expected_body);\n\n}\n", "file_path": "src/operation/aggregate/test.rs", "rank": 66, "score": 78617.99410445879 }, { "content": "#[test]\n\nfn metadata_no_options() {\n\n let handshaker = Handshaker::new(None);\n\n\n\n let metadata = handshaker.command.body.get_document(\"client\").unwrap();\n\n assert!(!metadata.contains_key(\"application\"));\n\n\n\n let driver = metadata.get_document(\"driver\").unwrap();\n\n assert_eq!(driver.keys().collect::<Vec<_>>(), vec![\"name\", \"version\"]);\n\n assert_eq!(driver.get_str(\"name\"), Ok(\"mongo-rust-driver\"));\n\n assert_eq!(driver.get_str(\"version\"), Ok(env!(\"CARGO_PKG_VERSION\")));\n\n\n\n let os = metadata.get_document(\"os\").unwrap();\n\n assert_eq!(os.get_str(\"type\"), Ok(std::env::consts::OS));\n\n assert_eq!(os.get_str(\"architecture\"), Ok(std::env::consts::ARCH));\n\n}\n\n\n", "file_path": "src/cmap/establish/handshake/test.rs", "rank": 67, "score": 77450.9966193214 }, { "content": "#[test]\n\nfn metadata_with_options() {\n\n let app_name = \"myspace 2.0\";\n\n let name = \"even better Rust driver\";\n\n let version = \"the 
best version, of course\";\n\n\n\n let options = ConnectionPoolOptions::from_client_options(\n\n &ClientOptions::builder()\n\n .app_name(app_name.to_string())\n\n .driver_info(\n\n DriverInfo::builder()\n\n .name(name.to_string())\n\n .version(version.to_string())\n\n .build(),\n\n )\n\n .build(),\n\n );\n\n\n\n let handshaker = Handshaker::new(Some(options.into()));\n\n\n\n let metadata = handshaker.command.body.get_document(\"client\").unwrap();\n", "file_path": "src/cmap/establish/handshake/test.rs", "rank": 68, "score": 77450.9966193214 }, { "content": "fn build_test(\n\n db_name: &str,\n\n mut list_collections: ListCollections<Document>,\n\n mut expected_body: Document,\n\n) {\n\n let mut cmd = list_collections\n\n .build(&StreamDescription::new_testing())\n\n .expect(\"build should succeed\");\n\n assert_eq!(cmd.name, \"listCollections\");\n\n assert_eq!(cmd.target_db, db_name);\n\n\n\n bson_util::sort_document(&mut cmd.body);\n\n bson_util::sort_document(&mut expected_body);\n\n\n\n assert_eq!(cmd.body, expected_body);\n\n}\n\n\n\n#[cfg_attr(feature = \"tokio-runtime\", tokio::test)]\n\n#[cfg_attr(feature = \"async-std-runtime\", async_std::test)]\n\nasync fn build() {\n", "file_path": "src/operation/list_collections/test.rs", "rank": 69, "score": 77450.9966193214 }, { "content": "fn emit_event<F>(\n\n topology: Option<&WeakTopology>,\n\n handler: &Option<Arc<dyn SdamEventHandler>>,\n\n emit: F,\n\n) where\n\n F: FnOnce(&Arc<dyn SdamEventHandler>),\n\n{\n\n if let Some(handler) = handler {\n\n if let Some(topology) = topology {\n\n if topology.is_alive() {\n\n emit(handler);\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub(crate) struct IsMasterReply {\n\n pub server_address: ServerAddress,\n\n pub command_response: IsMasterCommandResponse,\n", "file_path": "src/is_master.rs", "rank": 70, "score": 77000.48509902616 }, { "content": "/// Updates the handshake command document with the speculative authenitication info.\n\nfn 
set_speculative_auth_info(\n\n command: &mut Document,\n\n credential: Option<&Credential>,\n\n) -> Result<Option<ClientFirst>> {\n\n let credential = match credential {\n\n Some(credential) => credential,\n\n None => return Ok(None),\n\n };\n\n\n\n // The spec indicates that SCRAM-SHA-256 should be assumed for speculative authentication if no\n\n // mechanism is provided. This doesn't cause issues with servers where SCRAM-SHA-256 is not the\n\n // default due to them being too old to support speculative authentication at all.\n\n let auth_mechanism = credential\n\n .mechanism\n\n .as_ref()\n\n .unwrap_or(&AuthMechanism::ScramSha256);\n\n\n\n let client_first = match auth_mechanism.build_speculative_client_first(credential)? {\n\n Some(client_first) => client_first,\n\n None => return Ok(None),\n\n };\n\n\n\n command.insert(\"speculativeAuthenticate\", client_first.to_document());\n\n\n\n Ok(Some(client_first))\n\n}\n", "file_path": "src/cmap/establish/handshake/mod.rs", "rank": 71, "score": 75282.7516039341 }, { "content": "fn verify_description_outcome(\n\n outcome: DescriptionOutcome,\n\n topology_description: TopologyDescription,\n\n test_description: &str,\n\n phase_description: String,\n\n) {\n\n assert_eq!(\n\n topology_description.topology_type, outcome.topology_type,\n\n \"{}: {}\",\n\n test_description, phase_description\n\n );\n\n\n\n assert_eq!(\n\n topology_description.set_name, outcome.set_name,\n\n \"{}: {}\",\n\n test_description, phase_description,\n\n );\n\n\n\n let expected_timeout = outcome\n\n .logical_session_timeout_minutes\n", "file_path": "src/sdam/description/topology/test/sdam.rs", "rank": 72, "score": 75277.22105402168 }, { "content": "/// Get an Insert operation and the documents/options used to construct it.\n\nfn fixtures() -> TestFixtures {\n\n lazy_static! {\n\n static ref DOCUMENTS: Vec<Document> = vec![\n\n Document::new(),\n\n doc! {\"_id\": 1234, \"a\": 1},\n\n doc! 
{\"a\": 123, \"b\": \"hello world\" },\n\n ];\n\n }\n\n\n\n let options = InsertManyOptions {\n\n ordered: Some(true),\n\n write_concern: Some(WriteConcern::builder().journal(true).build()),\n\n ..Default::default()\n\n };\n\n\n\n let op = Insert::new(\n\n Namespace {\n\n db: \"test_db\".to_string(),\n\n coll: \"test_coll\".to_string(),\n\n },\n", "file_path": "src/operation/insert/test.rs", "rank": 73, "score": 74618.82494640343 }, { "content": "/// The default connection ID to use for deserialization of events from test files.\n\n/// This value will \"match\" any connection ID.\n\nfn default_connection_id() -> u32 {\n\n 42\n\n}\n\n\n", "file_path": "src/event/cmap.rs", "rank": 74, "score": 74618.45280665714 }, { "content": "/// Custom serializer used to serialize limit as its absolute value.\n\nfn serialize_absolute_value<S>(\n\n val: &Option<i64>,\n\n serializer: S,\n\n) -> std::result::Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n match val {\n\n Some(v) => serializer.serialize_i64(v.abs()),\n\n None => serializer.serialize_none(),\n\n }\n\n}\n\n\n\n/// Specifies the options to a [`Collection::find_one`](../struct.Collection.html#method.find_one)\n\n/// operation.\n\n#[derive(Clone, Debug, Default, Deserialize, TypedBuilder)]\n\n#[serde(rename_all = \"camelCase\")]\n\n#[builder(field_defaults(default, setter(into)))]\n\n#[non_exhaustive]\n\npub struct FindOneOptions {\n", "file_path": "src/coll/options.rs", "rank": 75, "score": 74613.294396491 }, { "content": "/// We implement `Deserialize` for all of the event types so that we can more easily parse the CMAP\n\n/// spec tests. However, we have no need to parse the address field from the JSON files (if it's\n\n/// even present). 
To facilitate populating the address field with an empty value when\n\n/// deserializing, we define a private `empty_address` function that the events can specify as the\n\n/// custom deserialization value for each address field.\n\nfn empty_address() -> ServerAddress {\n\n ServerAddress::Tcp {\n\n host: Default::default(),\n\n port: None,\n\n }\n\n}\n\n\n\n/// Event emitted when a connection pool is created.\n\n#[derive(Clone, Debug, Deserialize, PartialEq)]\n\n#[non_exhaustive]\n\npub struct PoolCreatedEvent {\n\n /// The address of the server that the pool's connections will connect to.\n\n #[serde(default = \"self::empty_address\")]\n\n #[serde(skip)]\n\n pub address: ServerAddress,\n\n\n\n /// The options used for the pool.\n\n pub options: Option<ConnectionPoolOptions>,\n\n}\n\n\n", "file_path": "src/event/cmap.rs", "rank": 76, "score": 74607.70299181914 }, { "content": "fn empty_delete() -> FindAndModify {\n\n let ns = Namespace {\n\n db: \"test_db\".to_string(),\n\n coll: \"test_coll\".to_string(),\n\n };\n\n let filter = doc! {};\n\n FindAndModify::with_delete(ns, filter, None)\n\n}\n\n\n\n#[cfg_attr(feature = \"tokio-runtime\", tokio::test)]\n\n#[cfg_attr(feature = \"async-std-runtime\", async_std::test)]\n\nasync fn build_with_delete_hint() {\n\n let ns = Namespace {\n\n db: \"test_db\".to_string(),\n\n coll: \"test_coll\".to_string(),\n\n };\n\n let filter = doc! {\n\n \"x\": 2,\n\n \"y\": { \"$gt\": 1 },\n\n };\n", "file_path": "src/operation/find_and_modify/test.rs", "rank": 77, "score": 72433.92742651941 }, { "content": "fn empty_update() -> FindAndModify {\n\n let ns = Namespace {\n\n db: \"test_db\".to_string(),\n\n coll: \"test_coll\".to_string(),\n\n };\n\n let filter = doc! {};\n\n let update = UpdateModifications::Document(doc! 
{ \"$x\": { \"$inc\": 1 } });\n\n FindAndModify::with_update(ns, filter, update, None).unwrap()\n\n}\n\n\n\n#[cfg_attr(feature = \"tokio-runtime\", tokio::test)]\n\n#[cfg_attr(feature = \"async-std-runtime\", async_std::test)]\n\nasync fn build_with_update_hint() {\n\n let ns = Namespace {\n\n db: \"test_db\".to_string(),\n\n coll: \"test_coll\".to_string(),\n\n };\n\n let filter = doc! { \"x\": { \"$gt\": 1 } };\n\n let update = UpdateModifications::Document(doc! { \"$x\": { \"$inc\": 1 } });\n\n let options = FindOneAndUpdateOptions {\n", "file_path": "src/operation/find_and_modify/test.rs", "rank": 78, "score": 72433.92742651941 }, { "content": "fn empty_replace() -> FindAndModify {\n\n let ns = Namespace {\n\n db: \"test_db\".to_string(),\n\n coll: \"test_coll\".to_string(),\n\n };\n\n let filter = doc! {};\n\n let replacement = doc! { \"x\": { \"inc\": 1 } };\n\n FindAndModify::with_replace(ns, filter, replacement, None).unwrap()\n\n}\n\n\n\n#[cfg_attr(feature = \"tokio-runtime\", tokio::test)]\n\n#[cfg_attr(feature = \"async-std-runtime\", async_std::test)]\n\nasync fn build_with_replace_hint() {\n\n let ns = Namespace {\n\n db: \"test_db\".to_string(),\n\n coll: \"test_coll\".to_string(),\n\n };\n\n let filter = doc! { \"x\": { \"$gt\": 1 } };\n\n let replacement = doc! 
{ \"x\": { \"inc\": 1 } };\n\n let options = FindOneAndReplaceOptions {\n", "file_path": "src/operation/find_and_modify/test.rs", "rank": 79, "score": 72433.92742651941 }, { "content": "fn majority_write_concern() -> WriteConcern {\n\n WriteConcern::builder().w(Acknowledgment::Majority).build()\n\n}\n\n\n", "file_path": "src/test/spec/v2_runner/mod.rs", "rank": 80, "score": 70450.40636759768 }, { "content": "fn default_hosts() -> Vec<ServerAddress> {\n\n vec![ServerAddress::default()]\n\n}\n\n\n\nimpl Default for ClientOptions {\n\n fn default() -> Self {\n\n Self::builder().build()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nimpl Serialize for ClientOptions {\n\n fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>\n\n where\n\n S: serde::Serializer,\n\n {\n\n #[derive(Serialize)]\n\n struct ClientOptionsHelper<'a> {\n\n appname: &'a Option<String>,\n\n compressors: &'a Option<Vec<String>>,\n", "file_path": "src/client/options/mod.rs", "rank": 81, "score": 70083.6612036163 }, { "content": "fn serialize_block_connection<S: Serializer>(\n\n val: &Option<Duration>,\n\n serializer: S,\n\n) -> std::result::Result<S::Ok, S::Error> {\n\n match val {\n\n Some(duration) => {\n\n (doc! { \"blockConnection\": true, \"blockTimeMS\": duration.as_millis() as i64})\n\n .serialize(serializer)\n\n }\n\n None => serializer.serialize_none(),\n\n }\n\n}\n", "file_path": "src/test/util/failpoint.rs", "rank": 82, "score": 70083.6612036163 }, { "content": "fn deserialize_checkout_failed<'de, D>(\n\n deserializer: D,\n\n) -> Result<ConnectionCheckoutFailedEvent, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let helper = ConnectionCheckoutFailedHelper::deserialize(deserializer)?;\n\n\n\n // The driver doesn't have a concept of a \"closed pool\", instead having the pool closed when the\n\n // pool is dropped. Because of this, the driver doesn't implement the \"poolClosed\" reason for a\n\n // connection checkout failure. 
While we skip over the corresponding tests in our spec test\n\n // runner, we still need to be able to deserialize the \"poolClosed\" reason to avoid the test\n\n // harness panicking, so we arbitrarily map the \"poolClosed\" to \"connectionError\".\n\n let reason = match helper.reason {\n\n CheckoutFailedReasonHelper::PoolClosed | CheckoutFailedReasonHelper::ConnectionError => {\n\n ConnectionCheckoutFailedReason::ConnectionError\n\n }\n\n CheckoutFailedReasonHelper::Timeout => ConnectionCheckoutFailedReason::Timeout,\n\n };\n\n\n\n Ok(ConnectionCheckoutFailedEvent {\n\n address: ServerAddress::Tcp {\n\n host: Default::default(),\n\n port: None,\n\n },\n\n reason,\n\n })\n\n}\n", "file_path": "src/cmap/test/event.rs", "rank": 83, "score": 70083.6612036163 }, { "content": "/// Constructs a new channel for for monitoring whether this pool still has references\n\n/// to it.\n\nfn handle_channel() -> (PoolWorkerHandle, HandleListener) {\n\n let (sender, receiver) = mpsc::channel(1);\n\n (PoolWorkerHandle { sender }, HandleListener { receiver })\n\n}\n\n\n\n/// Handle to the worker. 
Once all handles have been dropped, the worker\n\n/// will stop waiting for new requests and drop the pool itself.\n\n#[derive(Debug, Clone)]\n\npub(super) struct PoolWorkerHandle {\n\n sender: mpsc::Sender<()>,\n\n}\n\n\n\nimpl PoolWorkerHandle {\n\n #[cfg(test)]\n\n pub(super) fn new_mocked() -> Self {\n\n let (s, _) = handle_channel();\n\n s\n\n }\n\n}\n\n\n\n/// Listener used to determine when all handles have been dropped.\n", "file_path": "src/cmap/worker.rs", "rank": 84, "score": 69069.71486725833 }, { "content": "fn mac<M: Mac + NewMac>(\n\n key: &[u8],\n\n input: &[u8],\n\n auth_mechanism: &str,\n\n) -> Result<impl AsRef<[u8]>> {\n\n let mut mac =\n\n M::new_from_slice(key).map_err(|_| Error::unknown_authentication_error(auth_mechanism))?;\n\n mac.update(input);\n\n Ok(mac.finalize().into_bytes())\n\n}\n", "file_path": "src/client/auth/mod.rs", "rank": 85, "score": 68108.41695856229 }, { "content": "fn scram_sasl_first_options(mechanism: AuthMechanism) {\n\n let sasl_first = SaslStart::new(String::new(), mechanism, Vec::new(), None);\n\n let command = sasl_first.into_command();\n\n let options = match command.body.get_document(\"options\") {\n\n Ok(options) => options,\n\n Err(_) => panic!(\"SaslStart should contain options document\"),\n\n };\n\n match options.get_bool(\"skipEmptyExchange\") {\n\n Ok(skip_empty_exchange) => assert!(\n\n skip_empty_exchange,\n\n \"skipEmptyExchange should be true for SCRAM authentication\"\n\n ),\n\n Err(_) => panic!(\"SaslStart options should contain skipEmptyExchange\"),\n\n }\n\n}\n\n\n\n#[cfg_attr(feature = \"tokio-runtime\", tokio::test)]\n\n#[cfg_attr(feature = \"async-std-runtime\", async_std::test)]\n\nasync fn sasl_first_options_specified() {\n\n scram_sasl_first_options(AuthMechanism::ScramSha1);\n", "file_path": "src/client/auth/test.rs", "rank": 86, "score": 68100.14014469457 }, { "content": "fn h_i<M: Mac + NewMac + Sync>(\n\n str: &str,\n\n salt: &[u8],\n\n iterations: u32,\n\n output_size: usize,\n\n) -> 
Vec<u8> {\n\n let mut buf = vec![0u8; output_size];\n\n pbkdf2::pbkdf2::<M>(str.as_bytes(), salt, iterations, buf.as_mut_slice());\n\n buf\n\n}\n\n\n", "file_path": "src/client/auth/scram.rs", "rank": 87, "score": 66425.065949866 }, { "content": "fn deserialize_command_started_events<'de, D>(\n\n deserializer: D,\n\n) -> std::result::Result<Option<Vec<CommandStartedEvent>>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let docs = Vec::<Document>::deserialize(deserializer)?;\n\n Ok(Some(\n\n docs.iter()\n\n .map(|doc| {\n\n let event = doc.get_document(\"command_started_event\").unwrap();\n\n from_document(event.clone()).unwrap()\n\n })\n\n .collect(),\n\n ))\n\n}\n", "file_path": "src/test/spec/v2_runner/test_file.rs", "rank": 88, "score": 66282.94345051884 }, { "content": "fn assert_same_lsid_on_last_two_commands(client: &EventClient) {\n\n let events = client.get_all_command_started_events();\n\n let lsid1 = events[events.len() - 1].command.get(\"lsid\").unwrap();\n\n let lsid2 = events[events.len() - 2].command.get(\"lsid\").unwrap();\n\n assert_eq!(lsid1, lsid2);\n\n}\n", "file_path": "src/test/spec/v2_runner/mod.rs", "rank": 89, "score": 65430.31340713379 }, { "content": "fn assert_different_lsid_on_last_two_commands(client: &EventClient) {\n\n let events = client.get_all_command_started_events();\n\n let lsid1 = events[events.len() - 1].command.get(\"lsid\").unwrap();\n\n let lsid2 = events[events.len() - 2].command.get(\"lsid\").unwrap();\n\n assert_ne!(lsid1, lsid2);\n\n}\n\n\n", "file_path": "src/test/spec/v2_runner/mod.rs", "rank": 90, "score": 64611.992721732546 }, { "content": "fn localhost_test_build_10gen(port: u16) -> ServerAddress {\n\n ServerAddress::Tcp {\n\n host: \"localhost.test.build.10gen.cc\".into(),\n\n port: Some(port),\n\n }\n\n}\n\n\n\nlazy_static::lazy_static! 
{\n\n static ref DEFAULT_HOSTS: Vec<ServerAddress> = vec![\n\n localhost_test_build_10gen(27017),\n\n localhost_test_build_10gen(27108),\n\n ];\n\n}\n\n\n\nasync fn run_test(new_hosts: Result<Vec<ServerAddress>>, expected_hosts: HashSet<ServerAddress>) {\n\n let mut options = ClientOptions::new_srv();\n\n options.hosts = DEFAULT_HOSTS.clone();\n\n let topology = Topology::new_mocked(options);\n\n let mut monitor = SrvPollingMonitor::new(topology.downgrade()).unwrap();\n\n\n", "file_path": "src/sdam/srv_polling/test.rs", "rank": 91, "score": 64307.69920546483 }, { "content": "fn get_nth_percentile(durations: &[Duration], n: f64) -> Duration {\n\n let index = (durations.len() as f64 * (n / 100.0)) as usize;\n\n durations[std::cmp::max(index, 1) - 1]\n\n}\n\n\n\npub(crate) fn score_test(\n\n durations: Vec<Duration>,\n\n name: &'static str,\n\n task_size: f64,\n\n more_info: bool,\n\n) -> BenchmarkScore {\n\n let median = get_nth_percentile(&durations, 50.0);\n\n let score = task_size / median.as_secs_f64();\n\n println!(\n\n \"TEST: {} -- Score: {:.3} MB/s, Median Iteration Time: {:.3}s\\n\",\n\n name,\n\n score,\n\n median.as_secs_f64()\n\n );\n\n\n", "file_path": "benchmarks/src/score.rs", "rank": 92, "score": 63959.41149106549 }, { "content": "fn hash<D: Digest>(val: &[u8]) -> Vec<u8> {\n\n let mut hash = D::new();\n\n hash.update(val);\n\n hash.finalize().to_vec()\n\n}\n\n\n", "file_path": "src/client/auth/scram.rs", "rank": 93, "score": 61725.035609391074 }, { "content": "fn xor(lhs: &[u8], rhs: &[u8]) -> Vec<u8> {\n\n assert_eq!(lhs.len(), rhs.len());\n\n\n\n lhs.iter()\n\n .zip(rhs.iter())\n\n .map(|(l, r)| l.bitxor(r))\n\n .collect()\n\n}\n\n\n", "file_path": "src/client/auth/scram.rs", "rank": 94, "score": 61725.035609391074 }, { "content": "fn convert_read_preference(test_read_pref: TestReadPreference) -> Option<ReadPreference> {\n\n let max_staleness = test_read_pref\n\n .max_staleness_seconds\n\n .map(Duration::from_secs);\n\n let options = 
ReadPreferenceOptions::builder()\n\n .tag_sets(test_read_pref.tag_sets)\n\n .max_staleness(max_staleness)\n\n .build();\n\n\n\n let read_pref = match &test_read_pref.mode.as_ref()?[..] {\n\n \"Primary\" => ReadPreference::Primary,\n\n \"Secondary\" => ReadPreference::Secondary { options },\n\n \"PrimaryPreferred\" => ReadPreference::PrimaryPreferred { options },\n\n \"SecondaryPreferred\" => ReadPreference::SecondaryPreferred { options },\n\n \"Nearest\" => ReadPreference::Nearest { options },\n\n m => panic!(\"invalid read preference mode: {}\", m),\n\n };\n\n\n\n Some(read_pref)\n\n}\n", "file_path": "src/sdam/description/topology/server_selection/test/logic.rs", "rank": 95, "score": 57310.57847942342 }, { "content": "fn is_master_response_from_server_type(server_type: ServerType) -> Option<IsMasterCommandResponse> {\n\n let mut response = IsMasterCommandResponse::default();\n\n\n\n match server_type {\n\n ServerType::Unknown => {\n\n return None;\n\n }\n\n ServerType::Mongos => {\n\n response.msg = Some(\"isdbgrid\".into());\n\n }\n\n ServerType::RsPrimary => {\n\n response.set_name = Some(\"foo\".into());\n\n response.is_writable_primary = Some(true);\n\n }\n\n ServerType::RsOther => {\n\n response.set_name = Some(\"foo\".into());\n\n response.hidden = Some(true);\n\n }\n\n ServerType::RsSecondary => {\n\n response.set_name = Some(\"foo\".into());\n", "file_path": "src/sdam/description/topology/server_selection/test/mod.rs", "rank": 96, "score": 57310.57847942342 }, { "content": "fn verify_max_await_time(max_await_time: Option<Duration>, cursor_type: Option<CursorType>) {\n\n let ns = Namespace::empty();\n\n let find = Find::<Document>::new(\n\n ns,\n\n None,\n\n Some(FindOptions {\n\n cursor_type,\n\n max_await_time,\n\n ..Default::default()\n\n }),\n\n );\n\n\n\n let spec = handle_response_test(\n\n &find,\n\n doc! 
{\n\n \"cursor\": {\n\n \"id\": 123,\n\n \"ns\": \"a.b\",\n\n \"firstBatch\": [],\n\n },\n", "file_path": "src/operation/find/test.rs", "rank": 97, "score": 55968.25225236805 }, { "content": "/// Choose a server from several suitable choices within the latency window according to\n\n/// the algorithm laid out in the server selection specification.\n\nfn select_server_in_latency_window(in_window: Vec<&Arc<Server>>) -> Option<Arc<Server>> {\n\n if in_window.is_empty() {\n\n return None;\n\n } else if in_window.len() == 1 {\n\n return Some(in_window[0].clone());\n\n }\n\n\n\n let mut rng = SmallRng::from_entropy();\n\n in_window\n\n .choose_multiple(&mut rng, 2)\n\n .min_by_key(|s| s.operation_count())\n\n .map(|server| (*server).clone())\n\n}\n\n\n\nimpl TopologyDescription {\n\n pub(crate) fn server_selection_timeout_error_message(\n\n &self,\n\n criteria: &SelectionCriteria,\n\n ) -> String {\n\n if self.has_available_servers() {\n", "file_path": "src/sdam/description/topology/server_selection/mod.rs", "rank": 98, "score": 54856.44489224866 }, { "content": "fn filter_servers_by_tag_sets(servers: &mut Vec<&ServerDescription>, tag_sets: &[TagSet]) {\n\n if tag_sets.is_empty() {\n\n return;\n\n }\n\n\n\n for tag_set in tag_sets {\n\n let matches_tag_set = |server: &&ServerDescription| server.matches_tag_set(tag_set);\n\n\n\n if servers.iter().any(matches_tag_set) {\n\n servers.retain(matches_tag_set);\n\n\n\n return;\n\n }\n\n }\n\n\n\n servers.clear();\n\n}\n", "file_path": "src/sdam/description/topology/server_selection/mod.rs", "rank": 99, "score": 54594.06919787023 } ]
Rust
src/json_dsl/param.rs
swarkentin/valico
f6aed770ef3b0f1215636b29c696c2a69e29f92b
use serde::Serialize; use serde_json::{to_value, Value}; use super::super::json_schema; use super::builder; use super::coercers; use super::validators; pub struct Param { pub name: String, pub coercer: Option<Box<dyn coercers::Coercer + Send + Sync>>, pub nest: Option<builder::Builder>, pub description: Option<String>, pub allow_null: bool, pub validators: validators::Validators, pub default: Option<Value>, pub schema_builder: Option<Box<dyn Fn(&mut json_schema::Builder) + Send + Sync>>, pub schema_id: Option<url::Url>, } unsafe impl Send for Param {} impl Param { pub fn new(name: &str) -> Param { Param { name: name.to_string(), description: None, coercer: None, nest: None, allow_null: false, validators: vec![], default: None, schema_builder: None, schema_id: None, } } pub fn new_with_coercer( name: &str, coercer: Box<dyn coercers::Coercer + Send + Sync>, ) -> Param { Param { name: name.to_string(), description: None, coercer: Some(coercer), nest: None, allow_null: false, validators: vec![], default: None, schema_builder: None, schema_id: None, } } pub fn new_with_nest( name: &str, coercer: Box<dyn coercers::Coercer + Send + Sync>, nest: builder::Builder, ) -> Param { Param { name: name.to_string(), description: None, coercer: Some(coercer), nest: Some(nest), allow_null: false, validators: vec![], default: None, schema_builder: None, schema_id: None, } } pub fn build<F>(name: &str, build_def: F) -> Param where F: FnOnce(&mut Param), { let mut param = Param::new(name); build_def(&mut param); param } pub fn desc(&mut self, description: &str) { self.description = Some(description.to_string()); } pub fn schema_id(&mut self, id: url::Url) { self.schema_id = Some(id); } pub fn schema<F>(&mut self, build: F) where F: Fn(&mut json_schema::Builder) + 'static + Send + Sync, { self.schema_builder = Some(Box::new(build)); } pub fn coerce(&mut self, coercer: Box<dyn coercers::Coercer + Send + Sync>) { self.coercer = Some(coercer); } pub fn nest<F>(&mut self, nest_def: F) where 
F: FnOnce(&mut builder::Builder) -> (), { self.nest = Some(builder::Builder::build(nest_def)); } pub fn allow_null(&mut self) { self.allow_null = true; } pub fn regex(&mut self, regex: regex::Regex) { self.validators.push(Box::new(regex)); } pub fn validate(&mut self, validator: Box<dyn validators::Validator + 'static + Send + Sync>) { self.validators.push(validator); } pub fn validate_with<F>(&mut self, validator: F) where F: Fn(&Value, &str) -> super::validators::ValidatorResult + 'static + Send + Sync, { self.validators.push(Box::new(validator)); } fn process_validators(&self, val: &Value, path: &str) -> super::super::ValicoErrors { let mut errors = vec![]; for validator in self.validators.iter() { match validator.validate(val, path) { Ok(()) => (), Err(validation_errors) => errors.extend(validation_errors), } } errors } pub fn process( &self, val: &mut Value, path: &str, scope: Option<&json_schema::Scope>, ) -> super::ExtendedResult<Option<Value>> { if val.is_null() && self.allow_null { return super::ExtendedResult::new(None); } let mut result = super::ExtendedResult::new(None); let mut return_value = None; { let val = if self.coercer.is_some() { match self.coercer.as_ref().unwrap().coerce(val, path) { Ok(None) => val, Ok(Some(new_value)) => { return_value = Some(new_value); return_value.as_mut().unwrap() } Err(errors) => { result.state.errors.extend(errors); return result; } } } else { val }; if self.nest.is_some() { let process_state = self.nest.as_ref().unwrap().process_nest(val, path, scope); result.append(process_state); } let validation_errors = self.process_validators(val, path); result.state.errors.extend(validation_errors); if let Some(ref id) = self.schema_id { if let Some(scope) = scope { let schema = scope.resolve(id); match schema { Some(schema) => result.append(schema.validate_in(val, path)), None => result.state.missing.push(id.clone()), } } } } if return_value.is_some() { result.value = return_value; } result } } impl Param { pub fn 
allow_values<T: Serialize>(&mut self, values: &[T]) { self.validators .push(Box::new(validators::AllowedValues::new( values.iter().map(|v| to_value(v).unwrap()).collect(), ))); } pub fn reject_values<T: Serialize>(&mut self, values: &[T]) { self.validators .push(Box::new(validators::RejectedValues::new( values.iter().map(|v| to_value(v).unwrap()).collect(), ))); } pub fn default<T: Serialize>(&mut self, default: T) { self.default = Some(to_value(&default).unwrap()); } }
use serde::Serialize; use serde_json::{to_value, Value}; use super::super::json_schema; use super::builder; use super::coercers; use super::validators; pub struct Param { pub name: String, pub coercer: Option<Box<dyn coercers::Coercer + Send + Sync>>, pub nest: Option<builder::Builder>, pub description: Option<String>, pub allow_null: bool, pub validators: validators::Validators, pub default: Option<Value>, pub schema_builder: Option<Box<dyn Fn(&mut json_schema::Builder) + Send + Sync>>, pub schema_id: Option<url::Url>, } unsafe impl Send for Param {} impl Param {
pub fn new_with_coercer( name: &str, coercer: Box<dyn coercers::Coercer + Send + Sync>, ) -> Param { Param { name: name.to_string(), description: None, coercer: Some(coercer), nest: None, allow_null: false, validators: vec![], default: None, schema_builder: None, schema_id: None, } } pub fn new_with_nest( name: &str, coercer: Box<dyn coercers::Coercer + Send + Sync>, nest: builder::Builder, ) -> Param { Param { name: name.to_string(), description: None, coercer: Some(coercer), nest: Some(nest), allow_null: false, validators: vec![], default: None, schema_builder: None, schema_id: None, } } pub fn build<F>(name: &str, build_def: F) -> Param where F: FnOnce(&mut Param), { let mut param = Param::new(name); build_def(&mut param); param } pub fn desc(&mut self, description: &str) { self.description = Some(description.to_string()); } pub fn schema_id(&mut self, id: url::Url) { self.schema_id = Some(id); } pub fn schema<F>(&mut self, build: F) where F: Fn(&mut json_schema::Builder) + 'static + Send + Sync, { self.schema_builder = Some(Box::new(build)); } pub fn coerce(&mut self, coercer: Box<dyn coercers::Coercer + Send + Sync>) { self.coercer = Some(coercer); } pub fn nest<F>(&mut self, nest_def: F) where F: FnOnce(&mut builder::Builder) -> (), { self.nest = Some(builder::Builder::build(nest_def)); } pub fn allow_null(&mut self) { self.allow_null = true; } pub fn regex(&mut self, regex: regex::Regex) { self.validators.push(Box::new(regex)); } pub fn validate(&mut self, validator: Box<dyn validators::Validator + 'static + Send + Sync>) { self.validators.push(validator); } pub fn validate_with<F>(&mut self, validator: F) where F: Fn(&Value, &str) -> super::validators::ValidatorResult + 'static + Send + Sync, { self.validators.push(Box::new(validator)); } fn process_validators(&self, val: &Value, path: &str) -> super::super::ValicoErrors { let mut errors = vec![]; for validator in self.validators.iter() { match validator.validate(val, path) { Ok(()) => (), 
Err(validation_errors) => errors.extend(validation_errors), } } errors } pub fn process( &self, val: &mut Value, path: &str, scope: Option<&json_schema::Scope>, ) -> super::ExtendedResult<Option<Value>> { if val.is_null() && self.allow_null { return super::ExtendedResult::new(None); } let mut result = super::ExtendedResult::new(None); let mut return_value = None; { let val = if self.coercer.is_some() { match self.coercer.as_ref().unwrap().coerce(val, path) { Ok(None) => val, Ok(Some(new_value)) => { return_value = Some(new_value); return_value.as_mut().unwrap() } Err(errors) => { result.state.errors.extend(errors); return result; } } } else { val }; if self.nest.is_some() { let process_state = self.nest.as_ref().unwrap().process_nest(val, path, scope); result.append(process_state); } let validation_errors = self.process_validators(val, path); result.state.errors.extend(validation_errors); if let Some(ref id) = self.schema_id { if let Some(scope) = scope { let schema = scope.resolve(id); match schema { Some(schema) => result.append(schema.validate_in(val, path)), None => result.state.missing.push(id.clone()), } } } } if return_value.is_some() { result.value = return_value; } result } } impl Param { pub fn allow_values<T: Serialize>(&mut self, values: &[T]) { self.validators .push(Box::new(validators::AllowedValues::new( values.iter().map(|v| to_value(v).unwrap()).collect(), ))); } pub fn reject_values<T: Serialize>(&mut self, values: &[T]) { self.validators .push(Box::new(validators::RejectedValues::new( values.iter().map(|v| to_value(v).unwrap()).collect(), ))); } pub fn default<T: Serialize>(&mut self, default: T) { self.default = Some(to_value(&default).unwrap()); } }
pub fn new(name: &str) -> Param { Param { name: name.to_string(), description: None, coercer: None, nest: None, allow_null: false, validators: vec![], default: None, schema_builder: None, schema_id: None, } }
function_block-full_function
[ { "content": "pub fn string() -> Box<dyn coercers::Coercer + Send + Sync> {\n\n Box::new(coercers::StringCoercer)\n\n}\n", "file_path": "src/json_dsl/mod.rs", "rank": 0, "score": 279433.43672192114 }, { "content": "pub trait Coercer: Send + Sync {\n\n fn get_primitive_type(&self) -> PrimitiveType;\n\n fn coerce(&self, _: &mut Value, _: &str) -> CoercerResult<Option<Value>>;\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct StringCoercer;\n\n\n\nimpl Coercer for StringCoercer {\n\n fn get_primitive_type(&self) -> PrimitiveType {\n\n PrimitiveType::String\n\n }\n\n fn coerce(&self, val: &mut Value, path: &str) -> CoercerResult<Option<Value>> {\n\n if val.is_string() {\n\n Ok(None)\n\n } else if val.is_number() {\n\n Ok(Some(to_value(&to_string(&val).unwrap()).unwrap()))\n\n } else {\n\n Err(vec![Box::new(errors::WrongType {\n\n path: path.to_string(),\n", "file_path": "src/json_dsl/coercers.rs", "rank": 1, "score": 258439.52863677926 }, { "content": "pub fn u64() -> Box<dyn coercers::Coercer + Send + Sync> {\n\n Box::new(coercers::U64Coercer)\n\n}\n", "file_path": "src/json_dsl/mod.rs", "rank": 2, "score": 225131.4492418405 }, { "content": "pub fn null() -> Box<dyn coercers::Coercer + Send + Sync> {\n\n Box::new(coercers::NullCoercer)\n\n}\n", "file_path": "src/json_dsl/mod.rs", "rank": 3, "score": 225131.4492418405 }, { "content": "pub fn object() -> Box<dyn coercers::Coercer + Send + Sync> {\n\n Box::new(coercers::ObjectCoercer)\n\n}\n\n\n\npub struct ExtendedResult<T> {\n\n value: T,\n\n state: json_schema::ValidationState,\n\n}\n\n\n\nimpl<T> ExtendedResult<T> {\n\n pub fn new(value: T) -> ExtendedResult<T> {\n\n ExtendedResult {\n\n value,\n\n state: json_schema::ValidationState::new(),\n\n }\n\n }\n\n\n\n pub fn with_errors(value: T, errors: super::ValicoErrors) -> ExtendedResult<T> {\n\n ExtendedResult {\n\n value,\n", "file_path": "src/json_dsl/mod.rs", "rank": 4, "score": 225131.4492418405 }, { "content": "pub fn i64() -> Box<dyn coercers::Coercer + Send + 
Sync> {\n\n Box::new(coercers::I64Coercer)\n\n}\n", "file_path": "src/json_dsl/mod.rs", "rank": 5, "score": 225131.4492418405 }, { "content": "pub fn boolean() -> Box<dyn coercers::Coercer + Send + Sync> {\n\n Box::new(coercers::BooleanCoercer)\n\n}\n", "file_path": "src/json_dsl/mod.rs", "rank": 6, "score": 225131.4492418405 }, { "content": "pub fn f64() -> Box<dyn coercers::Coercer + Send + Sync> {\n\n Box::new(coercers::F64Coercer)\n\n}\n", "file_path": "src/json_dsl/mod.rs", "rank": 7, "score": 225131.4492418405 }, { "content": "pub fn array() -> Box<dyn coercers::Coercer + Send + Sync> {\n\n Box::new(coercers::ArrayCoercer::new())\n\n}\n", "file_path": "src/json_dsl/mod.rs", "rank": 8, "score": 225131.4492418405 }, { "content": "pub fn encoded_array(separator: &str) -> Box<dyn coercers::Coercer + Send + Sync> {\n\n Box::new(coercers::ArrayCoercer::encoded(separator.to_string()))\n\n}\n\n\n", "file_path": "src/json_dsl/mod.rs", "rank": 9, "score": 209790.4407064715 }, { "content": "pub trait Keyword: Send + Sync + any::Any {\n\n fn compile(&self, def: &Value, ctx: &schema::WalkContext) -> KeywordResult;\n\n fn is_exclusive(&self) -> bool {\n\n false\n\n }\n\n fn place_first(&self) -> bool {\n\n false\n\n }\n\n}\n\n\n\nimpl<T: 'static + Send + Sync + any::Any> Keyword for T\n\nwhere\n\n T: Fn(&Value, &schema::WalkContext<'_>) -> KeywordResult,\n\n{\n\n fn compile(&self, def: &Value, ctx: &schema::WalkContext<'_>) -> KeywordResult {\n\n self(def, ctx)\n\n }\n\n}\n\n\n\nimpl fmt::Debug for dyn Keyword + 'static {\n", "file_path": "src/json_schema/keywords/mod.rs", "rank": 10, "score": 175024.39028578403 }, { "content": "pub fn is_matching(va: &Value, vb: &Value) -> bool {\n\n match va {\n\n Number(a) => match vb {\n\n Number(b) => a.as_f64().unwrap() == b.as_f64().unwrap(),\n\n _ => false,\n\n },\n\n _ => *va == *vb,\n\n }\n\n}\n", "file_path": "src/json_schema/helpers.rs", "rank": 11, "score": 155875.62955403316 }, { "content": "/// 
http://tools.ietf.org/html/draft-ietf-appsawg-json-pointer-07\n\npub fn encode(string: &str) -> String {\n\n const QUERY_SET: percent_encoding::AsciiSet = percent_encoding::CONTROLS\n\n .add(b' ')\n\n .add(b'\"')\n\n .add(b'#')\n\n .add(b'<')\n\n .add(b'>')\n\n .add(b'%');\n\n percent_encoding::percent_encode(\n\n string.replace(\"~\", \"~0\").replace(\"/\", \"~1\").as_bytes(),\n\n &QUERY_SET,\n\n )\n\n .to_string()\n\n}\n\n\n", "file_path": "src/json_schema/helpers.rs", "rank": 12, "score": 137550.23628411267 }, { "content": "/// Encode and connect\n\npub fn connect(strings: &[&str]) -> String {\n\n strings\n\n .iter()\n\n .map(|s| encode(s))\n\n .collect::<Vec<String>>()\n\n .join(\"/\")\n\n}\n\n\n", "file_path": "src/json_schema/helpers.rs", "rank": 13, "score": 129008.85808688501 }, { "content": "pub fn serialize_schema_path(url: &Url) -> (String, Option<String>) {\n\n let mut url_without_fragment = url.clone();\n\n url_without_fragment.set_fragment(None);\n\n let mut url_str = url_without_fragment.into_string();\n\n\n\n match url.fragment().as_ref() {\n\n Some(fragment) if !fragment.is_empty() => {\n\n if !fragment.starts_with('/') {\n\n let fragment_parts = fragment\n\n .split('/')\n\n .map(|s| s.to_string())\n\n .collect::<Vec<String>>();\n\n url_str.push_str(\"#\");\n\n url_str.push_str(fragment_parts[0].as_ref());\n\n let fragment = if fragment_parts.len() > 1 {\n\n Some(\"/\".to_string() + fragment_parts[1..].join(\"/\").as_ref())\n\n } else {\n\n None\n\n };\n\n (url_str, fragment)\n\n } else {\n\n (url_str, Some((*fragment).to_string()))\n\n }\n\n }\n\n _ => (url_str, None),\n\n }\n\n}\n\n\n", "file_path": "src/json_schema/helpers.rs", "rank": 14, "score": 126989.6141754483 }, { "content": "pub fn default() -> KeywordMap {\n\n let mut map = collections::HashMap::new();\n\n\n\n decouple_keyword((vec![\"$ref\"], Box::new(ref_::Ref)), &mut map);\n\n decouple_keyword((vec![\"allOf\"], Box::new(of::AllOf)), &mut map);\n\n decouple_keyword((vec![\"anyOf\"], 
Box::new(of::AnyOf)), &mut map);\n\n decouple_keyword((vec![\"const\"], Box::new(const_::Const)), &mut map);\n\n decouple_keyword((vec![\"contains\"], Box::new(contains::Contains)), &mut map);\n\n decouple_keyword(\n\n (vec![\"dependencies\"], Box::new(dependencies::Dependencies)),\n\n &mut map,\n\n );\n\n decouple_keyword((vec![\"enum\"], Box::new(enum_::Enum)), &mut map);\n\n decouple_keyword(\n\n (vec![\"exclusiveMaximum\"], Box::new(maxmin::ExclusiveMaximum)),\n\n &mut map,\n\n );\n\n decouple_keyword(\n\n (vec![\"exclusiveMinimum\"], Box::new(maxmin::ExclusiveMinimum)),\n\n &mut map,\n", "file_path": "src/json_schema/keywords/mod.rs", "rank": 15, "score": 126235.60040681818 }, { "content": "fn check_type(val: &Value, ty: json_schema::PrimitiveType) -> bool {\n\n match ty {\n\n json_schema::PrimitiveType::Array => val.is_array(),\n\n json_schema::PrimitiveType::Boolean => val.is_boolean(),\n\n json_schema::PrimitiveType::Integer => {\n\n let is_true_integer = val.is_u64() || val.is_i64();\n\n let is_integer_float = val.is_f64() && val.as_f64().unwrap().fract() == 0.0;\n\n is_true_integer || is_integer_float\n\n }\n\n json_schema::PrimitiveType::Number => val.is_number(),\n\n json_schema::PrimitiveType::Null => val.is_null(),\n\n json_schema::PrimitiveType::Object => val.is_object(),\n\n json_schema::PrimitiveType::String => val.is_string(),\n\n }\n\n}\n\n\n\nimpl super::Validator for Type {\n\n fn validate(&self, val: &Value, path: &str, _scope: &scope::Scope) -> super::ValidationState {\n\n let mut state = super::ValidationState::new();\n\n\n", "file_path": "src/json_schema/validators/type_.rs", "rank": 16, "score": 116157.47335205796 }, { "content": "pub trait ValicoError: Error + Send + Debug + GetTypeId {\n\n fn get_code(&self) -> &str;\n\n fn get_path(&self) -> &str;\n\n fn get_title(&self) -> &str;\n\n fn get_detail(&self) -> Option<&str> {\n\n None\n\n }\n\n}\n\n\n\nimpl dyn ValicoError {\n\n /// Is this `Error` object of type `E`?\n\n pub fn is<E: 
ValicoError>(&self) -> bool {\n\n self.typeid() == TypeId::of::<E>()\n\n }\n\n\n\n /// If this error is `E`, downcast this error to `E`, by reference.\n\n pub fn downcast<E: ValicoError>(&self) -> Option<&E> {\n\n if self.is::<E>() {\n\n unsafe { Some(&*(get_data_ptr(self) as *const E)) }\n\n } else {\n", "file_path": "src/common/error.rs", "rank": 17, "score": 111741.0833971274 }, { "content": "pub trait Validator {\n\n fn validate(&self, item: &Value, _: &str) -> ValidatorResult;\n\n}\n\n\n\nimpl fmt::Debug for dyn Validator + 'static {\n\n fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n fmt.write_str(\"[validator]\")\n\n }\n\n}\n\n\n\npub type BoxedValidator = Box<dyn Validator + 'static + Send + Sync>;\n\npub type Validators = Vec<BoxedValidator>;\n\n\n\nimpl<T> Validator for T\n\nwhere\n\n T: Fn(&Value, &str) -> ValidatorResult,\n\n{\n\n fn validate(&self, val: &Value, path: &str) -> ValidatorResult {\n\n self(val, path)\n\n }\n\n}\n", "file_path": "src/json_dsl/validators/mod.rs", "rank": 18, "score": 110930.6996853894 }, { "content": "pub trait Validator {\n\n fn validate(&self, item: &Value, _: &str, _: &scope::Scope) -> ValidationState;\n\n}\n\n\n\nimpl fmt::Debug for dyn Validator + 'static + Send + Sync {\n\n fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n fmt.write_str(\"<validator>\")\n\n }\n\n}\n\n\n\npub type BoxedValidator = Box<dyn Validator + 'static + Send + Sync>;\n\npub type Validators = Vec<BoxedValidator>;\n\n\n\nimpl<T> Validator for T\n\nwhere\n\n T: Fn(&Value, &str, &scope::Scope) -> ValidationState,\n\n{\n\n fn validate(&self, val: &Value, path: &str, scope: &scope::Scope) -> ValidationState {\n\n self(val, path, scope)\n\n }\n\n}\n", "file_path": "src/json_schema/validators/mod.rs", "rank": 19, "score": 110930.6996853894 }, { "content": "pub fn alter_fragment_path(mut url: Url, new_fragment: String) -> Url {\n\n let normalized_fragment = if new_fragment.starts_with('/') {\n\n &new_fragment[1..]\n\n } 
else {\n\n new_fragment.as_ref()\n\n };\n\n\n\n let result_fragment = match url.fragment() {\n\n Some(ref fragment) if !fragment.is_empty() => {\n\n if !fragment.starts_with('/') {\n\n let mut result_fragment = \"\".to_string();\n\n let mut fragment_parts = fragment.split('/').map(|s| s.to_string());\n\n result_fragment.push_str(\"#\");\n\n result_fragment.push_str(fragment_parts.next().unwrap().as_ref());\n\n result_fragment.push_str(\"/\");\n\n result_fragment.push_str(normalized_fragment.as_ref());\n\n result_fragment\n\n } else {\n\n \"/\".to_string() + normalized_fragment\n\n }\n\n }\n\n _ => \"/\".to_string() + normalized_fragment,\n\n };\n\n\n\n url.set_fragment(Some(&result_fragment));\n\n url\n\n}\n\n\n", "file_path": "src/json_schema/helpers.rs", "rank": 20, "score": 103334.28804955914 }, { "content": "pub fn convert_boolean_schema(val: Value) -> Value {\n\n match val.as_bool() {\n\n Some(b) => {\n\n if b {\n\n json!({})\n\n } else {\n\n json!({\"not\": {}})\n\n }\n\n }\n\n None => val,\n\n }\n\n}\n\n\n", "file_path": "src/json_schema/helpers.rs", "rank": 21, "score": 102170.31729445876 }, { "content": "pub fn schema_box(build: Box<dyn Fn(&mut Builder) + Send>) -> Builder {\n\n let mut builder = Builder::new();\n\n build(&mut builder);\n\n builder\n\n}\n", "file_path": "src/json_schema/builder.rs", "rank": 22, "score": 99282.73790164408 }, { "content": "pub fn assert_str_eq(params: &json_dsl::Builder, body: &str, res: &str) {\n\n assert_str_eq_with_scope(params, None, body, res);\n\n}\n\n\n", "file_path": "tests/dsl/helpers.rs", "rank": 23, "score": 82188.0734299442 }, { "content": "pub fn parse_url_key(key: &str, obj: &Value) -> Result<Option<Url>, schema::SchemaError> {\n\n match obj.get(key) {\n\n Some(value) => match value.as_str() {\n\n Some(string) => Url::parse(string)\n\n .map(Some)\n\n .map_err(schema::SchemaError::UrlParseError),\n\n None => Ok(None),\n\n },\n\n None => Ok(None),\n\n }\n\n}\n\n\n", "file_path": "src/json_schema/helpers.rs", 
"rank": 24, "score": 76964.06980477084 }, { "content": "use serde_json::Value;\n\n\n\nuse super::super::scope;\n\n\n\n#[allow(missing_copy_implementations)]\n\npub struct PropertyNames {\n\n pub url: url::Url,\n\n}\n\n\n\nimpl super::Validator for PropertyNames {\n\n fn validate(&self, val: &Value, path: &str, scope: &scope::Scope) -> super::ValidationState {\n\n let object = nonstrict_process!(val.as_object(), path);\n\n\n\n let schema = scope.resolve(&self.url);\n\n let mut state = super::ValidationState::new();\n\n\n\n if let Some(schema) = schema {\n\n for key in object.keys() {\n\n let item_path = [path, [\"[\", key.as_ref(), \"]\"].join(\"\").as_ref()].join(\"/\");\n\n state.append(schema.validate_in(&Value::from(key.clone()), item_path.as_ref()));\n\n }\n\n } else {\n\n state.missing.push(self.url.clone());\n\n }\n\n\n\n state\n\n }\n\n}\n", "file_path": "src/json_schema/validators/property_names.rs", "rank": 25, "score": 76122.60927276744 }, { "content": "use serde_json::Value;\n\n\n\nuse super::super::errors;\n\n\n\npub struct RejectedValues {\n\n rejected_values: Vec<Value>,\n\n}\n\n\n\nimpl RejectedValues {\n\n pub fn new(values: Vec<Value>) -> RejectedValues {\n\n RejectedValues {\n\n rejected_values: values,\n\n }\n\n }\n\n}\n\n\n\nimpl super::Validator for RejectedValues {\n\n fn validate(&self, val: &Value, path: &str) -> super::ValidatorResult {\n\n let mut matched = false;\n\n for rejected_value in self.rejected_values.iter() {\n", "file_path": "src/json_dsl/validators/rejected_values.rs", "rank": 26, "score": 75978.67074010913 }, { "content": "use super::super::errors;\n\nuse serde_json::Value;\n\n\n\npub struct AllowedValues {\n\n allowed_values: Vec<Value>,\n\n}\n\n\n\nimpl AllowedValues {\n\n pub fn new(values: Vec<Value>) -> AllowedValues {\n\n AllowedValues {\n\n allowed_values: values,\n\n }\n\n }\n\n}\n\n\n\nimpl super::Validator for AllowedValues {\n\n fn validate(&self, val: &Value, path: &str) -> super::ValidatorResult {\n\n let mut 
matched = false;\n\n for allowed_value in self.allowed_values.iter() {\n\n if val == allowed_value {\n", "file_path": "src/json_dsl/validators/allowed_values.rs", "rank": 27, "score": 75978.4621561173 }, { "content": " if val == rejected_value {\n\n matched = true;\n\n }\n\n }\n\n\n\n if matched {\n\n Err(vec![Box::new(errors::WrongValue {\n\n path: path.to_string(),\n\n detail: Some(\"Value is among reject list\".to_string()),\n\n })])\n\n } else {\n\n Ok(())\n\n }\n\n }\n\n}\n", "file_path": "src/json_dsl/validators/rejected_values.rs", "rank": 28, "score": 75966.76578821718 }, { "content": " matched = true;\n\n }\n\n }\n\n\n\n if matched {\n\n Ok(())\n\n } else {\n\n Err(vec![Box::new(errors::WrongValue {\n\n path: path.to_string(),\n\n detail: Some(\"Value is not among allowed list\".to_string()),\n\n })])\n\n }\n\n }\n\n}\n", "file_path": "src/json_dsl/validators/allowed_values.rs", "rank": 29, "score": 75966.5379013762 }, { "content": "#[test]\n\nfn is_validates_allow_values() {\n\n let params = json_dsl::Builder::build(|params| {\n\n params.req(\"a\", |a| {\n\n a.coerce(json_dsl::string());\n\n a.allow_values(&[\"allowed1\".to_string(), \"allowed2\".to_string()])\n\n })\n\n });\n\n\n\n assert_str_eq(&params, r#\"{\"a\":\"allowed1\"}\"#, r#\"{\"a\":\"allowed1\"}\"#);\n\n assert_str_eq(&params, r#\"{\"a\":\"allowed2\"}\"#, r#\"{\"a\":\"allowed2\"}\"#);\n\n\n\n // error because \"a\" is not in allowed list\n\n assert_error::<errors::WrongValue>(&params, r#\"{\"a\":\"not in allowed\"}\"#, \"/a\");\n\n}\n\n\n", "file_path": "tests/dsl/mod.rs", "rank": 30, "score": 75957.87346465942 }, { "content": "#[test]\n\nfn it_validates_params_with_schema() {\n\n let mut params = json_dsl::Builder::build(|params| {\n\n params.req(\"a\", |a| {\n\n a.schema(|schema| {\n\n schema.integer();\n\n schema.maximum(10f64);\n\n })\n\n });\n\n });\n\n\n\n let mut scope = json_schema::Scope::new();\n\n params.build_schemes(&mut scope).unwrap();\n\n\n\n assert_str_eq_with_scope(&params, 
Some(&scope), r#\"{\"a\":1}\"#, r#\"{\"a\":1}\"#);\n\n assert_error_with_scope::<schema_errors::Maximum>(&params, Some(&scope), r#\"{\"a\":11}\"#, \"/a\");\n\n assert_error_with_scope::<schema_errors::WrongType>(\n\n &params,\n\n Some(&scope),\n\n r#\"{\"a\":\"test\"}\"#,\n\n \"/a\",\n\n );\n\n}\n\n\n", "file_path": "tests/dsl/mod.rs", "rank": 31, "score": 75957.87346465942 }, { "content": "#[test]\n\nfn is_validates_reject_values() {\n\n let params = json_dsl::Builder::build(|params| {\n\n params.req(\"a\", |a| {\n\n a.coerce(json_dsl::string());\n\n a.reject_values(&[\"rejected1\".to_string(), \"rejected2\".to_string()])\n\n })\n\n });\n\n\n\n assert_str_eq(&params, r#\"{\"a\":\"some\"}\"#, r#\"{\"a\":\"some\"}\"#);\n\n\n\n // errors because \"a\" is in reject list\n\n assert_error::<errors::WrongValue>(&params, r#\"{\"a\":\"rejected1\"}\"#, \"/a\");\n\n assert_error::<errors::WrongValue>(&params, r#\"{\"a\":\"rejected2\"}\"#, \"/a\");\n\n}\n\n\n", "file_path": "tests/dsl/mod.rs", "rank": 32, "score": 75957.87346465942 }, { "content": "#[test]\n\nfn is_validates_opt_with_default() {\n\n let params = json_dsl::Builder::build(|params| {\n\n params.opt(\"a\", |a| {\n\n a.default(\"default\".to_string());\n\n });\n\n });\n\n\n\n assert_str_eq(&params, r#\"{\"a\":\"test\"}\"#, r#\"{\"a\":\"test\"}\"#);\n\n assert_str_eq(&params, r#\"{}\"#, r#\"{\"a\":\"default\"}\"#);\n\n}\n\n\n", "file_path": "tests/dsl/mod.rs", "rank": 33, "score": 75897.54650466678 }, { "content": "#[test]\n\nfn validate_string() {\n\n let mut scope = scope::Scope::new();\n\n let schema = scope\n\n .compile_and_return(\n\n builder::schema(|s| {\n\n s.string();\n\n })\n\n .into_json(),\n\n true,\n\n )\n\n .ok()\n\n .unwrap();\n\n\n\n assert_eq!(\n\n schema.validate(&to_value(&\"string\").unwrap()).is_valid(),\n\n true\n\n );\n\n assert_eq!(\n\n schema\n\n .validate(&jsonway::object(|_arr| {}).unwrap())\n\n .is_valid(),\n\n false\n\n );\n\n}\n\n\n", "file_path": "src/json_schema/keywords/type_.rs", 
"rank": 34, "score": 74280.29145275941 }, { "content": "#[test]\n\nfn it_validates_params_with_schema_and_coercion() {\n\n let mut params = json_dsl::Builder::build(|params| {\n\n params.req(\"a\", |a| {\n\n a.coerce(json_dsl::u64());\n\n a.schema(|schema| {\n\n schema.maximum(10f64);\n\n })\n\n });\n\n });\n\n\n\n let mut scope = json_schema::Scope::new();\n\n params.build_schemes(&mut scope).unwrap();\n\n\n\n assert_str_eq_with_scope(&params, Some(&scope), r#\"{\"a\":1}\"#, r#\"{\"a\":1}\"#);\n\n assert_str_eq_with_scope(&params, Some(&scope), r#\"{\"a\":\"10\"}\"#, r#\"{\"a\":10}\"#);\n\n assert_error_with_scope::<schema_errors::Maximum>(&params, Some(&scope), r#\"{\"a\":\"11\"}\"#, \"/a\");\n\n assert_error_with_scope::<errors::WrongType>(&params, Some(&scope), r#\"{\"a\":\"test\"}\"#, \"/a\");\n\n}\n", "file_path": "tests/dsl/mod.rs", "rank": 35, "score": 74198.50774320697 }, { "content": "#[test]\n\nfn validate_items_with_array_of_schemes_with_additional_bool() {\n\n let mut scope = scope::Scope::new();\n\n let schema = scope\n\n .compile_and_return(\n\n builder::schema(|s| {\n\n s.items_array(|items| {\n\n items.push(|item| {\n\n item.minimum(1f64);\n\n item.maximum(3f64);\n\n });\n\n items.push(|item| {\n\n item.minimum(3f64);\n\n item.maximum(6f64);\n\n });\n\n });\n\n s.additional_items(false);\n\n })\n\n .into_json(),\n\n true,\n\n )\n\n .ok()\n\n .unwrap();\n\n\n\n assert_eq!(\n\n schema.validate(&to_value(&[1, 3, 100]).unwrap()).is_valid(),\n\n false\n\n );\n\n}\n\n\n", "file_path": "src/json_schema/keywords/items.rs", "rank": 36, "score": 68024.7376832114 }, { "content": "pub fn compile(\n\n def: Value,\n\n external_id: Option<Url>,\n\n settings: CompilationSettings<'_>,\n\n) -> Result<Schema, SchemaError> {\n\n Schema::compile(def, external_id, settings)\n\n}\n\n\n", "file_path": "src/json_schema/schema.rs", "rank": 37, "score": 66925.78594081453 }, { "content": "pub fn get_errors(\n\n params: &json_dsl::Builder,\n\n scope: 
Option<&json_schema::Scope>,\n\n body: &str,\n\n) -> Vec<Box<dyn error::ValicoError>> {\n\n let obj = from_str(body);\n\n match obj {\n\n Ok(mut json) => {\n\n let state = params.process(&mut json, scope);\n\n if state.is_strictly_valid() {\n\n panic!(\"Success response when we await some errors\");\n\n } else {\n\n state.errors\n\n }\n\n }\n\n Err(_) => {\n\n panic!(\"Invalid JSON\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/dsl/helpers.rs", "rank": 38, "score": 66925.78594081453 }, { "content": "pub fn array_of(\n\n coercer: Box<dyn coercers::Coercer + Send + Sync>,\n\n) -> Box<dyn coercers::Coercer + Send + Sync> {\n\n Box::new(coercers::ArrayCoercer::of_type(coercer))\n\n}\n\n\n", "file_path": "src/json_dsl/mod.rs", "rank": 39, "score": 66925.78594081453 }, { "content": "pub fn test_result(\n\n params: &json_dsl::Builder,\n\n scope: Option<&json_schema::Scope>,\n\n body: &str,\n\n) -> Value {\n\n let obj = from_str(body);\n\n match obj {\n\n Ok(mut json) => {\n\n let state = params.process(&mut json, scope);\n\n if state.is_strictly_valid() {\n\n json\n\n } else {\n\n panic!(\"Errors during process: {:?}\", state);\n\n }\n\n }\n\n Err(_) => {\n\n panic!(\"Invalid JSON\");\n\n }\n\n }\n\n}\n\n\n", "file_path": "tests/dsl/helpers.rs", "rank": 40, "score": 66925.78594081453 }, { "content": "pub fn encoded_array_of(\n\n separator: &str,\n\n coercer: Box<dyn coercers::Coercer + Send + Sync>,\n\n) -> Box<dyn coercers::Coercer + Send + Sync> {\n\n Box::new(coercers::ArrayCoercer::encoded_of(\n\n separator.to_string(),\n\n coercer,\n\n ))\n\n}\n\n\n", "file_path": "src/json_dsl/mod.rs", "rank": 41, "score": 66029.71260047414 }, { "content": "pub fn assert_str_eq_with_scope(\n\n params: &json_dsl::Builder,\n\n scope: Option<&json_schema::Scope>,\n\n body: &str,\n\n res: &str,\n\n) {\n\n assert_eq!(\n\n to_string(&test_result(params, scope, body)).unwrap(),\n\n res.to_string()\n\n );\n\n}\n\n\n", "file_path": "tests/dsl/helpers.rs", "rank": 42, "score": 
65174.20994265957 }, { "content": "pub fn parse_url_key_with_base(\n\n key: &str,\n\n obj: &Value,\n\n base: &Url,\n\n) -> Result<Option<Url>, schema::SchemaError> {\n\n match obj.get(key) {\n\n Some(value) => match value.as_str() {\n\n Some(string) => Url::options()\n\n .base_url(Some(base))\n\n .parse(string)\n\n .map(Some)\n\n .map_err(schema::SchemaError::UrlParseError),\n\n None => Ok(None),\n\n },\n\n None => Ok(None),\n\n }\n\n}\n\n\n", "file_path": "src/json_schema/helpers.rs", "rank": 43, "score": 64356.58363984333 }, { "content": "pub trait GetTypeId: Any {\n\n fn typeid(&self) -> TypeId {\n\n TypeId::of::<Self>()\n\n }\n\n}\n\nimpl<T: Any> GetTypeId for T {}\n\n\n", "file_path": "src/common/error.rs", "rank": 44, "score": 63961.010828063125 }, { "content": "pub fn generate_id() -> Url {\n\n let uuid = Uuid::new_v4();\n\n Url::parse(&format!(\"json-schema://{}\", uuid)).unwrap()\n\n}\n\n\n", "file_path": "src/json_schema/helpers.rs", "rank": 45, "score": 63105.50817024856 }, { "content": "pub fn schema<F>(build: F) -> Builder\n\nwhere\n\n F: FnOnce(&mut Builder),\n\n{\n\n Builder::build(build)\n\n}\n\n\n", "file_path": "src/json_schema/builder.rs", "rank": 46, "score": 56790.45973374093 }, { "content": "pub fn assert_error<T: error::ValicoError + 'static>(\n\n params: &json_dsl::Builder,\n\n body: &str,\n\n path: &str,\n\n) {\n\n assert_error_with_scope::<T>(params, None, body, path);\n\n}\n", "file_path": "tests/dsl/helpers.rs", "rank": 47, "score": 56041.412056006164 }, { "content": "pub fn assert_error_with_scope<T: error::ValicoError + 'static>(\n\n params: &json_dsl::Builder,\n\n scope: Option<&json_schema::Scope>,\n\n body: &str,\n\n path: &str,\n\n) {\n\n let errors = get_errors(params, scope, body);\n\n let error = errors.iter().find(|error| {\n\n let err = error.downcast::<T>();\n\n err.is_some() && err.unwrap().get_path() == path\n\n });\n\n\n\n assert!(\n\n error.is_some(),\n\n \"Can't find error in {}. 
Errors: {:?}\",\n\n path,\n\n errors\n\n )\n\n}\n\n\n", "file_path": "tests/dsl/helpers.rs", "rank": 48, "score": 55323.46232978852 }, { "content": "pub fn get_data_ptr<T: ?Sized>(d: *const T) -> *const () {\n\n d as *const ()\n\n}\n\n\n", "file_path": "src/common/error.rs", "rank": 49, "score": 53022.71124347062 }, { "content": "pub fn decouple_keyword(keyword_pair: KeywordPair, map: &mut KeywordMap) {\n\n let (keys, keyword) = keyword_pair;\n\n\n\n let consumer = Arc::new(KeywordConsumer {\n\n keys: keys.clone(),\n\n keyword,\n\n });\n\n\n\n for key in keys.iter() {\n\n map.insert(key, consumer.clone());\n\n }\n\n}\n", "file_path": "src/json_schema/keywords/mod.rs", "rank": 50, "score": 51511.329909335866 }, { "content": "#[test]\n\nfn is_validates_with_function_validator() {\n\n let params = json_dsl::Builder::build(|params| {\n\n params.req(\"a\", |a| {\n\n a.coerce(json_dsl::u64());\n\n a.validate_with(|val: &Value, path: &str| {\n\n if *val == json!(2) {\n\n Ok(())\n\n } else {\n\n Err(vec![Box::new(errors::WrongType {\n\n path: path.to_string(),\n\n detail: \"Value is not exactly 2\".to_string(),\n\n })])\n\n }\n\n });\n\n })\n\n });\n\n\n\n assert_str_eq(&params, r#\"{\"a\":\"2\"}\"#, r#\"{\"a\":2}\"#);\n\n assert_error::<errors::WrongType>(&params, r#\"{\"a\":3}\"#, \"/a\");\n\n assert_error::<errors::WrongType>(&params, r#\"{\"a\":\"3\"}\"#, \"/a\");\n\n}\n\n\n", "file_path": "tests/dsl/mod.rs", "rank": 51, "score": 45556.85218800818 }, { "content": "impl ArrayCoercer {\n\n pub fn new() -> ArrayCoercer {\n\n ArrayCoercer {\n\n sub_coercer: None,\n\n separator: None,\n\n }\n\n }\n\n\n\n pub fn encoded(separator: String) -> ArrayCoercer {\n\n ArrayCoercer {\n\n separator: Some(separator),\n\n sub_coercer: None,\n\n }\n\n }\n\n\n\n pub fn encoded_of(\n\n separator: String,\n\n sub_coercer: Box<dyn Coercer + Send + Sync>,\n\n ) -> ArrayCoercer {\n\n ArrayCoercer {\n", "file_path": "src/json_dsl/coercers.rs", "rank": 63, "score": 40805.14698156542 }, { 
"content": " path: path.to_string(),\n\n detail:\n\n \"Can't coerce this string value to null. Correct value is only empty string\"\n\n .to_string(),\n\n })])\n\n }\n\n } else {\n\n Err(vec![Box::new(errors::WrongType {\n\n path: path.to_string(),\n\n detail: \"Can't coerce object to null\".to_string(),\n\n })])\n\n }\n\n }\n\n}\n\n\n\npub struct ArrayCoercer {\n\n sub_coercer: Option<Box<dyn Coercer + Send + Sync>>,\n\n separator: Option<String>,\n\n}\n\n\n", "file_path": "src/json_dsl/coercers.rs", "rank": 64, "score": 40804.53991957929 }, { "content": "use serde_json::{to_string, to_value, Value};\n\n\n\nuse super::errors;\n\n\n\n#[allow(dead_code)]\n\n#[derive(Copy, Clone)]\n\npub enum PrimitiveType {\n\n String,\n\n I64,\n\n U64,\n\n F64,\n\n Boolean,\n\n Null,\n\n Array,\n\n Object,\n\n // Reserved for future use in Rustless\n\n File,\n\n}\n\n\n\npub type CoercerResult<T> = Result<T, super::super::ValicoErrors>;\n\n\n", "file_path": "src/json_dsl/coercers.rs", "rank": 65, "score": 40801.25419793305 }, { "content": " separator: Some(separator),\n\n sub_coercer: Some(sub_coercer),\n\n }\n\n }\n\n\n\n pub fn of_type(sub_coercer: Box<dyn Coercer + Send + Sync>) -> ArrayCoercer {\n\n ArrayCoercer {\n\n separator: None,\n\n sub_coercer: Some(sub_coercer),\n\n }\n\n }\n\n\n\n fn coerce_array(&self, val: &mut Value, path: &str) -> CoercerResult<Option<Value>> {\n\n let array = val.as_array_mut().unwrap();\n\n if self.sub_coercer.is_some() {\n\n let sub_coercer = self.sub_coercer.as_ref().unwrap();\n\n let mut errors = vec![];\n\n for i in 0..array.len() {\n\n let item_path = [path, i.to_string().as_ref()].join(\"/\");\n\n match sub_coercer.coerce(&mut array[i], item_path.as_ref()) {\n", "file_path": "src/json_dsl/coercers.rs", "rank": 66, "score": 40800.82715234187 }, { "content": " match converted {\n\n Some(num) => Ok(Some(to_value(&num).unwrap())),\n\n None => Err(vec![Box::new(errors::WrongType {\n\n path: path.to_string(),\n\n detail: \"Can't coerce string value 
to u64\".to_string(),\n\n })]),\n\n }\n\n } else {\n\n Err(vec![Box::new(errors::WrongType {\n\n path: path.to_string(),\n\n detail: \"Can't coerce object value to u64\".to_string(),\n\n })])\n\n }\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct F64Coercer;\n\n\n\nimpl Coercer for F64Coercer {\n", "file_path": "src/json_dsl/coercers.rs", "rank": 67, "score": 40800.79214152964 }, { "content": " })]),\n\n }\n\n } else {\n\n Err(vec![Box::new(errors::WrongType {\n\n path: path.to_string(),\n\n detail: \"Can't coerce object value to f64\".to_string(),\n\n })])\n\n }\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct BooleanCoercer;\n\n\n\nimpl Coercer for BooleanCoercer {\n\n fn get_primitive_type(&self) -> PrimitiveType {\n\n PrimitiveType::Boolean\n\n }\n\n fn coerce(&self, val: &mut Value, path: &str) -> CoercerResult<Option<Value>> {\n\n if val.is_boolean() {\n", "file_path": "src/json_dsl/coercers.rs", "rank": 68, "score": 40800.58796635382 }, { "content": " detail: \"Can't coerce value to string\".to_string(),\n\n })])\n\n }\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct I64Coercer;\n\n\n\nimpl Coercer for I64Coercer {\n\n fn get_primitive_type(&self) -> PrimitiveType {\n\n PrimitiveType::I64\n\n }\n\n fn coerce(&self, val: &mut Value, path: &str) -> CoercerResult<Option<Value>> {\n\n if val.is_i64() {\n\n Ok(None)\n\n } else if val.is_u64() {\n\n let val = val.as_u64().unwrap();\n\n Ok(Some(to_value(&(val as i64)).unwrap()))\n\n } else if val.is_f64() {\n", "file_path": "src/json_dsl/coercers.rs", "rank": 69, "score": 40800.4173573567 }, { "content": " Ok(Some(array))\n\n } else {\n\n Err(vec![Box::new(errors::WrongType {\n\n path: path.to_string(),\n\n detail: \"Can't coerce object to array\".to_string(),\n\n })])\n\n }\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct ObjectCoercer;\n\n\n\nimpl Coercer for ObjectCoercer {\n\n fn get_primitive_type(&self) -> PrimitiveType {\n\n PrimitiveType::Object\n\n }\n\n fn coerce(&self, val: &mut 
Value, path: &str) -> CoercerResult<Option<Value>> {\n\n if val.is_object() {\n\n Ok(None)\n\n } else {\n\n Err(vec![Box::new(errors::WrongType {\n\n path: path.to_string(),\n\n detail: \"Can't coerce non-object value to the object type\".to_string(),\n\n })])\n\n }\n\n }\n\n}\n", "file_path": "src/json_dsl/coercers.rs", "rank": 70, "score": 40799.79763610115 }, { "content": " }\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct NullCoercer;\n\n\n\nimpl Coercer for NullCoercer {\n\n fn get_primitive_type(&self) -> PrimitiveType {\n\n PrimitiveType::Null\n\n }\n\n fn coerce(&self, val: &mut Value, path: &str) -> CoercerResult<Option<Value>> {\n\n if val.is_null() {\n\n Ok(None)\n\n } else if val.is_string() {\n\n let val = val.as_str().unwrap();\n\n if val == \"\" {\n\n Ok(Some(json!(null)))\n\n } else {\n\n Err(vec![Box::new(errors::WrongType {\n", "file_path": "src/json_dsl/coercers.rs", "rank": 71, "score": 40798.86734328897 }, { "content": "\n\n#[derive(Copy, Clone)]\n\npub struct U64Coercer;\n\n\n\nimpl Coercer for U64Coercer {\n\n fn get_primitive_type(&self) -> PrimitiveType {\n\n PrimitiveType::U64\n\n }\n\n fn coerce(&self, val: &mut Value, path: &str) -> CoercerResult<Option<Value>> {\n\n if val.is_u64() {\n\n Ok(None)\n\n } else if val.is_i64() {\n\n let val = val.as_i64().unwrap();\n\n Ok(Some(to_value(&(val as u64)).unwrap()))\n\n } else if val.is_f64() {\n\n let val = val.as_f64().unwrap();\n\n Ok(Some(to_value(&(val as u64)).unwrap()))\n\n } else if val.is_string() {\n\n let val = val.as_str().unwrap();\n\n let converted: Option<u64> = val.parse().ok();\n", "file_path": "src/json_dsl/coercers.rs", "rank": 72, "score": 40798.21723916736 }, { "content": "}\n\n\n\nimpl Coercer for ArrayCoercer {\n\n fn get_primitive_type(&self) -> PrimitiveType {\n\n PrimitiveType::Array\n\n }\n\n\n\n fn coerce(&self, val: &mut Value, path: &str) -> CoercerResult<Option<Value>> {\n\n if val.is_array() {\n\n self.coerce_array(val, path)\n\n } else if val.is_string() 
&& self.separator.is_some() {\n\n let separator = self.separator.as_ref().unwrap();\n\n let string = val.as_str().unwrap();\n\n let mut array = Value::Array(\n\n string\n\n .split(&separator[..])\n\n .map(|s| Value::String(s.to_string()))\n\n .collect::<Vec<Value>>(),\n\n );\n\n self.coerce_array(&mut array, path)?;\n", "file_path": "src/json_dsl/coercers.rs", "rank": 73, "score": 40798.10958863281 }, { "content": " fn get_primitive_type(&self) -> PrimitiveType {\n\n PrimitiveType::F64\n\n }\n\n fn coerce(&self, val: &mut Value, path: &str) -> CoercerResult<Option<Value>> {\n\n if val.is_f64() {\n\n Ok(None)\n\n } else if val.is_i64() {\n\n let val = val.as_i64().unwrap();\n\n Ok(Some(to_value(&(val as f64)).unwrap()))\n\n } else if val.is_u64() {\n\n let val = val.as_u64().unwrap();\n\n Ok(Some(to_value(&(val as f64)).unwrap()))\n\n } else if val.is_string() {\n\n let val = val.as_str().unwrap();\n\n let converted: Option<f64> = val.parse().ok();\n\n match converted {\n\n Some(num) => Ok(Some(to_value(&num).unwrap())),\n\n None => Err(vec![Box::new(errors::WrongType {\n\n path: path.to_string(),\n\n detail: \"Can't coerce string value to f64\".to_string(),\n", "file_path": "src/json_dsl/coercers.rs", "rank": 74, "score": 40792.873115909075 }, { "content": " let val = val.as_f64().unwrap();\n\n Ok(Some(to_value(&(val as i64)).unwrap()))\n\n } else if val.is_string() {\n\n let val = val.as_str().unwrap();\n\n let converted: Option<i64> = val.parse().ok();\n\n match converted {\n\n Some(num) => Ok(Some(to_value(&num).unwrap())),\n\n None => Err(vec![Box::new(errors::WrongType {\n\n path: path.to_string(),\n\n detail: \"Can't coerce string value to i64\".to_string(),\n\n })]),\n\n }\n\n } else {\n\n Err(vec![Box::new(errors::WrongType {\n\n path: path.to_string(),\n\n detail: \"Can't coerce object value to i64\".to_string(),\n\n })])\n\n }\n\n }\n\n}\n", "file_path": "src/json_dsl/coercers.rs", "rank": 75, "score": 40790.89193088472 }, { "content": " Ok(None)\n\n } 
else if val.is_string() {\n\n let val = val.as_str().unwrap();\n\n if val == \"true\" {\n\n Ok(Some(json!(true)))\n\n } else if val == \"false\" {\n\n Ok(Some(json!(false)))\n\n } else {\n\n Err(vec![\n\n Box::new(errors::WrongType {\n\n path: path.to_string(),\n\n detail: \"Can't coerce this string value to boolean. Correct values are 'true' and 'false'\".to_string()\n\n })\n\n ])\n\n }\n\n } else {\n\n Err(vec![Box::new(errors::WrongType {\n\n path: path.to_string(),\n\n detail: \"Can't coerce object to boolean\".to_string(),\n\n })])\n", "file_path": "src/json_dsl/coercers.rs", "rank": 76, "score": 40790.28363405998 }, { "content": " Ok(Some(value)) => {\n\n array.remove(i);\n\n array.insert(i, value);\n\n }\n\n Ok(None) => (),\n\n Err(err) => {\n\n errors.extend(err);\n\n }\n\n }\n\n }\n\n\n\n if errors.is_empty() {\n\n Ok(None)\n\n } else {\n\n Err(errors)\n\n }\n\n } else {\n\n Ok(None)\n\n }\n\n }\n", "file_path": "src/json_dsl/coercers.rs", "rank": 77, "score": 40786.1715664471 }, { "content": "use serde_json::Value;\n\n\n\nuse super::super::helpers;\n\nuse super::super::schema;\n\nuse super::super::validators;\n\n\n\n#[allow(missing_copy_implementations)]\n\npub struct PropertyNames;\n\nimpl super::Keyword for PropertyNames {\n\n fn compile(&self, def: &Value, ctx: &schema::WalkContext<'_>) -> super::KeywordResult {\n\n let property_names = keyword_key_exists!(def, \"propertyNames\");\n\n\n\n if property_names.is_object() || property_names.is_boolean() {\n\n Ok(Some(Box::new(validators::PropertyNames {\n\n url: helpers::alter_fragment_path(\n\n ctx.url.clone(),\n\n [ctx.escaped_fragment().as_ref(), \"propertyNames\"].join(\"/\"),\n\n ),\n\n })))\n\n } else {\n\n Err(schema::SchemaError::Malformed {\n\n path: ctx.fragment.join(\"/\"),\n\n detail: \"The value of propertyNames MUST be an object or a boolean\".to_string(),\n\n })\n\n }\n\n }\n\n}\n", "file_path": "src/json_schema/keywords/property_names.rs", "rank": 78, "score": 39148.150558093126 }, { 
"content": "#[test]\n\nfn is_process_string_require() {\n\n let params = json_dsl::Builder::build(|params| {\n\n params.req_typed(\"a\", json_dsl::string());\n\n });\n\n\n\n assert_str_eq(&params, r#\"{\"a\":\"1\"}\"#, r#\"{\"a\":\"1\"}\"#);\n\n assert_str_eq(&params, r#\"{\"a\":1}\"#, r#\"{\"a\":\"1\"}\"#);\n\n assert_str_eq(&params, r#\"{\"a\":1.112}\"#, r#\"{\"a\":\"1.112\"}\"#);\n\n\n\n // error because \"a\" is object\n\n assert_error::<errors::WrongType>(&params, r#\"{\"a\": {}}\"#, \"/a\");\n\n\n\n // error because \"a\" is null\n\n assert_error::<errors::WrongType>(&params, r#\"{\"a\": null}\"#, \"/a\");\n\n}\n\n\n", "file_path": "tests/dsl/mod.rs", "rank": 79, "score": 39065.07667055512 }, { "content": "#[test]\n\nfn default_when_needed() {\n\n let mut scope = scope::Scope::new().supply_defaults();\n\n let schema = scope.compile_and_return(mk_schema(), true).unwrap();\n\n let result = schema.validate(&json!({\"a\":[true],\"b\":[true],\"c\":{}}));\n\n assert!(result.is_strictly_valid());\n\n assert_eq!(\n\n result.replacement,\n\n Some(json!({\"a\":[true,42],\"b\":[true,42],\"c\":{\"x\":false,\"y\":true}}))\n\n );\n\n}\n\n\n", "file_path": "src/json_schema/keywords/of.rs", "rank": 80, "score": 38921.02677797795 }, { "content": "#[test]\n\nfn no_default_for_schema() {\n\n let mut scope = scope::Scope::new().supply_defaults();\n\n let schema = scope.compile_and_return(mk_schema(), true).unwrap();\n\n assert_eq!(schema.get_default(), None);\n\n}\n\n\n", "file_path": "src/json_schema/keywords/of.rs", "rank": 81, "score": 38921.02677797795 }, { "content": "#[test]\n\nfn no_default_otherwise() {\n\n let mut scope = scope::Scope::new().supply_defaults();\n\n let schema = scope.compile_and_return(mk_schema(), true).unwrap();\n\n let result = schema.validate(&json!({\"a\":{\"x\":\"x\"},\"b\":[true,0],\"c\":{\"x\":1,\"y\":2}}));\n\n assert!(result.is_strictly_valid());\n\n assert_eq!(result.replacement, None);\n\n}\n\n\n", "file_path": 
"src/json_schema/keywords/of.rs", "rank": 82, "score": 38921.02677797795 }, { "content": "#[test]\n\nfn divergent_defaults() {\n\n let mut scope = scope::Scope::new().supply_defaults();\n\n let schema = scope\n\n .compile_and_return(\n\n json!({\n\n \"allOf\": [\n\n {\n\n \"properties\": {\n\n \"a\": {\n\n \"anyOf\": [{\n\n \"properties\": {\n\n \"b\": { \"default\": 42 }\n\n }\n\n }]\n\n }\n\n },\n\n },\n\n {\n\n \"properties\": {\n\n \"a\": { \"default\": {} }\n", "file_path": "src/json_schema/keywords/of.rs", "rank": 83, "score": 38921.02677797795 }, { "content": "#[test]\n\nfn conflicting_defaults() {\n\n let mut scope = scope::Scope::new().supply_defaults();\n\n let schema = scope\n\n .compile_and_return(\n\n json!({\n\n \"allOf\": [\n\n {\n\n \"properties\": {\n\n \"a\": { \"type\": \"number\" }\n\n },\n\n },\n\n {\n\n \"properties\": {\n\n \"a\": { \"default\": \"hello\" }\n\n }\n\n }\n\n ]\n\n }),\n\n true,\n\n )\n\n .unwrap();\n\n let result = schema.validate(&json!({}));\n\n assert!(!result.is_valid());\n\n assert_eq!(&*format!(\"{:?}\", result),\n\n \"ValidationState { errors: [WrongType { path: \\\"/a\\\", detail: \\\"The value must be number\\\" }], missing: [], replacement: None }\");\n\n}\n\n\n", "file_path": "src/json_schema/keywords/of.rs", "rank": 84, "score": 38921.02677797795 }, { "content": "#[test]\n\nfn default_when_needed2() {\n\n let mut scope = scope::Scope::new().supply_defaults();\n\n let schema = scope.compile_and_return(mk_schema(), true).unwrap();\n\n let result = schema.validate(&json!({\"a\":{},\"b\":{}}));\n\n assert!(result.is_strictly_valid());\n\n assert_eq!(\n\n result.replacement,\n\n Some(json!({\"a\":{\"x\":\"buh\"},\"b\":{\"x\":\"buh\"}}))\n\n );\n\n}\n\n\n", "file_path": "src/json_schema/keywords/of.rs", "rank": 85, "score": 38921.02677797795 }, { "content": "use serde_json::Value;\n\n\n\nuse super::super::errors;\n\nuse super::super::scope;\n\n\n\n#[allow(missing_copy_implementations)]\n\npub struct Not {\n\n pub url: 
url::Url,\n\n}\n\n\n\nimpl super::Validator for Not {\n\n fn validate(&self, val: &Value, path: &str, scope: &scope::Scope) -> super::ValidationState {\n\n let schema = scope.resolve(&self.url);\n\n let mut state = super::ValidationState::new();\n\n\n\n if let Some(schema) = schema {\n\n if schema.validate_in(val, path).is_valid() {\n\n state.errors.push(Box::new(errors::Not {\n\n path: path.to_string(),\n\n }))\n\n }\n\n } else {\n\n state.missing.push(self.url.clone());\n\n }\n\n\n\n state\n\n }\n\n}\n", "file_path": "src/json_schema/validators/not.rs", "rank": 86, "score": 38837.88181728568 }, { "content": "use serde_json::Value;\n\nuse std::borrow::Cow;\n\n\n\nuse super::super::errors;\n\nuse super::super::scope;\n\n\n\n#[allow(missing_copy_implementations)]\n\npub struct AllOf {\n\n pub schemes: Vec<url::Url>,\n\n}\n\n\n\nimpl super::Validator for AllOf {\n\n fn validate(&self, val: &Value, path: &str, scope: &scope::Scope) -> super::ValidationState {\n\n let mut state = super::ValidationState::new();\n\n let mut val = Cow::Borrowed(val);\n\n\n\n // first get all relevant schemas\n\n let schemas = self\n\n .schemes\n\n .iter()\n", "file_path": "src/json_schema/validators/of.rs", "rank": 87, "score": 38836.10042233262 }, { "content": " }\n\n\n\n if !valid {\n\n state.errors.push(Box::new(errors::AnyOf {\n\n path: path.to_string(),\n\n states,\n\n }))\n\n }\n\n\n\n state.set_replacement(val);\n\n state\n\n }\n\n}\n\n\n\n#[allow(missing_copy_implementations)]\n\npub struct OneOf {\n\n pub schemes: Vec<url::Url>,\n\n}\n\n\n\nimpl super::Validator for OneOf {\n", "file_path": "src/json_schema/validators/of.rs", "rank": 88, "score": 38832.10479931508 }, { "content": " val = Cow::Owned(v);\n\n }\n\n\n\n state.set_replacement(val);\n\n state\n\n }\n\n}\n\n\n\n#[allow(missing_copy_implementations)]\n\npub struct AnyOf {\n\n pub schemes: Vec<url::Url>,\n\n}\n\n\n\nimpl super::Validator for AnyOf {\n\n fn validate(&self, val: &Value, path: &str, scope: &scope::Scope) -> 
super::ValidationState {\n\n let mut state = super::ValidationState::new();\n\n let mut val = Cow::Borrowed(val);\n\n\n\n let mut states = vec![];\n\n let mut valid = false;\n", "file_path": "src/json_schema/validators/of.rs", "rank": 89, "score": 38831.402501102515 }, { "content": "\n\n // second pass if defaults are enabled to check that the result is stable\n\n if let Cow::Owned(v) = val {\n\n let mut second = Cow::Borrowed(&v);\n\n\n\n for schema in schemas.iter() {\n\n let mut result = schema.validate_in(&second, path);\n\n if result.is_valid() && result.replacement.is_some() {\n\n *second.to_mut() = result.replacement.take().unwrap();\n\n }\n\n state.append(result);\n\n }\n\n if let Cow::Owned(_) = second {\n\n state.errors.push(Box::new(errors::DivergentDefaults {\n\n path: path.to_string(),\n\n }));\n\n }\n\n if !state.is_valid() {\n\n return state;\n\n }\n", "file_path": "src/json_schema/validators/of.rs", "rank": 90, "score": 38827.06633060269 }, { "content": " fn validate(&self, val: &Value, path: &str, scope: &scope::Scope) -> super::ValidationState {\n\n let mut state = super::ValidationState::new();\n\n let mut val = Cow::Borrowed(val);\n\n\n\n let mut states = vec![];\n\n let mut valid = 0;\n\n for url in self.schemes.iter() {\n\n let schema = scope.resolve(url);\n\n\n\n if let Some(schema) = schema {\n\n let mut result = schema.validate_in(&val, path);\n\n\n\n state.missing.extend(result.missing.clone());\n\n\n\n if result.is_valid() {\n\n if let Some(result) = result.replacement.take() {\n\n *val.to_mut() = result;\n\n }\n\n valid += 1;\n\n } else {\n", "file_path": "src/json_schema/validators/of.rs", "rank": 91, "score": 38824.75662803699 }, { "content": " states.push(result)\n\n }\n\n } else {\n\n state.missing.push(url.clone())\n\n }\n\n }\n\n\n\n if valid != 1 {\n\n state.errors.push(Box::new(errors::OneOf {\n\n path: path.to_string(),\n\n states,\n\n }))\n\n }\n\n\n\n state.set_replacement(val);\n\n state\n\n }\n\n}\n", "file_path": 
"src/json_schema/validators/of.rs", "rank": 92, "score": 38824.09598352965 }, { "content": " for url in self.schemes.iter() {\n\n let schema = scope.resolve(url);\n\n\n\n if let Some(schema) = schema {\n\n let mut result = schema.validate_in(&val, path);\n\n\n\n state.missing.extend(result.missing.clone());\n\n\n\n if result.is_valid() {\n\n if let Some(result) = result.replacement.take() {\n\n *val.to_mut() = result;\n\n }\n\n valid = true;\n\n break;\n\n } else {\n\n states.push(result)\n\n }\n\n } else {\n\n state.missing.push(url.clone())\n\n }\n", "file_path": "src/json_schema/validators/of.rs", "rank": 93, "score": 38821.68755296945 }, { "content": " .map(|url| (url, scope.resolve(url)))\n\n .filter_map(|(url, opt)| {\n\n if opt.is_none() {\n\n state.missing.push(url.clone())\n\n }\n\n opt\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n // first pass to populate all defaults (if enabled)\n\n for schema in schemas.iter() {\n\n let mut result = schema.validate_in(&val, path);\n\n if result.is_valid() && result.replacement.is_some() {\n\n *val.to_mut() = result.replacement.take().unwrap();\n\n }\n\n state.append(result);\n\n }\n\n if !state.is_valid() {\n\n return state;\n\n }\n", "file_path": "src/json_schema/validators/of.rs", "rank": 94, "score": 38821.538929509974 }, { "content": "#[test]\n\nfn is_process_object_with_nested_require() {\n\n let params = json_dsl::Builder::build(|params| {\n\n params.req_nested(\"a\", json_dsl::object(), |params| {\n\n params.req_typed(\"b\", json_dsl::f64());\n\n params.req_typed(\"c\", json_dsl::array_of(json_dsl::string()));\n\n });\n\n });\n\n\n\n assert_str_eq(\n\n &params,\n\n r#\"{\"a\":{\"b\":\"1.22\",\"c\":[1.112,\"\"]}}\"#,\n\n r#\"{\"a\":{\"b\":1.22,\"c\":[\"1.112\",\"\"]}}\"#,\n\n );\n\n\n\n // error because \"a\":\"b\" is not a f64\n\n assert_error::<errors::WrongType>(&params, r#\"{\"a\":{\"b\":\"not-f64\"},\"c\":[1.112,\"\"]}\"#, \"/a/b\");\n\n\n\n // error because \"a\":\"c\":\"1\" is object and can't be coerced to 
string\n\n assert_error::<errors::WrongType>(&params, r#\"{\"a\":{\"b\":\"1.22\",\"c\":[1.112,{}]}}\"#, \"/a/c/1\");\n\n}\n\n\n", "file_path": "tests/dsl/mod.rs", "rank": 95, "score": 38219.250378427474 }, { "content": "#[test]\n\nfn is_process_array_with_nested_require() {\n\n let params = json_dsl::Builder::build(|params| {\n\n params.req_nested(\"a\", json_dsl::array(), |params| {\n\n params.req_typed(\"b\", json_dsl::string());\n\n params.req_typed(\"c\", json_dsl::array_of(json_dsl::u64()))\n\n });\n\n });\n\n\n\n assert_str_eq(\n\n &params,\n\n r#\"{\"a\":[{\"b\":1,\"c\":[\"1\"]}]}\"#,\n\n r#\"{\"a\":[{\"b\":\"1\",\"c\":[1]}]}\"#,\n\n );\n\n\n\n // error because element in \"a\" at index(0) is not coersible to string\n\n assert_error::<errors::WrongType>(&params, r#\"{\"a\":[{\"b\":{},\"c\":[\"1\"]}]}\"#, \"/a/0/b\");\n\n\n\n // error because element in \"a\":0:\"c\":0 is not coersible to string\n\n assert_error::<errors::WrongType>(&params, r#\"{\"a\":[{\"b\":1,\"c\":[{}]}]}\"#, \"/a/0/c/0\");\n\n}\n\n\n", "file_path": "tests/dsl/mod.rs", "rank": 96, "score": 38219.250378427474 }, { "content": "#[cfg(test)]\n\nfn mk_schema() -> Value {\n\n json!({\n\n \"properties\": {\n\n \"a\": {\n\n \"oneOf\": [\n\n { \"type\": \"array\", \"items\": [{\"type\":\"boolean\"},{\"default\":42}] },\n\n { \"type\": \"object\", \"properties\": {\"x\": {\"default\": \"buh\"}} }\n\n ]\n\n },\n\n \"b\": {\n\n \"anyOf\": [\n\n { \"type\": \"array\", \"items\": [{\"type\":\"boolean\"},{\"default\":42}] },\n\n { \"type\": \"object\", \"properties\": {\"x\": {\"default\": \"buh\"}} }\n\n ]\n\n },\n\n \"c\": {\n\n \"allOf\": [\n\n { \"properties\": {\"x\": {\"default\": false}} },\n\n { \"properties\": {\"y\": {\"default\": true}} }\n\n ]\n\n }\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/json_schema/keywords/of.rs", "rank": 97, "score": 38078.45250582503 }, { "content": "#[test]\n\nfn no_default_for_schema() {\n\n let mut scope = scope::Scope::new().supply_defaults();\n\n let schema = 
scope.compile_and_return(mk_schema(), true).unwrap();\n\n assert_eq!(schema.get_default(), None);\n\n}\n\n\n", "file_path": "src/json_schema/keywords/dependencies.rs", "rank": 98, "score": 38019.52286227436 }, { "content": "#[test]\n\nfn default_when_needed() {\n\n let mut scope = scope::Scope::new().supply_defaults();\n\n let schema = scope.compile_and_return(mk_schema(), true).unwrap();\n\n let result = schema.validate(&json!({\"x\": 12}));\n\n assert!(result.is_strictly_valid());\n\n assert_eq!(result.replacement, Some(json!({\"x\": 12, \"y\": \"buh\"})));\n\n}\n\n\n", "file_path": "src/json_schema/keywords/dependencies.rs", "rank": 99, "score": 38019.52286227436 } ]
Rust
day7/src/main.rs
monkeydom/adventofcode-2020-rust
9b9105fabc51b9c793fae59528ac593f54524e11
#[allow(dead_code)] mod aoc; #[allow(dead_code)] mod file; use std::collections::HashMap; use std::collections::HashSet; use std::fmt; fn main() { aoc::preamble(); part2(); } #[derive(Debug)] struct BagContents { bt: BagType, contents: Vec<BagType>, } impl BagContents { fn from_strings(strings: &[&str]) -> Self { let bt = BagType::from_strings(&strings[..2]); let mut contents: Vec<BagType> = vec![]; for index in (4..strings.len()).step_by(4) { if let Ok(count) = strings[index].parse() { let bagtype = BagType::from_strings(&strings[index + 1..index + 3]); contents.extend((0..count).map(|_| bagtype.clone())); } } BagContents { bt, contents } } } #[derive(Debug, PartialEq, Eq, Clone, Hash)] struct BagType { attribute: String, color: String, } impl fmt::Display for BagType { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "[{} {}]", self.attribute, self.color) } } impl BagType { fn from_strings(strings: &[&str]) -> Self { BagType { attribute: strings[0].to_string(), color: strings[1].to_string(), } } } fn parse_rule(line: String) -> BagContents { let tokens: Vec<&str> = line.split(" ").collect(); let bag_contents = BagContents::from_strings(&tokens); println!("{}\n{:?}\n", line, &bag_contents); bag_contents } fn part1() { let contents: Vec<BagContents> = file::lines().map(|l| parse_rule(l)).collect(); let innermost_bag = BagType::from_strings(&["shiny", "gold"]); let mut hash_set: HashSet<BagType> = HashSet::new(); hash_set.insert(innermost_bag); loop { let mut collect_set: HashSet<BagType> = HashSet::new(); for bc in contents.iter() { for inner in bc.contents.iter() { if hash_set.contains(inner) { collect_set.insert(bc.bt.clone()); } } } for bt in &hash_set { collect_set.insert(bt.clone()); } if collect_set.len() > hash_set.len() { hash_set = collect_set; } else { break; } } let result = hash_set.len() - 1; aoc::print_solution1(format!("{}", result).as_str()); } fn part2() { let result = "None Yet"; let contents: Vec<BagContents> = 
file::lines().map(|l| parse_rule(l)).collect(); let mut containment_count = HashMap::new(); let gold_bag = BagType::from_strings(&["shiny", "gold"]); let mut cc = &mut containment_count; let mut update = |mcc: &mut HashMap<BagType, i64>, bt: &BagType, count: i64| { mcc.insert(bt.clone(), mcc.get(bt).unwrap_or(&0) + count); }; for bc in contents.iter() { if bc.contents.len() == 0 { println!("LEN 0 ! {:?}", bc); update(&mut cc, &bc.bt, 0); } } let mut iteration = 1; loop { let mut unknown_count = 0; let mut updated_some = false; for bc in contents.iter() { if let None = cc.get(&bc.bt) { if bc.contents.iter().all(|bt| cc.get(&bt).is_some()) { updated_some = true; let value = bc .contents .iter() .fold(0, |acc, v| acc + cc.get(v).unwrap() + 1); println!("{:?} -> updating with value {}", &bc.bt, value); update(cc, &bc.bt, value); } else { println!("unknown {:?}", &bc.bt); unknown_count += 1; } } } if !updated_some { break; } println!( "====== {} iteration [uk: {}] ======", iteration, unknown_count ); iteration += 1; } let result = containment_count.get(&gold_bag); aoc::print_solution2(format!("{:?} ", result).as_str()); } #[cfg(test)] mod tests { use super::*; const TEST_INPUT_1: &str = "light red bags contain 1 bright white bag, 2 muted yellow bags. dark orange bags contain 3 bright white bags, 4 muted yellow bags. bright white bags contain 1 shiny gold bag. muted yellow bags contain 2 shiny gold bags, 9 faded blue bags. shiny gold bags contain 1 dark olive bag, 2 vibrant plum bags. dark olive bags contain 3 faded blue bags, 4 dotted black bags. vibrant plum bags contain 5 faded blue bags, 6 dotted black bags. faded blue bags contain no other bags. dotted black bags contain no other bags." #[test] fn test_parse_lines() { } }
#[allow(dead_code)] mod aoc; #[allow(dead_code)] mod file; use std::collections::HashMap; use std::collections::HashSet; use std::fmt; fn main() { aoc::preamble(); part2(); } #[derive(Debug)] struct BagContents { bt: BagType, contents: Vec<BagType>, } impl BagContents { fn from_strings(strings: &[&str]) -> Self { let bt = BagType::from_strings(&strings[..2]); let mut contents: Vec<BagType> = vec![]; for index in (4..strings.len()).step_by(4) { if let Ok(count) = strings[index].parse() { let bagtype = BagType::from_strings(&strings[index + 1..index + 3]); contents.extend((0..count).map(|_| bagtype.clone())); } } BagContents { bt, contents } } } #[derive(Debug, PartialEq, Eq, Clone, Hash)] struct BagType { attribute: String, color: String, } impl fmt::Display for BagType { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "[{} {}]", self.attribute, s
(|bt| cc.get(&bt).is_some()) { updated_some = true; let value = bc .contents .iter() .fold(0, |acc, v| acc + cc.get(v).unwrap() + 1); println!("{:?} -> updating with value {}", &bc.bt, value); update(cc, &bc.bt, value); } else { println!("unknown {:?}", &bc.bt); unknown_count += 1; } } } if !updated_some { break; } println!( "====== {} iteration [uk: {}] ======", iteration, unknown_count ); iteration += 1; } let result = containment_count.get(&gold_bag); aoc::print_solution2(format!("{:?} ", result).as_str()); } #[cfg(test)] mod tests { use super::*; const TEST_INPUT_1: &str = "light red bags contain 1 bright white bag, 2 muted yellow bags. dark orange bags contain 3 bright white bags, 4 muted yellow bags. bright white bags contain 1 shiny gold bag. muted yellow bags contain 2 shiny gold bags, 9 faded blue bags. shiny gold bags contain 1 dark olive bag, 2 vibrant plum bags. dark olive bags contain 3 faded blue bags, 4 dotted black bags. vibrant plum bags contain 5 faded blue bags, 6 dotted black bags. faded blue bags contain no other bags. dotted black bags contain no other bags." #[test] fn test_parse_lines() { } }
elf.color) } } impl BagType { fn from_strings(strings: &[&str]) -> Self { BagType { attribute: strings[0].to_string(), color: strings[1].to_string(), } } } fn parse_rule(line: String) -> BagContents { let tokens: Vec<&str> = line.split(" ").collect(); let bag_contents = BagContents::from_strings(&tokens); println!("{}\n{:?}\n", line, &bag_contents); bag_contents } fn part1() { let contents: Vec<BagContents> = file::lines().map(|l| parse_rule(l)).collect(); let innermost_bag = BagType::from_strings(&["shiny", "gold"]); let mut hash_set: HashSet<BagType> = HashSet::new(); hash_set.insert(innermost_bag); loop { let mut collect_set: HashSet<BagType> = HashSet::new(); for bc in contents.iter() { for inner in bc.contents.iter() { if hash_set.contains(inner) { collect_set.insert(bc.bt.clone()); } } } for bt in &hash_set { collect_set.insert(bt.clone()); } if collect_set.len() > hash_set.len() { hash_set = collect_set; } else { break; } } let result = hash_set.len() - 1; aoc::print_solution1(format!("{}", result).as_str()); } fn part2() { let result = "None Yet"; let contents: Vec<BagContents> = file::lines().map(|l| parse_rule(l)).collect(); let mut containment_count = HashMap::new(); let gold_bag = BagType::from_strings(&["shiny", "gold"]); let mut cc = &mut containment_count; let mut update = |mcc: &mut HashMap<BagType, i64>, bt: &BagType, count: i64| { mcc.insert(bt.clone(), mcc.get(bt).unwrap_or(&0) + count); }; for bc in contents.iter() { if bc.contents.len() == 0 { println!("LEN 0 ! {:?}", bc); update(&mut cc, &bc.bt, 0); } } let mut iteration = 1; loop { let mut unknown_count = 0; let mut updated_some = false; for bc in contents.iter() { if let None = cc.get(&bc.bt) { if bc.contents.iter().all
random
[ { "content": "fn process_lines(lines: impl Iterator<Item = String>) -> (Vec<Field>, Vec<i64>, Vec<Vec<i64>>) {\n\n let mut my_ticket: Vec<i64> = vec![];\n\n let mut nearby_tickets: Vec<Vec<i64>> = vec![];\n\n let mut fields: Vec<Field> = vec![];\n\n let mut state = 0;\n\n for line in lines {\n\n if line == \"\" {\n\n state += 1;\n\n continue;\n\n }\n\n println!(\"{:?}\", &line);\n\n if state == 0 {\n\n fields.push(Field::from_str(&line).unwrap());\n\n } else if line.chars().last().unwrap() != ':' {\n\n let pass: Vec<i64> = line.split(\",\").map(|n| n.parse::<i64>().unwrap()).collect();\n\n if state == 1 {\n\n my_ticket = pass;\n\n } else {\n\n nearby_tickets.push(pass);\n\n }\n\n }\n\n }\n\n println!(\"{:?} {:?} {:?}\", fields, my_ticket, nearby_tickets);\n\n (fields, my_ticket, nearby_tickets)\n\n}\n", "file_path": "day16/src/main.rs", "rank": 0, "score": 226923.78950502846 }, { "content": "fn validate_and_remove(keys: &mut HashSet<&str>, key: &str, value: &str) -> bool {\n\n let result = match key {\n\n \"byr\" => is_year_in_range(1920..=2002, value),\n\n \"iyr\" => is_year_in_range(2010..=2020, value),\n\n \"eyr\" => is_year_in_range(2020..=2030, value),\n\n \"ecl\" => is_valid_eyecolor(value),\n\n \"pid\" => is_valid_number(9, value),\n\n \"hcl\" => is_valid_haircolor(value),\n\n \"hgt\" => is_valid_height(value),\n\n _ => true,\n\n };\n\n\n\n if result {\n\n keys.remove(key);\n\n true\n\n } else {\n\n println!(\n\n \"{}\",\n\n Red.bold()\n\n .paint(format!(\"{} is invalid for {}!\", value, key))\n\n );\n\n false\n\n }\n\n}\n\n\n", "file_path": "day4/src/main.rs", "rank": 1, "score": 209446.08095193817 }, { "content": "fn solve_part2(lines: impl Iterator<Item = String>) -> i64 {\n\n let mut state: HashSet<Point4> = parse_to_state(lines)\n\n .iter()\n\n .map(|p| Point4(p.0, p.1, p.2, 0))\n\n .collect();\n\n // println!(\"{:?}\", state);\n\n state = perform_step4(state);\n\n\n\n // for y in 0..4 {\n\n // for x in 0..3 {\n\n // print!(\n\n // \"{}\",\n\n // (if 
state.contains(&Point4(x, y, 0, 0)) {\n\n // \"#\"\n\n // } else {\n\n // \".\"\n\n // })\n\n // )\n\n // }\n\n // println!(\"\");\n", "file_path": "day17/src/main.rs", "rank": 2, "score": 201581.8665159427 }, { "content": "fn solve_part2(lines: impl Iterator<Item = String>) -> i64 {\n\n let (mut fields, my_ticket, tickets) = process_lines(lines);\n\n\n\n let valid_tickets: Vec<&Vec<i64>> = tickets\n\n .iter()\n\n .filter(|t| t.iter().all(|n| is_valid(&fields, *n)))\n\n .collect();\n\n\n\n for ticket in &valid_tickets {\n\n for (n, value) in ticket.iter().enumerate() {\n\n for field in &mut fields {\n\n if !field.ranges.iter().any(|r| r.contains(value)) {\n\n field.invalid_positions.insert(n);\n\n }\n\n }\n\n }\n\n }\n\n\n\n fields.sort_by(|a, b| b.invalid_positions.len().cmp(&a.invalid_positions.len()));\n\n\n", "file_path": "day16/src/main.rs", "rank": 3, "score": 201581.8665159427 }, { "content": "fn parse_to_state(lines: impl Iterator<Item = String>) -> HashSet<Point> {\n\n let mut result = HashSet::new();\n\n for (y, line) in lines.enumerate() {\n\n for (x, c) in line.chars().enumerate() {\n\n if c == '#' {\n\n result.insert(Point(x as i64, y as i64, 0));\n\n }\n\n }\n\n println!(\"{}: {}\", y, line);\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "day17/src/main.rs", "rank": 4, "score": 192150.76754089142 }, { "content": "fn execute(prog: &mut Vec<i64>, pc: &mut usize) {\n\n let store = prog[*pc + 3] as usize;\n\n let a = prog[prog[*pc + 1] as usize];\n\n let b = prog[prog[*pc + 2] as usize];\n\n if prog[*pc] == 1 {\n\n prog[store] = a + b;\n\n } else {\n\n prog[store] = a * b;\n\n }\n\n *pc += 4;\n\n}\n\n\n", "file_path": "Preparation/2019/day2/src/main.rs", "rank": 5, "score": 189222.07155699306 }, { "content": "fn some_kind_of_uppercase_first_letter(s: &str) -> String {\n\n let mut c = s.chars();\n\n match c.next() {\n\n None => String::new(),\n\n Some(f) => f.to_uppercase().chain(c).collect(),\n\n }\n\n}\n\n\n", "file_path": "day15/src/aoc.rs", "rank": 6, 
"score": 188680.3749531592 }, { "content": "fn some_kind_of_uppercase_first_letter(s: &str) -> String {\n\n let mut c = s.chars();\n\n match c.next() {\n\n None => String::new(),\n\n Some(f) => f.to_uppercase().chain(c).collect(),\n\n }\n\n}\n\n\n", "file_path": "day14/src/aoc.rs", "rank": 7, "score": 188680.3749531592 }, { "content": "fn some_kind_of_uppercase_first_letter(s: &str) -> String {\n\n let mut c = s.chars();\n\n match c.next() {\n\n None => String::new(),\n\n Some(f) => f.to_uppercase().chain(c).collect(),\n\n }\n\n}\n\n\n", "file_path": "day7/src/aoc.rs", "rank": 8, "score": 188680.3749531592 }, { "content": "fn some_kind_of_uppercase_first_letter(s: &str) -> String {\n\n let mut c = s.chars();\n\n match c.next() {\n\n None => String::new(),\n\n Some(f) => f.to_uppercase().chain(c).collect(),\n\n }\n\n}\n\n\n", "file_path": "day12/src/aoc.rs", "rank": 9, "score": 188680.3749531592 }, { "content": "fn some_kind_of_uppercase_first_letter(s: &str) -> String {\n\n let mut c = s.chars();\n\n match c.next() {\n\n None => String::new(),\n\n Some(f) => f.to_uppercase().chain(c).collect(),\n\n }\n\n}\n\n\n", "file_path": "day6/src/aoc.rs", "rank": 10, "score": 188680.3749531592 }, { "content": "fn some_kind_of_uppercase_first_letter(s: &str) -> String {\n\n let mut c = s.chars();\n\n match c.next() {\n\n None => String::new(),\n\n Some(f) => f.to_uppercase().chain(c).collect(),\n\n }\n\n}\n\n\n", "file_path": "day10/src/aoc.rs", "rank": 11, "score": 188680.3749531592 }, { "content": "fn some_kind_of_uppercase_first_letter(s: &str) -> String {\n\n let mut c = s.chars();\n\n match c.next() {\n\n None => String::new(),\n\n Some(f) => f.to_uppercase().chain(c).collect(),\n\n }\n\n}\n\n\n", "file_path": "day11/src/aoc.rs", "rank": 12, "score": 188680.3749531592 }, { "content": "fn some_kind_of_uppercase_first_letter(s: &str) -> String {\n\n let mut c = s.chars();\n\n match c.next() {\n\n None => String::new(),\n\n Some(f) => f.to_uppercase().chain(c).collect(),\n\n 
}\n\n}\n\n\n", "file_path": "day13/src/aoc.rs", "rank": 13, "score": 188680.3749531592 }, { "content": "fn some_kind_of_uppercase_first_letter(s: &str) -> String {\n\n let mut c = s.chars();\n\n match c.next() {\n\n None => String::new(),\n\n Some(f) => f.to_uppercase().chain(c).collect(),\n\n }\n\n}\n\n\n", "file_path": "day4/src/aoc.rs", "rank": 14, "score": 188680.3749531592 }, { "content": "fn some_kind_of_uppercase_first_letter(s: &str) -> String {\n\n let mut c = s.chars();\n\n match c.next() {\n\n None => String::new(),\n\n Some(f) => f.to_uppercase().chain(c).collect(),\n\n }\n\n}\n\n\n", "file_path": "day16/src/aoc.rs", "rank": 15, "score": 188680.3749531592 }, { "content": "fn some_kind_of_uppercase_first_letter(s: &str) -> String {\n\n let mut c = s.chars();\n\n match c.next() {\n\n None => String::new(),\n\n Some(f) => f.to_uppercase().chain(c).collect(),\n\n }\n\n}\n\n\n", "file_path": "day17/src/aoc.rs", "rank": 16, "score": 188680.3749531592 }, { "content": "fn some_kind_of_uppercase_first_letter(s: &str) -> String {\n\n let mut c = s.chars();\n\n match c.next() {\n\n None => String::new(),\n\n Some(f) => f.to_uppercase().chain(c).collect(),\n\n }\n\n}\n\n\n", "file_path": "day9/src/aoc.rs", "rank": 17, "score": 188680.3749531592 }, { "content": "fn some_kind_of_uppercase_first_letter(s: &str) -> String {\n\n let mut c = s.chars();\n\n match c.next() {\n\n None => String::new(),\n\n Some(f) => f.to_uppercase().chain(c).collect(),\n\n }\n\n}\n\n\n", "file_path": "day8/src/aoc.rs", "rank": 18, "score": 188680.3749531592 }, { "content": "fn process_lines(lines: impl Iterator<Item = String>) -> HashMap<usize, u64> {\n\n let mut result = HashMap::new();\n\n\n\n let mut set_mask = 0u64;\n\n let mut clear_mask = 0u64;\n\n\n\n for line in lines {\n\n let parts: Vec<&str> = line.split(\" = \").collect();\n\n match parts[0] {\n\n \"mask\" => {\n\n let pm = parse_mask(parts[1]);\n\n set_mask = pm.0;\n\n clear_mask = pm.1;\n\n }\n\n\n\n _ => {\n\n let loc: usize 
= parts[0][4..(parts[0].len() - 1)].parse().unwrap();\n\n let value: u64 = parts[1].parse().unwrap();\n\n let value = value | set_mask;\n\n let value = value & !clear_mask;\n\n result.insert(loc, value);\n\n }\n\n }\n\n }\n\n\n\n result\n\n}\n\n\n", "file_path": "day14/src/main.rs", "rank": 19, "score": 186491.62588077915 }, { "content": "pub fn sections() -> impl Iterator<Item = impl Iterator<Item = String>> {\n\n let mut intermediate: Vec<Vec<String>> = vec![];\n\n let mut current: Vec<String> = vec![];\n\n for line in lines() {\n\n if line != \"\" {\n\n current.push(line);\n\n } else {\n\n if current.len() > 0 {\n\n intermediate.push(current);\n\n }\n\n current = vec![]\n\n }\n\n }\n\n if current.len() > 0 {\n\n intermediate.push(current);\n\n }\n\n\n\n intermediate.into_iter().map(|v| v.into_iter())\n\n}\n", "file_path": "day9/src/file.rs", "rank": 20, "score": 185848.20392267633 }, { "content": "pub fn sections() -> impl Iterator<Item = impl Iterator<Item = String>> {\n\n let mut intermediate: Vec<Vec<String>> = vec![];\n\n let mut current: Vec<String> = vec![];\n\n for line in lines() {\n\n if line != \"\" {\n\n current.push(line);\n\n } else {\n\n if current.len() > 0 {\n\n intermediate.push(current);\n\n }\n\n current = vec![]\n\n }\n\n }\n\n if current.len() > 0 {\n\n intermediate.push(current);\n\n }\n\n\n\n intermediate.into_iter().map(|v| v.into_iter())\n\n}\n", "file_path": "day8/src/file.rs", "rank": 21, "score": 185848.20392267633 }, { "content": "pub fn sections() -> impl Iterator<Item = impl Iterator<Item = String>> {\n\n let mut intermediate: Vec<Vec<String>> = vec![];\n\n let mut current: Vec<String> = vec![];\n\n for line in lines() {\n\n if line != \"\" {\n\n current.push(line);\n\n } else {\n\n if current.len() > 0 {\n\n intermediate.push(current);\n\n }\n\n current = vec![]\n\n }\n\n }\n\n if current.len() > 0 {\n\n intermediate.push(current);\n\n }\n\n\n\n intermediate.into_iter().map(|v| v.into_iter())\n\n}\n", "file_path": 
"day6/src/file.rs", "rank": 22, "score": 185848.20392267633 }, { "content": "pub fn sections() -> impl Iterator<Item = impl Iterator<Item = String>> {\n\n let mut intermediate: Vec<Vec<String>> = vec![];\n\n let mut current: Vec<String> = vec![];\n\n for line in lines() {\n\n if line != \"\" {\n\n current.push(line);\n\n } else {\n\n if current.len() > 0 {\n\n intermediate.push(current);\n\n }\n\n current = vec![]\n\n }\n\n }\n\n if current.len() > 0 {\n\n intermediate.push(current);\n\n }\n\n\n\n intermediate.into_iter().map(|v| v.into_iter())\n\n}\n", "file_path": "day12/src/file.rs", "rank": 23, "score": 185848.20392267633 }, { "content": "pub fn sections() -> impl Iterator<Item = impl Iterator<Item = String>> {\n\n let mut intermediate: Vec<Vec<String>> = vec![];\n\n let mut current: Vec<String> = vec![];\n\n for line in lines() {\n\n if line != \"\" {\n\n current.push(line);\n\n } else {\n\n if current.len() > 0 {\n\n intermediate.push(current);\n\n }\n\n current = vec![]\n\n }\n\n }\n\n if current.len() > 0 {\n\n intermediate.push(current);\n\n }\n\n\n\n intermediate.into_iter().map(|v| v.into_iter())\n\n}\n", "file_path": "day10/src/file.rs", "rank": 24, "score": 185848.20392267633 }, { "content": "pub fn sections() -> impl Iterator<Item = impl Iterator<Item = String>> {\n\n let mut intermediate: Vec<Vec<String>> = vec![];\n\n let mut current: Vec<String> = vec![];\n\n for line in lines() {\n\n if line != \"\" {\n\n current.push(line);\n\n } else {\n\n if current.len() > 0 {\n\n intermediate.push(current);\n\n }\n\n current = vec![]\n\n }\n\n }\n\n if current.len() > 0 {\n\n intermediate.push(current);\n\n }\n\n\n\n intermediate.into_iter().map(|v| v.into_iter())\n\n}\n", "file_path": "day16/src/file.rs", "rank": 25, "score": 185848.20392267633 }, { "content": "pub fn sections() -> impl Iterator<Item = impl Iterator<Item = String>> {\n\n let mut intermediate: Vec<Vec<String>> = vec![];\n\n let mut current: Vec<String> = vec![];\n\n for line in lines() {\n\n 
if line != \"\" {\n\n current.push(line);\n\n } else {\n\n if current.len() > 0 {\n\n intermediate.push(current);\n\n }\n\n current = vec![]\n\n }\n\n }\n\n if current.len() > 0 {\n\n intermediate.push(current);\n\n }\n\n\n\n intermediate.into_iter().map(|v| v.into_iter())\n\n}\n", "file_path": "day11/src/file.rs", "rank": 26, "score": 185848.20392267633 }, { "content": "pub fn sections() -> impl Iterator<Item = impl Iterator<Item = String>> {\n\n let mut intermediate: Vec<Vec<String>> = vec![];\n\n let mut current: Vec<String> = vec![];\n\n for line in lines() {\n\n if line != \"\" {\n\n current.push(line);\n\n } else {\n\n if current.len() > 0 {\n\n intermediate.push(current);\n\n }\n\n current = vec![]\n\n }\n\n }\n\n if current.len() > 0 {\n\n intermediate.push(current);\n\n }\n\n\n\n intermediate.into_iter().map(|v| v.into_iter())\n\n}\n", "file_path": "day17/src/file.rs", "rank": 27, "score": 185848.20392267633 }, { "content": "pub fn sections() -> impl Iterator<Item = impl Iterator<Item = String>> {\n\n let mut intermediate: Vec<Vec<String>> = vec![];\n\n let mut current: Vec<String> = vec![];\n\n for line in lines() {\n\n if line != \"\" {\n\n current.push(line);\n\n } else {\n\n if current.len() > 0 {\n\n intermediate.push(current);\n\n }\n\n current = vec![]\n\n }\n\n }\n\n if current.len() > 0 {\n\n intermediate.push(current);\n\n }\n\n\n\n intermediate.into_iter().map(|v| v.into_iter())\n\n}\n", "file_path": "day7/src/file.rs", "rank": 28, "score": 185848.20392267633 }, { "content": "pub fn sections() -> impl Iterator<Item = impl Iterator<Item = String>> {\n\n let mut intermediate: Vec<Vec<String>> = vec![];\n\n let mut current: Vec<String> = vec![];\n\n for line in lines() {\n\n if line != \"\" {\n\n current.push(line);\n\n } else {\n\n if current.len() > 0 {\n\n intermediate.push(current);\n\n }\n\n current = vec![]\n\n }\n\n }\n\n if current.len() > 0 {\n\n intermediate.push(current);\n\n }\n\n\n\n intermediate.into_iter().map(|v| 
v.into_iter())\n\n}\n", "file_path": "day14/src/file.rs", "rank": 29, "score": 185848.20392267633 }, { "content": "pub fn sections() -> impl Iterator<Item = impl Iterator<Item = String>> {\n\n let mut intermediate: Vec<Vec<String>> = vec![];\n\n let mut current: Vec<String> = vec![];\n\n for line in lines() {\n\n if line != \"\" {\n\n current.push(line);\n\n } else {\n\n if current.len() > 0 {\n\n intermediate.push(current);\n\n }\n\n current = vec![]\n\n }\n\n }\n\n if current.len() > 0 {\n\n intermediate.push(current);\n\n }\n\n\n\n intermediate.into_iter().map(|v| v.into_iter())\n\n}\n", "file_path": "day15/src/file.rs", "rank": 30, "score": 185848.20392267633 }, { "content": "pub fn sections() -> impl Iterator<Item = impl Iterator<Item = String>> {\n\n let mut intermediate: Vec<Vec<String>> = vec![];\n\n let mut current: Vec<String> = vec![];\n\n for line in lines() {\n\n if line != \"\" {\n\n current.push(line);\n\n } else {\n\n if current.len() > 0 {\n\n intermediate.push(current);\n\n }\n\n current = vec![]\n\n }\n\n }\n\n if current.len() > 0 {\n\n intermediate.push(current);\n\n }\n\n\n\n intermediate.into_iter().map(|v| v.into_iter())\n\n}\n", "file_path": "day13/src/file.rs", "rank": 31, "score": 185848.20392267633 }, { "content": "pub fn lines() -> impl Iterator<Item = String> {\n\n let file = File::open(\"input.txt\").expect(\"Expected file to live at ./input.txt\");\n\n let reader = BufReader::new(file);\n\n\n\n reader.lines().map(|l| l.unwrap())\n\n}\n\n\n", "file_path": "day17/src/file.rs", "rank": 32, "score": 184576.01465982804 }, { "content": "pub fn lines() -> impl Iterator<Item = String> {\n\n let file = File::open(\"input.txt\").expect(\"Expected file to live at ./input.txt\");\n\n let reader = BufReader::new(file);\n\n\n\n reader.lines().map(|l| l.unwrap())\n\n}\n", "file_path": "day2/src/file.rs", "rank": 33, "score": 184576.01465982804 }, { "content": "pub fn lines() -> impl Iterator<Item = String> {\n\n let file = 
File::open(\"input.txt\").expect(\"Expected file to live at ./input.txt\");\n\n let reader = BufReader::new(file);\n\n\n\n reader.lines().map(|l| l.unwrap())\n\n}\n\n\n", "file_path": "day15/src/file.rs", "rank": 34, "score": 184576.01465982804 }, { "content": "pub fn lines() -> impl Iterator<Item = String> {\n\n let file = File::open(\"input.txt\").expect(\"Expected file to live at ./input.txt\");\n\n let reader = BufReader::new(file);\n\n\n\n reader.lines().map(|l| l.unwrap())\n\n}\n", "file_path": "day3/src/file.rs", "rank": 35, "score": 184576.01465982804 }, { "content": "pub fn lines() -> impl Iterator<Item = String> {\n\n let file = File::open(\"input.txt\").expect(\"Expected file to live at ./input.txt\");\n\n let reader = BufReader::new(file);\n\n\n\n reader.lines().map(|l| l.unwrap())\n\n}\n\n\n", "file_path": "day6/src/file.rs", "rank": 36, "score": 184576.01465982804 }, { "content": "pub fn lines() -> impl Iterator<Item = String> {\n\n let file = File::open(\"input.txt\").expect(\"Expected file to live at ./input.txt\");\n\n let reader = BufReader::new(file);\n\n\n\n reader.lines().map(|l| l.unwrap())\n\n}\n\n\n", "file_path": "day8/src/file.rs", "rank": 37, "score": 184576.01465982804 }, { "content": "pub fn lines() -> impl Iterator<Item = String> {\n\n let file = File::open(\"input.txt\").expect(\"Expected file to live at ./input.txt\");\n\n let reader = BufReader::new(file);\n\n\n\n reader.lines().map(|l| l.unwrap())\n\n}\n\n\n", "file_path": "day14/src/file.rs", "rank": 38, "score": 184576.01465982804 }, { "content": "pub fn lines() -> impl Iterator<Item = String> {\n\n let file = File::open(\"input.txt\").expect(\"Expected file to live at ./input.txt\");\n\n let reader = BufReader::new(file);\n\n\n\n reader.lines().map(|l| l.unwrap())\n\n}\n", "file_path": "day4/src/file.rs", "rank": 39, "score": 184576.01465982804 }, { "content": "pub fn lines() -> impl Iterator<Item = String> {\n\n let file = File::open(\"input.txt\").expect(\"Expected file to 
live at ./input.txt\");\n\n let reader = BufReader::new(file);\n\n\n\n reader.lines().map(|l| l.unwrap())\n\n}\n\n\n", "file_path": "day10/src/file.rs", "rank": 40, "score": 184576.01465982804 }, { "content": "pub fn lines() -> impl Iterator<Item = String> {\n\n let file = File::open(\"input.txt\").expect(\"Expected file to live at ./input.txt\");\n\n let reader = BufReader::new(file);\n\n\n\n reader.lines().map(|l| l.unwrap())\n\n}\n\n\n", "file_path": "day9/src/file.rs", "rank": 41, "score": 184576.01465982804 }, { "content": "pub fn lines() -> impl Iterator<Item = String> {\n\n let file = File::open(\"input.txt\").expect(\"Expected file to live at ./input.txt\");\n\n let reader = BufReader::new(file);\n\n\n\n reader.lines().map(|l| l.unwrap())\n\n}\n\n\n", "file_path": "day13/src/file.rs", "rank": 42, "score": 184576.01465982804 }, { "content": "pub fn lines() -> impl Iterator<Item = String> {\n\n let file = File::open(\"input.txt\").expect(\"Expected file to live at ./input.txt\");\n\n let reader = BufReader::new(file);\n\n\n\n reader.lines().map(|l| l.unwrap())\n\n}\n\n\n", "file_path": "day16/src/file.rs", "rank": 43, "score": 184576.01465982804 }, { "content": "pub fn lines() -> impl Iterator<Item = String> {\n\n let file = File::open(\"input.txt\").expect(\"Expected file to live at ./input.txt\");\n\n let reader = BufReader::new(file);\n\n\n\n reader.lines().map(|l| l.unwrap())\n\n}\n\n\n", "file_path": "day11/src/file.rs", "rank": 44, "score": 184576.01465982804 }, { "content": "pub fn lines() -> impl Iterator<Item = String> {\n\n let file = File::open(\"input.txt\").expect(\"Expected file to live at ./input.txt\");\n\n let reader = BufReader::new(file);\n\n\n\n reader.lines().map(|l| l.unwrap())\n\n}\n\n\n", "file_path": "day7/src/file.rs", "rank": 45, "score": 184576.01465982804 }, { "content": "pub fn lines() -> impl Iterator<Item = String> {\n\n let file = File::open(\"input.txt\").expect(\"Expected file to live at ./input.txt\");\n\n let reader = 
BufReader::new(file);\n\n\n\n reader.lines().map(|l| l.unwrap())\n\n}\n\n\n", "file_path": "day12/src/file.rs", "rank": 46, "score": 184576.01465982804 }, { "content": "fn process_lines_p2(lines: impl Iterator<Item = String>) -> HashMap<usize, u64> {\n\n let mut result = HashMap::new();\n\n\n\n let mut set_mask = 0u64;\n\n let mut clear_mask = 0u64;\n\n let mut all_fluc = vec![0u64];\n\n\n\n for line in lines {\n\n let parts: Vec<&str> = line.split(\" = \").collect();\n\n match parts[0] {\n\n \"mask\" => {\n\n let pm = parse_mask(parts[1]);\n\n all_fluc = all_fluc_from_fluc(&pm.2);\n\n // println!(\"{:?} {:?}\", &pm, &all_fluc);\n\n set_mask = pm.0;\n\n clear_mask = pm.1;\n\n }\n\n\n\n _ => {\n\n let loc: usize = parts[0][4..(parts[0].len() - 1)].parse().unwrap();\n", "file_path": "day14/src/main.rs", "rank": 47, "score": 183501.38374847863 }, { "content": "fn parse_lines(lines: impl Iterator<Item = String>) -> HashMap<Location, SeatState> {\n\n let mut result = HashMap::new();\n\n for (row, line) in lines.enumerate() {\n\n for (col, char) in line.chars().enumerate() {\n\n if char == 'L' {\n\n result.insert(\n\n Location {\n\n x: row as i64,\n\n y: col as i64,\n\n },\n\n Empty,\n\n );\n\n }\n\n }\n\n }\n\n result\n\n}\n\n\n", "file_path": "day11/src/main.rs", "rank": 48, "score": 183501.38374847863 }, { "content": "fn solve_part1(lines: impl Iterator<Item = String>) -> i64 {\n\n let (fields, _, tickets) = process_lines(lines);\n\n tickets.iter().flat_map(|t| t.iter()).fold(\n\n 0,\n\n |acc, n| {\n\n if is_valid(&fields, *n) {\n\n acc\n\n } else {\n\n acc + n\n\n }\n\n },\n\n )\n\n}\n\n\n", "file_path": "day16/src/main.rs", "rank": 49, "score": 172526.7584671081 }, { "content": "fn solve_part1(lines: impl Iterator<Item = String>) -> i64 {\n\n let mut state = parse_to_state(lines);\n\n println!(\"{:?}\", state);\n\n state = perform_step(state);\n\n\n\n // for y in 0..4 {\n\n // for x in 0..3 {\n\n // print!(\n\n // \"{}\",\n\n // (if state.contains(&Point(x, y, 0)) 
{\n\n // \"#\"\n\n // } else {\n\n // \".\"\n\n // })\n\n // )\n\n // }\n\n // println!(\"\");\n\n // }\n\n\n\n // println!(\"{:?}\", state);\n\n state = perform_step(state);\n\n state = perform_step(state);\n\n state = perform_step(state);\n\n state = perform_step(state);\n\n state = perform_step(state);\n\n state.len() as i64\n\n}\n\n\n", "file_path": "day17/src/main.rs", "rank": 50, "score": 172526.7584671081 }, { "content": "pub fn parse_mask(mask: &str) -> (u64, u64, Vec<u64>) {\n\n let mut set_mask = 0u64;\n\n let mut clear_mask = 0u64;\n\n let mut fluc = vec![];\n\n\n\n for (loc, c) in mask.chars().enumerate() {\n\n match c {\n\n '1' => set_mask |= 1u64 << (35 - loc),\n\n '0' => clear_mask |= 1u64 << (35 - loc),\n\n _ => fluc.push(1u64 << (35 - loc)),\n\n }\n\n }\n\n\n\n (set_mask, clear_mask, fluc)\n\n}\n\n\n", "file_path": "day14/src/main.rs", "rank": 51, "score": 167228.12733293613 }, { "content": "fn parse_spec(spec: &String) -> (std::ops::RangeInclusive<usize>, char, &str) {\n\n let elements: Vec<&str> = (*spec).split_whitespace().collect();\n\n let pwd = elements[2];\n\n let range = parse_range(elements[0]);\n\n let c = elements[1]\n\n .chars()\n\n .next()\n\n .expect(\"Expected a character at position 2\");\n\n (range, c, pwd)\n\n}\n\n\n", "file_path": "day2/src/main.rs", "rank": 53, "score": 158032.27859015897 }, { "content": "fn test_lines() -> impl Iterator<Item = String> {\n\n let source = indoc! {\".#.\n\n ..#\n\n ###\"};\n\n source.split(\"\\n\").map(|l| l.to_string())\n\n}\n\n\n", "file_path": "day17/src/tests.rs", "rank": 54, "score": 157603.30659774187 }, { "content": "fn test_lines2() -> impl Iterator<Item = String> {\n\n let source = indoc! 
{\"mask = 000000000000000000000000000000X1001X\n\n mem[42] = 100\n\n mask = 00000000000000000000000000000000X0XX\n\n mem[26] = 1\"};\n\n source.split(\"\\n\").map(|l| l.to_string())\n\n}\n\n\n", "file_path": "day14/src/tests.rs", "rank": 55, "score": 157603.30659774187 }, { "content": "fn test_lines() -> impl Iterator<Item = String> {\n\n let source = indoc! {\"mask = XXXXXXXXXXXXXXXXXXXXXXXXXXXXX1XXXX0X\n\n\t\t\t\t\t\t\tmem[8] = 11\n\n\t\t\t\t\t\t\tmem[7] = 101\n\n\t\t\t\t\t\t\tmem[8] = 0\"};\n\n source.split(\"\\n\").map(|l| l.to_string())\n\n}\n\n\n", "file_path": "day14/src/tests.rs", "rank": 56, "score": 157603.30659774187 }, { "content": "fn test_lines() -> impl Iterator<Item = String> {\n\n let source = indoc! {\"class: 1-3 or 5-7\n\n row: 6-11 or 33-44\n\n seat: 13-40 or 45-50\n\n \n\n your ticket:\n\n 7,1,14\n\n \n\n nearby tickets:\n\n 7,3,47\n\n 40,4,50\n\n 55,2,20\n\n 38,6,12\"};\n\n source.split(\"\\n\").map(|l| l.to_string())\n\n}\n\n\n", "file_path": "day16/src/tests.rs", "rank": 57, "score": 157603.30659774187 }, { "content": "fn required_keys() -> HashSet<&'static str> {\n\n let array = [\"byr\", \"iyr\", \"eyr\", \"hgt\", \"hcl\", \"ecl\", \"pid\"]; // , \"cid\"]; cid optional\n\n let mut result = HashSet::<&str>::new();\n\n for s in array.iter() {\n\n result.insert(s);\n\n }\n\n result\n\n}\n\n\n", "file_path": "day4/src/main.rs", "rank": 58, "score": 157461.58544245473 }, { "content": "fn test_lines_p2() -> impl Iterator<Item = String> {\n\n let source = indoc! 
{\"cclass: 0-1 or 4-19\n\n row: 0-5 or 8-19\n\n seat: 0-13 or 16-19\n\n \n\n your ticket:\n\n 11,12,13\n\n \n\n nearby tickets:\n\n 3,9,18\n\n 15,1,5\n\n 5,14,9\n\n 99,1,1\"};\n\n source.split(\"\\n\").map(|l| l.to_string())\n\n}\n\n\n", "file_path": "day16/src/tests.rs", "rank": 59, "score": 155300.41540430675 }, { "content": "fn part2() {\n\n // let result = \"None Yet\";\n\n let mem = process_lines_p2(file::lines());\n\n let result = sum_memory(&mem);\n\n aoc::print_solution2(format!(\"{:?} \", result).as_str());\n\n}\n\n\n", "file_path": "day14/src/main.rs", "rank": 60, "score": 151311.2991120402 }, { "content": "fn part2() {\n\n // let result = \"None Yet\";\n\n let result = solve_part2(file::lines());\n\n aoc::print_solution2(format!(\"{:?} \", result).as_str());\n\n}\n\n\n", "file_path": "day17/src/main.rs", "rank": 61, "score": 151311.2991120402 }, { "content": "fn part2() {\n\n let result = \"None Yet\";\n\n // let result = \"None Yet\";\n\n let result = nth_number(vec![17, 1, 3, 16, 19, 0], 30_000_000);\n\n aoc::print_solution2(format!(\"{:?} \", result).as_str());\n\n}\n\n\n", "file_path": "day15/src/main.rs", "rank": 62, "score": 151311.2991120402 }, { "content": "fn part2() {\n\n // let result = \"None Yet\";\n\n\n\n let mut ns = NavStateTwo::new();\n\n for line in file::lines() {\n\n ns.perform_line(&line[..]);\n\n //println!(\"{} -> {:?}\", &line, ns)\n\n }\n\n\n\n let result = format!(\"{:?} => {}\", ns, ns.manhattan_distance());\n\n aoc::print_solution2(format!(\"{:?} \", result).as_str());\n\n}\n\n\n", "file_path": "day12/src/main.rs", "rank": 63, "score": 151311.2991120402 }, { "content": "fn part2() {\n\n let mut correct_count = 0;\n\n let mut invalid_count = 0;\n\n\n\n for (_i, line) in file::lines().enumerate() {\n\n let was_correct = validate2(&line);\n\n // println!(\"{} [{}]: {}\", i, format::bool(was_correct), line);\n\n if was_correct {\n\n correct_count += 1;\n\n } else {\n\n invalid_count += 1;\n\n }\n\n }\n\n\n\n println!(\"\\nPart 
2:\");\n\n println!(\n\n \"{}: {}/{total} | {} : {}/{total}\",\n\n format::bool(true),\n\n correct_count,\n\n format::bool(false),\n\n invalid_count,\n\n total = correct_count + invalid_count\n\n );\n\n}\n\n\n", "file_path": "day2/src/main.rs", "rank": 64, "score": 151311.2991120402 }, { "content": "fn part2() {\n\n // let result = \"None Yet\";\n\n\n\n let stats = stats_at_end_p2();\n\n let result = stats.0;\n\n aoc::print_solution2(format!(\"{:?} \", result).as_str());\n\n}\n\n\n", "file_path": "day11/src/main.rs", "rank": 65, "score": 151311.2991120402 }, { "content": "fn part2() {\n\n let result = \"None Yet\";\n\n\n\n let mut program = read_program();\n\n let mut index = 0;\n\n\n\n loop {\n\n let ins = &program[index].clone();\n\n match ins {\n\n Instruction::Nop(n) => {\n\n program[index] = Instruction::Jmp(*n);\n\n println!(\"Switch at {} : {:?}\", index, run(&program));\n\n program[index] = *ins;\n\n }\n\n Instruction::Jmp(n) => {\n\n program[index] = Instruction::Nop(*n);\n\n println!(\"Switch at {} : {:?}\", index, run(&program));\n\n program[index] = *ins;\n\n }\n\n\n", "file_path": "day8/src/main.rs", "rank": 66, "score": 151311.2991120402 }, { "content": "fn part2() {\n\n let mut chars: HashSet<char> = HashSet::new();\n\n let mut result = 0;\n\n\n\n for (i, section) in file::sections().enumerate() {\n\n let mut count = 0;\n\n\n\n for (pos, line) in section.enumerate() {\n\n if pos == 0 {\n\n chars.extend(line.chars());\n\n } else {\n\n chars = line.chars().filter(|c| chars.contains(c)).collect();\n\n }\n\n count += 1;\n\n }\n\n result += chars.len();\n\n println!(\n\n \"{}: {} lines {:?}\",\n\n i,\n\n count,\n\n chars.iter().sorted().collect::<String>()\n\n );\n\n chars.drain();\n\n }\n\n aoc::print_solution2(format!(\"{} all yes answers\", result).as_str());\n\n}\n", "file_path": "day6/src/main.rs", "rank": 67, "score": 151311.2991120402 }, { "content": "fn part2() {\n\n let mut viable_passports = 0;\n\n let mut still_valid = true;\n\n let mut 
outstanding_keys = required_keys();\n\n\n\n let check = |keys: &HashSet<&str>, count: &mut i32| {\n\n if keys.is_empty() {\n\n *count += 1;\n\n println!(\"🎫 viable! [{}]\", count);\n\n }\n\n };\n\n\n\n 'lineloop: for line in file::lines() {\n\n if line == \"\" {\n\n check(&outstanding_keys, &mut viable_passports);\n\n outstanding_keys = required_keys();\n\n still_valid = true;\n\n println!(\"\");\n\n } else if still_valid {\n\n for element in line.split_whitespace() {\n", "file_path": "day4/src/main.rs", "rank": 69, "score": 151311.2991120402 }, { "content": "fn part2() {\n\n // let result = \"None Yet\";\n\n let result = solve_part2(file::lines());\n\n aoc::print_solution2(format!(\"{:?} \", result).as_str());\n\n}\n\n\n", "file_path": "day16/src/main.rs", "rank": 70, "score": 151311.2991120402 }, { "content": "fn part2() {\n\n // let result = \"None Yet\";\n\n\n\n let mut result: i64 = -1;\n\n\n\n let line = file::lines().skip(1).next().unwrap();\n\n let mut indexed =\n\n line.split(\",\")\n\n .enumerate()\n\n .fold(vec![], |mut acc: Vec<(usize, i64)>, (i, s)| {\n\n if s != \"x\" {\n\n acc.push((i, s.parse().unwrap()));\n\n }\n\n acc\n\n });\n\n\n\n indexed.sort_by(|(_, a), (_, b)| b.partial_cmp(&a).unwrap());\n\n println!(\"{:?}\", &indexed);\n\n\n\n let mut iteration = 1i64;\n", "file_path": "day13/src/main.rs", "rank": 71, "score": 151311.2991120402 }, { "content": "fn part2() {\n\n println!(\"\\nPart 2:\");\n\n\n\n let tree_map: Vec<String> = file::lines().collect();\n\n let map_width = tree_map[0].len();\n\n\n\n let mut treecounts = 1;\n\n let mut slopes: Vec<Point> = vec![];\n\n slopes.push(Point::new(1, 1));\n\n slopes.push(Point::new(3, 1));\n\n slopes.push(Point::new(5, 1));\n\n slopes.push(Point::new(7, 1));\n\n slopes.push(Point::new(1, 2));\n\n\n\n for slope in slopes {\n\n let mut location = Point::new(0, 0);\n\n\n\n let mut treecount = 0;\n\n\n\n while location.y < tree_map.len() as i64 {\n", "file_path": "day3/src/main.rs", "rank": 72, "score": 
151311.2991120402 }, { "content": "fn all_fluc_from_fluc(fluc: &Vec<u64>) -> Vec<u64> {\n\n let mut all_fluc = vec![0u64];\n\n for n in fluc {\n\n for o in all_fluc.clone() {\n\n all_fluc.push(*n + o);\n\n }\n\n }\n\n all_fluc\n\n}\n\n\n", "file_path": "day14/src/main.rs", "rank": 73, "score": 141367.92549706495 }, { "content": "fn generate_seat_map(seats: &HashMap<Location, SeatState>) -> HashMap<Location, Vec<Location>> {\n\n let mut max_x = 0i64;\n\n let mut max_y = 0i64;\n\n for (loc, _) in seats {\n\n max_x = max_x.max(loc.x);\n\n max_y = max_y.max(loc.y);\n\n }\n\n\n\n let mut result = HashMap::new();\n\n for (loc, _) in seats {\n\n let mut relevants: Vec<Location> = Vec::new();\n\n for x in -1i64..=1 {\n\n for y in -1i64..=1 {\n\n if (x, y) != (0, 0) {\n\n // search for relevance\n\n let movement = Location { x, y };\n\n let mut test_loc = *loc;\n\n loop {\n\n test_loc = test_loc + movement;\n\n if let Some(_) = seats.get(&test_loc) {\n", "file_path": "day11/src/main.rs", "rank": 74, "score": 140283.73497857995 }, { "content": "fn progresult(prog_string: &str, noun: i64, verb: i64) -> i64 {\n\n let mut program: Vec<i64> = prog_string\n\n .split(\",\")\n\n .map(|n| n.parse::<i64>().expect(\"Expected Integer String\"))\n\n .collect();\n\n\n\n let mut pc: usize = 0;\n\n\n\n // adjust program as per instruction\n\n program[1] = noun;\n\n program[2] = verb;\n\n\n\n while program[pc] != 99 {\n\n execute(&mut program, &mut pc);\n\n }\n\n return program[0];\n\n}\n", "file_path": "Preparation/2019/day2/src/main.rs", "rank": 75, "score": 138014.1382239037 }, { "content": "pub fn print_key_value(key: &str, value: &str) {\n\n let line: &[ANSIString] = &[\n\n Style::new().italic().paint(format!(\"{}:\", key)),\n\n Style::new().paint(\" \"),\n\n Style::new().bold().paint(value),\n\n ];\n\n\n\n let ansi_line = ANSIStrings(line);\n\n println!(\"{}\", ansi_line);\n\n}\n", "file_path": "day12/src/aoc.rs", "rank": 76, "score": 137843.82906543507 }, { "content": "pub fn 
print_key_value(key: &str, value: &str) {\n\n let line: &[ANSIString] = &[\n\n Style::new().italic().paint(format!(\"{}:\", key)),\n\n Style::new().paint(\" \"),\n\n Style::new().bold().paint(value),\n\n ];\n\n\n\n let ansi_line = ANSIStrings(line);\n\n println!(\"{}\", ansi_line);\n\n}\n", "file_path": "day8/src/aoc.rs", "rank": 77, "score": 137843.82906543507 }, { "content": "pub fn print_key_value(key: &str, value: &str) {\n\n let line: &[ANSIString] = &[\n\n Style::new().italic().paint(format!(\"{}:\", key)),\n\n Style::new().paint(\" \"),\n\n Style::new().bold().paint(value),\n\n ];\n\n\n\n let ansi_line = ANSIStrings(line);\n\n println!(\"{}\", ansi_line);\n\n}\n", "file_path": "day15/src/aoc.rs", "rank": 78, "score": 137843.82906543507 }, { "content": "pub fn print_key_value(key: &str, value: &str) {\n\n let line: &[ANSIString] = &[\n\n Style::new().italic().paint(format!(\"{}:\", key)),\n\n Style::new().paint(\" \"),\n\n Style::new().bold().paint(value),\n\n ];\n\n\n\n let ansi_line = ANSIStrings(line);\n\n println!(\"{}\", ansi_line);\n\n}\n", "file_path": "day7/src/aoc.rs", "rank": 79, "score": 137843.82906543507 }, { "content": "pub fn print_key_value(key: &str, value: &str) {\n\n let line: &[ANSIString] = &[\n\n Style::new().italic().paint(format!(\"{}:\", key)),\n\n Style::new().paint(\" \"),\n\n Style::new().bold().paint(value),\n\n ];\n\n\n\n let ansi_line = ANSIStrings(line);\n\n println!(\"{}\", ansi_line);\n\n}\n", "file_path": "day16/src/aoc.rs", "rank": 80, "score": 137843.82906543507 }, { "content": "pub fn print_key_value(key: &str, value: &str) {\n\n let line: &[ANSIString] = &[\n\n Style::new().italic().paint(format!(\"{}:\", key)),\n\n Style::new().paint(\" \"),\n\n Style::new().bold().paint(value),\n\n ];\n\n\n\n let ansi_line = ANSIStrings(line);\n\n println!(\"{}\", ansi_line);\n\n}\n", "file_path": "day14/src/aoc.rs", "rank": 81, "score": 137843.82906543507 }, { "content": "pub fn print_key_value(key: &str, value: &str) {\n\n let line: 
&[ANSIString] = &[\n\n Style::new().italic().paint(format!(\"{}:\", key)),\n\n Style::new().paint(\" \"),\n\n Style::new().bold().paint(value),\n\n ];\n\n\n\n let ansi_line = ANSIStrings(line);\n\n println!(\"{}\", ansi_line);\n\n}\n", "file_path": "day4/src/aoc.rs", "rank": 82, "score": 137843.82906543507 }, { "content": "pub fn print_key_value(key: &str, value: &str) {\n\n let line: &[ANSIString] = &[\n\n Style::new().italic().paint(format!(\"{}:\", key)),\n\n Style::new().paint(\" \"),\n\n Style::new().bold().paint(value),\n\n ];\n\n\n\n let ansi_line = ANSIStrings(line);\n\n println!(\"{}\", ansi_line);\n\n}\n", "file_path": "day11/src/aoc.rs", "rank": 83, "score": 137843.82906543507 }, { "content": "pub fn print_key_value(key: &str, value: &str) {\n\n let line: &[ANSIString] = &[\n\n Style::new().italic().paint(format!(\"{}:\", key)),\n\n Style::new().paint(\" \"),\n\n Style::new().bold().paint(value),\n\n ];\n\n\n\n let ansi_line = ANSIStrings(line);\n\n println!(\"{}\", ansi_line);\n\n}\n", "file_path": "day10/src/aoc.rs", "rank": 84, "score": 137843.82906543507 }, { "content": "pub fn print_key_value(key: &str, value: &str) {\n\n let line: &[ANSIString] = &[\n\n Style::new().italic().paint(format!(\"{}:\", key)),\n\n Style::new().paint(\" \"),\n\n Style::new().bold().paint(value),\n\n ];\n\n\n\n let ansi_line = ANSIStrings(line);\n\n println!(\"{}\", ansi_line);\n\n}\n", "file_path": "day6/src/aoc.rs", "rank": 85, "score": 137843.82906543507 }, { "content": "pub fn print_key_value(key: &str, value: &str) {\n\n let line: &[ANSIString] = &[\n\n Style::new().italic().paint(format!(\"{}:\", key)),\n\n Style::new().paint(\" \"),\n\n Style::new().bold().paint(value),\n\n ];\n\n\n\n let ansi_line = ANSIStrings(line);\n\n println!(\"{}\", ansi_line);\n\n}\n", "file_path": "day13/src/aoc.rs", "rank": 86, "score": 137843.82906543507 }, { "content": "pub fn print_key_value(key: &str, value: &str) {\n\n let line: &[ANSIString] = &[\n\n 
Style::new().italic().paint(format!(\"{}:\", key)),\n\n Style::new().paint(\" \"),\n\n Style::new().bold().paint(value),\n\n ];\n\n\n\n let ansi_line = ANSIStrings(line);\n\n println!(\"{}\", ansi_line);\n\n}\n", "file_path": "day17/src/aoc.rs", "rank": 87, "score": 137843.82906543507 }, { "content": "pub fn print_key_value(key: &str, value: &str) {\n\n let line: &[ANSIString] = &[\n\n Style::new().italic().paint(format!(\"{}:\", key)),\n\n Style::new().paint(\" \"),\n\n Style::new().bold().paint(value),\n\n ];\n\n\n\n let ansi_line = ANSIStrings(line);\n\n println!(\"{}\", ansi_line);\n\n}\n", "file_path": "day9/src/aoc.rs", "rank": 88, "score": 137843.82906543507 }, { "content": "fn read_program() -> Vec<Instruction> {\n\n let mut program: Vec<Instruction> = vec![];\n\n for ins in file::lines() {\n\n let pair: Vec<&str> = ins.split(\" \").collect();\n\n let n: i64 = pair[1].parse().expect(\"Needs to be a parseable number\");\n\n let i = match pair[0] {\n\n \"jmp\" => Instruction::Jmp(n as isize),\n\n \"acc\" => Instruction::Acc(n),\n\n \"nop\" => Instruction::Nop(n as isize),\n\n _ => panic!(\"Unknown instruction found! 
{}\", ins),\n\n };\n\n\n\n program.push(i);\n\n }\n\n program\n\n}\n\n\n", "file_path": "day8/src/main.rs", "rank": 89, "score": 136933.02613655743 }, { "content": "pub fn print_solution1(s: &str) {\n\n print_solution(s, 1);\n\n}\n\n\n", "file_path": "day11/src/aoc.rs", "rank": 91, "score": 133796.7737538148 }, { "content": "pub fn print_solution1(s: &str) {\n\n print_solution(s, 1);\n\n}\n\n\n", "file_path": "day7/src/aoc.rs", "rank": 92, "score": 133796.7737538148 }, { "content": "pub fn print_solution2(s: &str) {\n\n print_solution(s, 2);\n\n}\n\n\n", "file_path": "day11/src/aoc.rs", "rank": 93, "score": 133796.7737538148 }, { "content": "pub fn print_solution1(s: &str) {\n\n print_solution(s, 1);\n\n}\n\n\n", "file_path": "day9/src/aoc.rs", "rank": 94, "score": 133796.7737538148 }, { "content": "pub fn print_solution1(s: &str) {\n\n print_solution(s, 1);\n\n}\n\n\n", "file_path": "day13/src/aoc.rs", "rank": 95, "score": 133796.7737538148 }, { "content": "pub fn print_solution2(s: &str) {\n\n print_solution(s, 2);\n\n}\n\n\n", "file_path": "day10/src/aoc.rs", "rank": 96, "score": 133796.7737538148 }, { "content": "pub fn print_solution1(s: &str) {\n\n print_solution(s, 1);\n\n}\n\n\n", "file_path": "day10/src/aoc.rs", "rank": 97, "score": 133796.7737538148 }, { "content": "pub fn print_solution2(s: &str) {\n\n print_solution(s, 2);\n\n}\n\n\n", "file_path": "day7/src/aoc.rs", "rank": 98, "score": 133796.7737538148 }, { "content": "pub fn print_solution2(s: &str) {\n\n print_solution(s, 2);\n\n}\n\n\n", "file_path": "day9/src/aoc.rs", "rank": 99, "score": 133796.7737538148 } ]
Rust
fortress/src/lib/enemies/state/enemy_state_machine.rs
j-rock/fortress
23b71bbd75afe75370b59e2117893f1023142c17
use crate::{ audio::{ AudioPlayer, Sound, }, dimensions::{ Attack, Reverse, time::{ DeltaTime, Microseconds, } }, enemies::{ DamageTextWriter, EnemySystemConfig, EnemyConfig, EnemyState, state::EnemyBody, }, items::{ ItemPickup, ItemSystem, ItemType, types::SkullType, }, particles::{ ParticleEvent, ParticleSystem, }, physics::PhysicsSimulation, render::{ LightDependentSpriteData, LightDependentSpriteRenderer, NamedSpriteSheet, SpriteSheetFrameId, }, }; use glm; use nalgebra::{ Point2, Vector2, }; pub enum EnemyStateMachine { Base(EnemyBody, Microseconds), Dying(Option<Point2<f64>>, Microseconds), Dead } impl EnemyStateMachine { pub fn new(body: EnemyBody) -> Self { Self::Base(body, 0) } pub fn pre_update(&mut self, config: &EnemyConfig, dt: DeltaTime, player_locs: &Vec<Point2<f64>>, enemy_state: &mut EnemyState) -> Option<Self> { match self { Self::Base(body, time_elapsed) => { *time_elapsed += dt.as_microseconds(); body.move_to_target(config, player_locs); if let Some(direction) = body.velocity() { enemy_state.set_facing_dir(direction); } }, Self::Dying(_, time_elapsed) => { *time_elapsed += dt.as_microseconds(); }, _ => {}, } None } pub fn take_attack(&self, config: &EnemySystemConfig, attack: Attack, bullet_direction: Option<Vector2<f64>>, enemy_state: &mut EnemyState, particles: &mut ParticleSystem, damage_text: &mut DamageTextWriter) { if let Self::Base(body, _) = self { let damage = attack.damage; enemy_state.take_attack(attack); if let Some(position) = body.position() { let blood_color = glm::vec3(config.enemy.blood_color.0, config.enemy.blood_color.1, config.enemy.blood_color.2); let blood_event = ParticleEvent::blood(position.clone(), blood_color, config.enemy.num_blood_particles_per_hit); particles.queue_event(blood_event); damage_text.add_damage(&config.damage_text, damage, position, bullet_direction); } } } pub fn post_update(&mut self, config: &EnemyConfig, audio: &AudioPlayer, enemy_state: &EnemyState, items: &mut ItemSystem, physics_sim: &mut 
PhysicsSimulation) -> Option<Self> { match self { Self::Base(body, _) if !enemy_state.health().alive() => { audio.play_sound(Sound::EnemyKilled); let position = body.position(); Some(Self::Dying(position, 0)) }, Self::Dying(position, time_elapsed) if *time_elapsed >= config.dying_duration_micros => { if let Some(position) = position { let item_pickup = ItemPickup::new(ItemType::Skull(SkullType::Regular), enemy_state.facing_dir()); items.spawn_item(item_pickup, position.clone(), physics_sim); } Some(Self::Dead) }, _ => None } } pub fn queue_draw(&self, config: &EnemyConfig, enemy_state: &EnemyState, sprite_renderer: &mut LightDependentSpriteRenderer) { let image_name = match self { Self::Dying(_, _) => String::from("enemy1_dying.png"), _ => String::from("enemy1.png") }; let frame = match self { Self::Base(_, time_elapsed) => (*time_elapsed / config.walk_frame_duration_micros) as usize, Self::Dying(_, time_elapsed) => (*time_elapsed / config.dying_frame_duration_micros) as usize, _ => 0, }; let reverse = if enemy_state.facing_dir().is_left() { Reverse::none() } else { Reverse::horizontally() }; if let Some(position) = self.position() { let world_half_size = glm::vec2(config.physical_radius as f32, config.physical_radius as f32) * config.render_scale; let world_center_position = glm::vec3(position.x as f32, world_half_size.y, -position.y as f32); sprite_renderer.queue(LightDependentSpriteData { world_center_position, world_half_size, sprite_frame_id: SpriteSheetFrameId::new(image_name, NamedSpriteSheet::SpriteSheet1), frame, unit_world_rotation: Vector2::new(0.0, 0.0), reverse, }); } } pub fn dead(&self) -> bool { match self { Self::Dead => true, _ => false, } } fn position(&self) -> Option<Point2<f64>> { match self { Self::Base(body, _) => body.position(), Self::Dying(position, _) => *position, _ => None } } }
use crate::{ audio::{ AudioPlayer, Sound, }, dimensions::{ Attack, Reverse, time::{ DeltaTime, Microseconds, } }, enemies::{ DamageTextWriter, EnemySystemConfig, EnemyConfig, EnemyState, state::EnemyBody, }, items::{ ItemPickup, ItemSystem, ItemType, types::SkullType, }, particles::{ ParticleEvent, ParticleSystem, }, physics::PhysicsSimulation, render::{ LightDependentSpriteData, LightDependentSpriteRenderer, NamedSpriteSheet, SpriteSheetFrameId, }, }; use glm; use nalgebra::{ Point2, Vector2, }; pub enum EnemyStateMachine { Base(EnemyBody, Microseconds), Dying(Option<Point2<f64>>, Microseconds), Dead } impl EnemyStateMachine { pub fn new(body: EnemyBody) -> Self { Self::Base(body, 0) } pub fn pre_update(&mut self, config: &EnemyConfig, dt: DeltaTime, player_locs: &Vec<Point2<f64>>, enemy_state: &mut EnemyState) -> Option<Self> { match self { Self::Base(body, time_elapsed) => { *time_elapsed += dt.as_microseconds(); body.move_to_target(config, player_locs); if let Some(direction) = body.velocity() { enemy_state.set_facing_dir(direction); } }, Self::Dying(_, time_elapsed) => { *time_elapsed += dt.as_microseconds(); }, _ => {}, } None } pub fn take_attack(&self, config: &EnemySystemConfig, attack: Attack, bullet_direction: Option<Vector2<f64>>, enemy_state: &mut EnemyState, particles: &mut ParticleSystem, damage_text: &mut DamageTextWriter) { if let Self::Base(body, _) = self { let damage = attack.damage; enemy_state.take_attack(attack); if let Some(position) = body.position() { let blood_color = glm::vec3(config.enemy.blood_color.0, config.enemy.blood_color.1, config.enemy.blood_color.2); let blood_event = ParticleEvent::blood(position.clone(), blood_color, config.enemy.num_blood_particles_per_hit); particles.queue_event(blood_event); damage_text.add_damage(&config.damage_text, damage, position, bullet_direction); } } }
let image_name = match self { Self::Dying(_, _) => String::from("enemy1_dying.png"), _ => String::from("enemy1.png") }; let frame = match self { Self::Base(_, time_elapsed) => (*time_elapsed / config.walk_frame_duration_micros) as usize, Self::Dying(_, time_elapsed) => (*time_elapsed / config.dying_frame_duration_micros) as usize, _ => 0, }; let reverse = if enemy_state.facing_dir().is_left() { Reverse::none() } else { Reverse::horizontally() }; if let Some(position) = self.position() { let world_half_size = glm::vec2(config.physical_radius as f32, config.physical_radius as f32) * config.render_scale; let world_center_position = glm::vec3(position.x as f32, world_half_size.y, -position.y as f32); sprite_renderer.queue(LightDependentSpriteData { world_center_position, world_half_size, sprite_frame_id: SpriteSheetFrameId::new(image_name, NamedSpriteSheet::SpriteSheet1), frame, unit_world_rotation: Vector2::new(0.0, 0.0), reverse, }); } } pub fn dead(&self) -> bool { match self { Self::Dead => true, _ => false, } } fn position(&self) -> Option<Point2<f64>> { match self { Self::Base(body, _) => body.position(), Self::Dying(position, _) => *position, _ => None } } }
pub fn post_update(&mut self, config: &EnemyConfig, audio: &AudioPlayer, enemy_state: &EnemyState, items: &mut ItemSystem, physics_sim: &mut PhysicsSimulation) -> Option<Self> { match self { Self::Base(body, _) if !enemy_state.health().alive() => { audio.play_sound(Sound::EnemyKilled); let position = body.position(); Some(Self::Dying(position, 0)) }, Self::Dying(position, time_elapsed) if *time_elapsed >= config.dying_duration_micros => { if let Some(position) = position { let item_pickup = ItemPickup::new(ItemType::Skull(SkullType::Regular), enemy_state.facing_dir()); items.spawn_item(item_pickup, position.clone(), physics_sim); } Some(Self::Dead) }, _ => None } } pub fn queue_draw(&self, config: &EnemyConfig, enemy_state: &EnemyState, sprite_renderer: &mut LightDependentSpriteRenderer) {
random
[ { "content": "pub fn milliseconds(t: i64) -> Microseconds {\n\n t * 1000\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct DeltaTime {\n\n microseconds_elapsed: Microseconds\n\n}\n\n\n\nimpl DeltaTime {\n\n fn duration_to_microseconds(duration: Duration) -> Microseconds {\n\n let nanos = Microseconds::from(duration.subsec_nanos());\n\n 1_000_000 * duration.as_secs() as i64 + nanos / 1000\n\n }\n\n\n\n pub fn new(duration: Duration) -> DeltaTime {\n\n DeltaTime {\n\n microseconds_elapsed: Self::duration_to_microseconds(duration)\n\n }\n\n }\n", "file_path": "fortress/src/lib/dimensions/time.rs", "rank": 0, "score": 188126.98751587694 }, { "content": "pub fn err_print(file: &'static str, line: u32) {\n\n unsafe {\n\n let error = gl::GetError();\n\n if error != gl::NO_ERROR {\n\n println!(\"Error was {} at {}:{}\", error, file, line);\n\n }\n\n }\n\n}", "file_path": "fortress/src/lib/render/opengl.rs", "rank": 1, "score": 133310.25026165193 }, { "content": "pub trait Config: Sized {\n\n fn from_path(path_buf: &PathBuf) -> StatusOr<Self>;\n\n}\n\n\n\nimpl<T: DeserializeOwned> Config for T {\n\n fn from_path(path_buf: &PathBuf) -> StatusOr<T> {\n\n let reader = file::util::reader(path_buf)?;\n\n ron::de::from_reader(reader)\n\n .map_err(|e| format!(\"Couldn't parse config {:?}: {}\", path_buf, e))\n\n }\n\n}\n\n\n", "file_path": "fortress_bake/src/file/config.rs", "rank": 2, "score": 127108.96312537641 }, { "content": "#[derive(Copy, Clone, PartialEq, Eq, Hash)]\n\nenum UniformKey {\n\n Texture(TextureUnit)\n\n}\n\n\n\nimpl ShaderUniformKey for UniformKey {\n\n fn to_cstring(self) -> CString {\n\n match self {\n\n UniformKey::Texture(texture_unit) => {\n\n CString::new(texture_unit.uniform_name()).expect(\"Bad texture\")\n\n }\n\n }\n\n }\n\n}\n\n\n\npub struct BackgroundRenderer {\n\n config_manager: SimpleConfigManager<BackgroundRendererConfig>,\n\n shader_program: ShaderProgram<UniformKey>,\n\n attribute_program: AttributeProgram,\n\n attr_vertex: 
Attribute<VertexAttr>,\n\n attr_texel: Attribute<TexelAttr>,\n", "file_path": "fortress/src/lib/render/renderer/background_renderer.rs", "rank": 3, "score": 113745.47355882835 }, { "content": "pub fn resource_base() -> PathBuf {\n\n RESOURCE_BASE.to_path_buf()\n\n}\n\n\n", "file_path": "fortress_bake/src/file/util.rs", "rank": 4, "score": 113290.153983731 }, { "content": "#[derive(Copy, Clone, PartialEq, Eq, Hash)]\n\nenum UniformKey {\n\n ProjectionView,\n\n CameraRight,\n\n CameraUp,\n\n}\n\n\n\nimpl ShaderUniformKey for UniformKey {\n\n fn to_cstring(self) -> CString {\n\n let s = match self {\n\n UniformKey::ProjectionView => \"projection_view\",\n\n UniformKey::CameraRight => \"camera_right\",\n\n UniformKey::CameraUp => \"camera_up\",\n\n };\n\n CString::new(s).expect(\"Bad cstring\")\n\n }\n\n}\n\n\n\npub struct ParticleSystem {\n\n config: SimpleConfigManager<ParticleConfig>,\n\n shader_program: ShaderProgram<UniformKey>,\n", "file_path": "fortress/src/lib/particles/particle_system.rs", "rank": 5, "score": 113279.51580493733 }, { "content": "#[derive(Copy, Clone, PartialEq, Eq, Hash)]\n\nenum UniformKey {\n\n LightsPosition(usize),\n\n LightsColor(usize),\n\n LightsAttenuation(usize),\n\n NumLights,\n\n ProjectionView,\n\n PositionIndependentView,\n\n CameraRight,\n\n CameraUp,\n\n Texture(TextureUnit),\n\n}\n\n\n\nimpl ShaderUniformKey for UniformKey {\n\n fn to_cstring(self) -> CString {\n\n match self {\n\n UniformKey::NumLights => CString::new(\"num_lights\").expect(\"Bad cstring\"),\n\n UniformKey::LightsPosition(idx) => {\n\n let s = format!(\"lights[{}].position\", idx);\n\n CString::new(s).expect(\"Bad cstring\")\n\n },\n", "file_path": "fortress/src/lib/render/renderer/light_dependent_sprite_renderer.rs", "rank": 6, "score": 111349.64720693107 }, { "content": "#[derive(Copy, Clone, PartialEq, Eq, Hash)]\n\nenum UniformKey {\n\n ProjectionView,\n\n PositionIndependentView,\n\n CameraRight,\n\n CameraUp,\n\n Texture(TextureUnit)\n\n}\n\n\n\nimpl 
ShaderUniformKey for UniformKey {\n\n fn to_cstring(self) -> CString {\n\n let string = match self {\n\n UniformKey::ProjectionView => \"projection_view\",\n\n UniformKey::PositionIndependentView => \"position_independent_view\",\n\n UniformKey::CameraRight => \"camera_right\",\n\n UniformKey::CameraUp => \"camera_up\",\n\n UniformKey::Texture(texture_unit) => texture_unit.uniform_name(),\n\n };\n\n CString::new(string).expect(\"Bad cstring\")\n\n }\n\n}\n", "file_path": "fortress/src/lib/render/renderer/fully_illuminated_sprite_renderer.rs", "rank": 7, "score": 111349.64720693107 }, { "content": "#[derive(Copy, Clone, PartialEq, Eq, Hash)]\n\nenum UniformKey {\n\n BevelRaise,\n\n LightsPosition(usize),\n\n LightsColor(usize),\n\n LightsAttenuation(usize),\n\n NumLights,\n\n Texture(TextureUnit),\n\n TileBottomLeft,\n\n TileTopRight,\n\n TileScale,\n\n ProjectionView,\n\n}\n\n\n\nimpl ShaderUniformKey for UniformKey {\n\n fn to_cstring(self) -> CString {\n\n match self {\n\n UniformKey::BevelRaise => CString::new(\"bevel_raise\").expect(\"Bad cstring\"),\n\n UniformKey::LightsPosition(idx) => {\n\n let s = format!(\"lights[{}].position\", idx);\n\n CString::new(s).expect(\"Bad cstring\")\n", "file_path": "fortress/src/lib/maps/render/hex_renderer.rs", "rank": 8, "score": 109528.71945000898 }, { "content": "pub fn run(_input_output: InputOutput) -> StatusOr<()> {\n\n Ok(())\n\n}\n", "file_path": "fortress_bake/src/bake/no_bake.rs", "rank": 9, "score": 104224.39082701862 }, { "content": "pub fn run(input_output: InputOutput) -> StatusOr<()> {\n\n save_sprite_sheets(&input_output.input, &input_output.output)?;\n\n Ok(())\n\n}\n", "file_path": "fortress_bake/src/bake/bake.rs", "rank": 10, "score": 104224.39082701862 }, { "content": "pub fn mmap(path: &PathBuf) -> StatusOr<MmapFile> {\n\n MmapFile::read(path)\n\n}\n\n\n", "file_path": "fortress_bake/src/file/util.rs", "rank": 11, "score": 100511.54581963373 }, { "content": "pub fn slurp_file(path: &PathBuf) -> 
StatusOr<String> {\n\n let file = File::open(path)\n\n .map_err(|e| format!(\"Error opening file {:?}: {}\", path, e))?;\n\n let mut buf_reader = BufReader::new(file);\n\n let mut contents = String::new();\n\n buf_reader.read_to_string(&mut contents)\n\n .map_err(|e| format!(\"Error reading to string from slurp_file {:?}: {}\", path, e))?;\n\n Ok(contents)\n\n}\n\n\n", "file_path": "fortress_bake/src/file/util.rs", "rank": 12, "score": 100511.54581963373 }, { "content": "#[derive(Copy, Clone, PartialEq, Eq, Hash)]\n\nenum UniformKey {\n\n FontTexture,\n\n ScreenWindowSize,\n\n}\n\n\n\nimpl ShaderUniformKey for UniformKey {\n\n fn to_cstring(self) -> CString {\n\n let string = match self {\n\n Self::FontTexture => \"font\",\n\n Self::ScreenWindowSize => \"screen_window_size\",\n\n };\n\n CString::new(string).expect(\"Bad cstring\")\n\n }\n\n}\n\n\n\npub struct ScreenTextRenderer {\n\n shader_program: ShaderProgram<UniformKey>,\n\n attribute_program: AttributeProgram,\n\n attr_pos: Attribute<PositionAttr>,\n\n attr_glyph_size: Attribute<GlyphSizeAttr>,\n", "file_path": "fortress/src/lib/text/screen_text_renderer.rs", "rank": 13, "score": 100227.88111350636 }, { "content": "#[derive(Copy, Clone, PartialEq, Eq, Hash)]\n\nenum UniformKey {\n\n HorizontalMode,\n\n ImageTexture,\n\n}\n\n\n\nimpl ShaderUniformKey for UniformKey {\n\n fn to_cstring(self) -> CString {\n\n let string = match self {\n\n UniformKey::HorizontalMode => \"horizontal\",\n\n UniformKey::ImageTexture => \"image\",\n\n };\n\n CString::new(string).expect(\"Bad cstring\")\n\n }\n\n}\n\n\n\npub struct BlurShader {\n\n shader_program: ShaderProgram<UniformKey>,\n\n attribute_program: AttributeProgram,\n\n attr_position: Attribute<BlurPositionAttr>,\n\n attr_texel: Attribute<BlurTexelAttr>,\n", "file_path": "fortress/src/lib/render/bloom/blur_shader.rs", "rank": 14, "score": 100227.88111350636 }, { "content": "#[derive(Copy, Clone, PartialEq, Eq, Hash)]\n\nenum UniformKey {\n\n CameraRight,\n\n 
CameraUp,\n\n FontTexture,\n\n ProjectionView,\n\n}\n\n\n\nimpl ShaderUniformKey for UniformKey {\n\n fn to_cstring(self) -> CString {\n\n let string = match self {\n\n Self::CameraRight => \"camera_right\",\n\n Self::CameraUp => \"camera_up\",\n\n Self::FontTexture => \"font\",\n\n Self::ProjectionView => \"projection_view\",\n\n };\n\n CString::new(string).expect(\"Bad cstring\")\n\n }\n\n}\n\n\n\npub struct WorldTextRenderer {\n", "file_path": "fortress/src/lib/text/world_text_renderer.rs", "rank": 15, "score": 100227.88111350636 }, { "content": "pub trait KnownComponent {\n\n fn component() -> (NumComponents, ComponentType);\n\n}\n\n\n\npub struct AttributeProgramBuilder {\n\n vao: GLuint,\n\n num_attributes: GLuint,\n\n}\n\n\n\nimpl AttributeProgramBuilder {\n\n pub fn add_attribute<T: KnownComponent>(&mut self) -> Attribute<T> {\n\n self.add_attribute_with_advance(AttributeAdvance::PerInstance)\n\n }\n\n\n\n pub fn add_attribute_with_advance<T: KnownComponent>(&mut self, advance: AttributeAdvance) -> Attribute<T> {\n\n self.num_attributes += 1;\n\n Attribute::<T>::new(self.num_attributes - 1, advance)\n\n }\n\n\n\n pub fn build(self) -> AttributeProgram {\n", "file_path": "fortress_bake/src/render/attribute.rs", "rank": 16, "score": 98785.09250299894 }, { "content": "#[derive(Copy, Clone, PartialEq, Eq, Hash)]\n\nenum UniformKey {\n\n Bloom,\n\n BloomIntensityMultiplier,\n\n Scene,\n\n}\n\n\n\nimpl ShaderUniformKey for UniformKey {\n\n fn to_cstring(self) -> CString {\n\n let string = match self {\n\n UniformKey::Bloom => \"bloom\",\n\n UniformKey::BloomIntensityMultiplier => \"bloom_intensity_multiplier\",\n\n UniformKey::Scene => \"scene\",\n\n };\n\n CString::new(string).expect(\"Bad cstring\")\n\n }\n\n}\n\n\n\npub struct BloomCompositorShader {\n\n shader_program: ShaderProgram<UniformKey>,\n\n attribute_program: AttributeProgram,\n", "file_path": "fortress/src/lib/render/bloom/bloom_compositor_shader.rs", "rank": 17, "score": 98738.79045980796 }, { 
"content": "pub trait ShaderUniformKey {\n\n fn to_cstring(self) -> CString;\n\n}\n\n\n", "file_path": "fortress/src/lib/render/shader.rs", "rank": 18, "score": 97236.76927578643 }, { "content": "pub fn reader(path: &PathBuf) -> StatusOr<BufReader<File>> {\n\n let file = File::open(path)\n\n .map_err(|e| format!(\"Error opening file {:?}: {}\", path, e))?;\n\n Ok(BufReader::new(file))\n\n}\n\n\n", "file_path": "fortress_bake/src/file/util.rs", "rank": 19, "score": 97195.29660993302 }, { "content": "pub fn slurp_file_bytes(path: &PathBuf) -> StatusOr<Vec<u8>> {\n\n let file = File::open(path)\n\n .map_err(|e| format!(\"Error opening file {:?}: {}\", path, e))?;\n\n let mut buf_reader = BufReader::new(file);\n\n let mut contents = Vec::new();\n\n buf_reader.read_to_end(&mut contents)\n\n .map_err(|e| format!(\"Error reading to string from slurp_file {:?}: {}\", path, e))?;\n\n Ok(contents)\n\n}\n\n\n", "file_path": "fortress_bake/src/file/util.rs", "rank": 20, "score": 95925.76171367109 }, { "content": "#[derive(Deserialize)]\n\npub struct AudioConfig {\n\n pub sound_volume: f64\n\n}", "file_path": "fortress/src/lib/audio/audio_config.rs", "rank": 21, "score": 93376.25152583321 }, { "content": "#[derive(Deserialize)]\n\npub struct BloodParticleConfig {\n\n pub particle_limit: usize,\n\n pub size_range: (f32, f32),\n\n pub gravity: f32,\n\n pub max_spread_speed: f32,\n\n pub start_height: f32,\n\n pub min_height: f32,\n\n pub start_velocity_y: f32,\n\n pub start_position_radius: f32,\n\n pub expiry_duration_micros: i64,\n\n pub bloom_intensity: f32,\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct SnowParticleConfig {\n\n pub particle_limit: usize,\n\n pub particle_generation_period_micros: i64,\n\n pub wind_direction_raw: (f32, f32, f32),\n\n pub wind_direction_max_angle_offset: f32,\n", "file_path": "fortress/src/lib/particles/particle_config.rs", "rank": 22, "score": 92998.67985648892 }, { "content": " pub wave_phase_shift: f64,\n\n pub bloom_intensity: 
f32,\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct ParticleConfig {\n\n pub initial_particle_events_limit_guess: usize,\n\n pub blood: BloodParticleConfig,\n\n pub snow: SnowParticleConfig,\n\n pub hero_switch: HeroSwitchParticleConfig,\n\n}", "file_path": "fortress/src/lib/particles/particle_config.rs", "rank": 23, "score": 92998.17508980753 }, { "content": " pub speed_range: (f32, f32),\n\n pub size_range: (f32, f32),\n\n pub color: (f32, f32, f32),\n\n pub start_position_offset: (f32, f32, f32),\n\n pub height_above_which_alpha_is_full: f32,\n\n pub bloom_intensity: f32,\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct HeroSwitchParticleConfig {\n\n pub particle_limit: usize,\n\n pub particles_per_event: usize,\n\n pub size: f32,\n\n pub color: (f32, f32, f32),\n\n pub max_age_seconds: f64,\n\n pub starting_radial_offset: f64,\n\n pub starting_height_band: (f64, f64),\n\n pub xz_speed_band: (f64, f64),\n\n pub wave_speed_band: (f64, f64),\n\n pub wave_amplitude: f64,\n", "file_path": "fortress/src/lib/particles/particle_config.rs", "rank": 24, "score": 92996.63837305682 }, { "content": "use crate::items::{\n\n barrels::BarrelConfig,\n\n ItemType,\n\n types::SkullType,\n\n};\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Deserialize)]\n\npub struct ItemConfig {\n\n pub system_items_initial_capacity: usize,\n\n pub system_barrels_initial_capacity: usize,\n\n pub physical_radius: f64,\n\n pub physical_density: f64,\n\n pub render_scale: f32,\n\n pub bloom_intensity: f32,\n\n pub light_elevation: f32,\n\n pub light_attenuation: (f32, f32, f32),\n\n pub item_type_light_color: HashMap<ItemType, (f32, f32, f32)>,\n\n pub skull_value: HashMap<SkullType, i64>,\n\n\n\n pub barrel: BarrelConfig,\n\n}", "file_path": "fortress/src/lib/items/item_config.rs", "rank": 25, "score": 92879.50341614783 }, { "content": "use crate::{\n\n dimensions::Criticality,\n\n text::RasterSize,\n\n};\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Deserialize)]\n\npub struct 
EnemySystemConfig {\n\n pub generator: EnemyGeneratorConfig,\n\n pub enemy: EnemyConfig,\n\n pub damage_text: DamageTextConfig,\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct EnemyGeneratorConfig {\n\n pub slab_initial_capacity_guess: usize,\n\n pub starting_health: i64,\n\n pub cooldown_duration_micros: i64,\n\n pub spawn_offset_distance: f64,\n\n pub max_concurrent_spawns: usize,\n", "file_path": "fortress/src/lib/enemies/enemy_config.rs", "rank": 26, "score": 92766.81893236088 }, { "content": " pub generate_distance: f64,\n\n pub physical_radius: f64,\n\n pub physical_density: f64,\n\n pub render_scale: f32,\n\n pub num_sprite_frames: usize,\n\n pub light_offset: (f32, f32, f32),\n\n pub light_color: (f32, f32, f32),\n\n pub light_attenuation: (f32, f32, f32),\n\n pub blood_color: (f32, f32, f32),\n\n pub num_blood_particles_per_hit: u32,\n\n pub death_screen_shake_intensity: f32,\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct EnemyConfig {\n\n pub slab_initial_capacity_guess: usize,\n\n pub starting_health: i64,\n\n pub dying_duration_micros: i64,\n\n pub dying_frame_duration_micros: i64,\n\n pub walk_frame_duration_micros: i64,\n", "file_path": "fortress/src/lib/enemies/enemy_config.rs", "rank": 27, "score": 92755.99281513582 }, { "content": " pub stop_and_hit_distance: f64,\n\n pub anger_distance: f64,\n\n pub move_speed: f64,\n\n pub physical_radius: f64,\n\n pub physical_density: f64,\n\n pub render_scale: f32,\n\n pub blood_color: (f32, f32, f32),\n\n pub num_blood_particles_per_hit: u32,\n\n}\n\n\n\n#[derive(Deserialize)]\n\npub struct DamageTextConfig {\n\n pub initial_capacity: usize,\n\n pub start_velocity: (f32, f32, f32),\n\n pub start_height: f32,\n\n pub vertical_acceleration: f32,\n\n pub text_expiry_duration_micros: i64,\n\n pub raster_size: RasterSize,\n\n pub color: HashMap<Criticality, (f32, f32, f32)>,\n\n pub shadow_color: (f32, f32, f32),\n\n pub shadow_offset: (f32, f32, f32),\n\n}\n", "file_path": 
"fortress/src/lib/enemies/enemy_config.rs", "rank": 28, "score": 92755.87213340867 }, { "content": "#[derive(Deserialize)]\n\nstruct BackgroundRendererConfig {\n\n pub camera_speed: f32,\n\n // Screen pixels / pixels sampled.\n\n pub zoom: f32\n\n}\n\n\n", "file_path": "fortress/src/lib/render/renderer/background_renderer.rs", "rank": 29, "score": 91777.57536784832 }, { "content": "// Returns base names of files in dir that match .*extension\n\npub fn files_in_dir_ending_with(dir: &PathBuf, extension: &str) -> StatusOr<Vec<String>> {\n\n Ok(dir.read_dir()\n\n .map_err(|e| format!(\"Bad dir: {:?}\", e))?\n\n .filter_map(|entry| entry.ok())\n\n .map(|entry| entry.file_name())\n\n .filter_map(|base_name| {\n\n let base_name_str = base_name.to_str()?;\n\n if !base_name_str.ends_with(extension) {\n\n return None;\n\n }\n\n Some(String::from(base_name_str))\n\n })\n\n .collect())\n\n}\n\n\n", "file_path": "fortress_bake/src/file/util.rs", "rank": 30, "score": 90293.89995048215 }, { "content": "use crate::render::attribute;\n\nuse glm;\n\n\n\npub struct ParticleRenderView<'a> {\n\n pub attr_pos: &'a mut Vec<Vec3Attr>,\n\n pub attr_color: &'a mut Vec<Vec3Attr>,\n\n pub attr_bloom: &'a mut Vec<BloomAttr>,\n\n pub attr_alpha: &'a mut Vec<FloatAttr>,\n\n pub attr_size: &'a mut Vec<FloatAttr>,\n\n}\n\n\n\n#[repr(C)]\n\npub struct Vec3Attr {\n\n val: glm::Vec3,\n\n}\n\n\n\nimpl Vec3Attr {\n\n pub fn new(val: glm::Vec3) -> Vec3Attr {\n\n Vec3Attr {\n\n val\n", "file_path": "fortress/src/lib/particles/particle_render_view.rs", "rank": 31, "score": 89520.18693454567 }, { "content": " }\n\n}\n\n\n\nimpl attribute::KnownComponent for FloatAttr {\n\n fn component() -> (attribute::NumComponents, attribute::ComponentType) {\n\n (attribute::NumComponents::S1, attribute::ComponentType::Float)\n\n }\n\n}\n\n\n\n#[repr(C)]\n\npub struct BloomAttr {\n\n color: glm::Vec3,\n\n intensity: f32,\n\n}\n\n\n\nimpl BloomAttr {\n\n pub fn new(color: glm::Vec3, intensity: f32) -> Self {\n\n BloomAttr 
{\n\n color,\n\n intensity,\n", "file_path": "fortress/src/lib/particles/particle_render_view.rs", "rank": 32, "score": 89506.07018217102 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl attribute::KnownComponent for Vec3Attr {\n\n fn component() -> (attribute::NumComponents, attribute::ComponentType) {\n\n (attribute::NumComponents::S3, attribute::ComponentType::Float)\n\n }\n\n}\n\n\n\n#[repr(C)]\n\npub struct FloatAttr {\n\n val: f32,\n\n}\n\n\n\nimpl FloatAttr {\n\n pub fn new(val: f32) -> Self {\n\n FloatAttr {\n\n val\n\n }\n", "file_path": "fortress/src/lib/particles/particle_render_view.rs", "rank": 33, "score": 89502.393547841 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl attribute::KnownComponent for BloomAttr {\n\n fn component() -> (attribute::NumComponents, attribute::ComponentType) {\n\n (attribute::NumComponents::S4, attribute::ComponentType::Float)\n\n }\n\n}\n", "file_path": "fortress/src/lib/particles/particle_render_view.rs", "rank": 34, "score": 89494.98107473564 }, { "content": "pub fn resource_path(parent_folder: &'static str, resource_name: &'static str) -> PathBuf {\n\n let mut path_buf = resource_base();\n\n [parent_folder, resource_name].iter().for_each(|p| path_buf.push(p));\n\n path_buf\n\n}\n\n\n", "file_path": "fortress_bake/src/file/util.rs", "rank": 35, "score": 89144.90504532716 }, { "content": "#[repr(C)]\n\nstruct SpritePositionAttr {\n\n world_center_position: glm::Vec3,\n\n}\n\n\n\nimpl attribute::KnownComponent for SpritePositionAttr {\n\n fn component() -> (attribute::NumComponents, attribute::ComponentType) {\n\n (attribute::NumComponents::S3, attribute::ComponentType::Float)\n\n }\n\n}\n\n\n", "file_path": "fortress/src/lib/render/renderer/light_dependent_sprite_renderer.rs", "rank": 36, "score": 87232.81821263845 }, { "content": "#[repr(C)]\n\nstruct SpritePositionAttr {\n\n world_center_position: glm::Vec3,\n\n}\n\n\n\nimpl attribute::KnownComponent for SpritePositionAttr {\n\n fn component() -> (attribute::NumComponents, 
attribute::ComponentType) {\n\n (attribute::NumComponents::S3, attribute::ComponentType::Float)\n\n }\n\n}\n\n\n", "file_path": "fortress/src/lib/render/renderer/fully_illuminated_sprite_renderer.rs", "rank": 37, "score": 87232.81821263845 }, { "content": "use crate::file;\n\nuse enum_iterator::IntoEnumIterator;\n\nuse std::path::PathBuf;\n\n\n\n#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, IntoEnumIterator)]\n\npub enum Sound {\n\n BarrelDestroy,\n\n BarrelHit,\n\n CollectItem,\n\n EnemyGeneratorHurt,\n\n EnemyGeneratorKilled,\n\n EnemyKilled,\n\n HeroSwitch,\n\n JoinGame,\n\n ShootSingleFireball,\n\n ShootSpecial,\n\n}\n\n\n\nimpl Sound {\n\n pub fn to_path_buf(self) -> PathBuf {\n", "file_path": "fortress/src/lib/audio/sound.rs", "rank": 38, "score": 86273.28311531171 }, { "content": " let filename = match self {\n\n Sound::BarrelDestroy => \"barrel_destroy.wav\",\n\n Sound::BarrelHit => \"barrel_hit.wav\",\n\n Sound::CollectItem => \"collect_item.wav\",\n\n Sound::EnemyGeneratorHurt => \"enemy_generator_hurt.wav\",\n\n Sound::EnemyGeneratorKilled => \"enemy_generator_killed.wav\",\n\n Sound::EnemyKilled => \"enemy_killed.wav\",\n\n Sound::HeroSwitch => \"hero_switch.wav\",\n\n Sound::JoinGame => \"join_game.wav\",\n\n Sound::ShootSingleFireball => \"shoot_single_fireball.wav\",\n\n Sound::ShootSpecial => \"shoot_special.wav\",\n\n };\n\n file::util::resource_path(\"audio\", filename)\n\n }\n\n\n\n pub fn all_sounds() -> <Self as IntoEnumIterator>::Iterator {\n\n Self::into_enum_iter()\n\n }\n\n}", "file_path": "fortress/src/lib/audio/sound.rs", "rank": 39, "score": 86272.09632738782 }, { "content": "fn compile_program(shaders: &[GLuint]) -> StatusOr<GLuint> {\n\n unsafe {\n\n let program_id = gl::CreateProgram();\n\n for shader in shaders.iter() {\n\n gl::AttachShader(program_id, *shader);\n\n }\n\n gl::LinkProgram(program_id);\n\n\n\n let mut success: GLint = 0;\n\n gl::GetProgramiv(program_id, gl::LINK_STATUS, &mut success);\n\n if success as GLboolean == 
gl::TRUE {\n\n Ok(program_id)\n\n } else {\n\n let mut error_log: Vec<u8> = Vec::with_capacity(512);\n\n let error_log_ptr = error_log.as_mut_slice().as_mut_ptr() as *mut i8;\n\n gl::GetProgramInfoLog(program_id, error_log.capacity() as i32, std::ptr::null_mut(), error_log_ptr);\n\n let err_string =\n\n String::from_utf8(error_log)\n\n .map_err(|_err|\n\n String::from(\"Program failed to compile. Could not retrieve reason.\"))?;\n", "file_path": "fortress/src/lib/render/shader.rs", "rank": 40, "score": 86076.79625513857 }, { "content": "use crate::{\n\n enemies::DamageTextConfig,\n\n dimensions::{\n\n Damage,\n\n time::{\n\n DeltaTime,\n\n Timer\n\n },\n\n },\n\n math::EasingFn,\n\n text::{\n\n TextContent,\n\n TextRenderer,\n\n WorldTextRequest,\n\n },\n\n};\n\nuse glm;\n\nuse nalgebra::{\n\n Point2,\n\n Vector2,\n", "file_path": "fortress/src/lib/enemies/damage_text_writer.rs", "rank": 41, "score": 82228.73070711746 }, { "content": "};\n\n\n\npub struct DamageTextWriter {\n\n damage: Vec<Damage>,\n\n position: Vec<glm::Vec3>,\n\n velocity: Vec<glm::Vec3>,\n\n timer: Vec<Timer>,\n\n}\n\n\n\nimpl DamageTextWriter {\n\n pub fn new(config: &DamageTextConfig) -> Self {\n\n DamageTextWriter {\n\n damage: Vec::with_capacity(config.initial_capacity),\n\n position: Vec::with_capacity(config.initial_capacity),\n\n velocity: Vec::with_capacity(config.initial_capacity),\n\n timer: Vec::with_capacity(config.initial_capacity),\n\n }\n\n }\n\n\n\n pub fn pre_update(&mut self, config: &DamageTextConfig, dt: DeltaTime) {\n", "file_path": "fortress/src/lib/enemies/damage_text_writer.rs", "rank": 42, "score": 82217.19956549715 }, { "content": " pub fn add_damage(&mut self, config: &DamageTextConfig, damage: Damage, position: Point2<f64>, direction: Option<Vector2<f64>>) {\n\n let velocity = glm::vec2(config.start_velocity.0, config.start_velocity.2) * if let Some(world_direction) = direction {\n\n glm::vec2(world_direction.x as f32, -world_direction.y as f32)\n\n } else {\n\n 
glm::vec2(0.0, 0.0)\n\n };\n\n\n\n self.damage.push(damage);\n\n self.position.push(glm::vec3(position.x as f32, config.start_height, -position.y as f32));\n\n self.velocity.push(glm::vec3(velocity.x, config.start_velocity.1, velocity.y));\n\n self.timer.push(Timer::new(config.text_expiry_duration_micros));\n\n }\n\n\n\n pub fn queue_draw(&self, config: &DamageTextConfig, text: &mut TextRenderer) {\n\n (0..self.damage.len())\n\n .for_each(|idx| {\n\n let damage = self.damage[idx];\n\n if let Some(color) = config.color.get(&damage.criticality()) {\n\n let content = [TextContent::Number(damage.value())];\n\n let world_position = self.position[idx];\n", "file_path": "fortress/src/lib/enemies/damage_text_writer.rs", "rank": 43, "score": 82214.17716341447 }, { "content": " let float_dt = dt.as_f32_seconds();\n\n let vertical_acceleration = config.vertical_acceleration * float_dt;\n\n\n\n (0..self.damage.len())\n\n .rev()\n\n .for_each(|idx| {\n\n let ref mut timer = self.timer[idx];\n\n timer.tick(dt);\n\n if timer.is_expired() {\n\n self.swap_delete(idx);\n\n return;\n\n }\n\n\n\n let ref mut velocity = self.velocity[idx];\n\n velocity.y += vertical_acceleration;\n\n let ref mut position = self.position[idx];\n\n *position = (*position) + (*velocity * float_dt);\n\n });\n\n }\n\n\n", "file_path": "fortress/src/lib/enemies/damage_text_writer.rs", "rank": 44, "score": 82203.14300687205 }, { "content": "\n\n let alpha = {\n\n let t = self.timer[idx].as_completion_fraction_of(config.text_expiry_duration_micros);\n\n EasingFn::ease_out_quintic(1.0 - t)\n\n };\n\n\n\n text.queue_world_text(content.iter().copied(), WorldTextRequest {\n\n world_position: world_position + glm::vec3(config.shadow_offset.0, config.shadow_offset.1, config.shadow_offset.2),\n\n raster_size: config.raster_size,\n\n color: glm::vec3(config.shadow_color.0, config.shadow_color.1, config.shadow_color.2),\n\n alpha,\n\n });\n\n text.queue_world_text(content.iter().copied(), WorldTextRequest {\n\n 
world_position,\n\n raster_size: config.raster_size,\n\n color: glm::vec3(color.0, color.1, color.2),\n\n alpha,\n\n });\n\n }\n\n });\n\n }\n\n\n\n fn swap_delete(&mut self, index: usize) {\n\n self.damage.swap_remove(index);\n\n self.position.swap_remove(index);\n\n self.velocity.swap_remove(index);\n\n self.timer.swap_remove(index);\n\n }\n\n}", "file_path": "fortress/src/lib/enemies/damage_text_writer.rs", "rank": 45, "score": 82201.66070627354 }, { "content": "#[derive(Deserialize)]\n\npub struct BarrelConfig {\n\n pub physical_radius: f64,\n\n pub physical_density: f64,\n\n pub render_scale: (f32, f32),\n\n pub bloom_intensity: f32,\n\n pub num_strikes_health: i64,\n\n pub blood_color: (f32, f32, f32),\n\n pub num_blood_particles_per_hit: u32,\n\n}", "file_path": "fortress/src/lib/items/barrels/barrel_config.rs", "rank": 46, "score": 82006.65656560195 }, { "content": "struct FirstTimeUsed {\n\n pub has_been_used: bool,\n\n pub first_time_used: bool,\n\n}\n\n\n\nimpl FirstTimeUsed {\n\n pub fn new() -> FirstTimeUsed {\n\n FirstTimeUsed {\n\n has_been_used: false,\n\n first_time_used: false,\n\n }\n\n }\n\n\n\n pub fn touch(&mut self, cond: bool) {\n\n if cond {\n\n self.first_time_used = !self.has_been_used;\n\n self.has_been_used = true;\n\n }\n\n }\n\n\n\n pub fn is_first(&self) -> bool {\n\n self.first_time_used\n\n }\n\n}\n", "file_path": "fortress/src/lib/control/keyboard.rs", "rank": 47, "score": 81080.88183264389 }, { "content": "#[derive(Deserialize)]\n\npub struct ScreenShakeConfig {\n\n pub intensity_fall_off_speed: f32,\n\n pub max_intensity: f32,\n\n pub max_rotation_radians: f32,\n\n pub noise_time_multiplier: f32,\n\n pub noise_seed_offset: f32,\n\n pub noise_iterations: usize,\n\n}\n", "file_path": "fortress/src/lib/render/cameras/camera_config.rs", "rank": 48, "score": 80149.03588366363 }, { "content": "#[derive(Deserialize)]\n\npub struct CameraConfig {\n\n pub zoom: f32,\n\n pub z_near: f32,\n\n pub z_far: f32,\n\n pub lookat: (f32, f32, 
f32),\n\n pub right: (f32, f32, f32),\n\n pub camera_pos_offset: (f32, f32),\n\n pub initial_position_when_no_players: (f64, f64, f64),\n\n\n\n pub physical_no_move_half_lengths: (f64, f64),\n\n pub physical_follow_player_factor: f64,\n\n\n\n pub stream_inside_half_extents: (f64, f64),\n\n pub stream_margin_length: f64,\n\n pub stream_light_margin_length: f64,\n\n\n\n pub screen_shake: ScreenShakeConfig,\n\n}\n\n\n", "file_path": "fortress/src/lib/render/cameras/camera_config.rs", "rank": 49, "score": 80148.34120958921 }, { "content": "#[derive(Deserialize)]\n\npub struct BloomConfig {\n\n pub num_passes: usize,\n\n pub bloom_intensity_multiplier: f32,\n\n}", "file_path": "fortress/src/lib/render/bloom/bloom_config.rs", "rank": 50, "score": 80146.04184430398 }, { "content": "fn compile_shader(path: &PathBuf, shader_type: GLenum) -> StatusOr<GLuint> {\n\n let slurped_shader_code = file::util::slurp_file(path)\n\n .map_err(|err| format!(\"Error reading shader ({:?}), code: {}\", path, err))?;\n\n let shader_c_str =\n\n CString::new(slurped_shader_code.as_str())\n\n .map_err(|err| format!(\"Couldn't turn shader {:?} into a C string. Reason: {}\", path, err))?;\n\n unsafe {\n\n let shader_id = gl::CreateShader(shader_type);\n\n gl::ShaderSource(shader_id, 1, &shader_c_str.as_ptr(), std::ptr::null());\n\n gl::CompileShader(shader_id);\n\n\n\n let mut success = GLint::from(gl::FALSE);\n\n gl::GetShaderiv(shader_id, gl::COMPILE_STATUS, &mut success);\n\n if success == GLint::from(gl::TRUE) {\n\n Ok(shader_id)\n\n } else {\n\n let mut info_log_len = 0;\n\n gl::GetShaderiv(shader_id, gl::INFO_LOG_LENGTH, &mut info_log_len);\n\n let mut info_log = Vec::with_capacity(info_log_len as usize);\n\n info_log.set_len((info_log_len as usize) - 1);\n\n gl::GetShaderInfoLog(shader_id, info_log_len, std::ptr::null_mut(), info_log.as_mut_ptr() as *mut GLchar);\n\n let err_string = String::from_utf8(info_log)\n\n .map_err(|_err|\n\n format!(\"Shader failed to compile. 
Explanation was invalid UTF-8. Shader: {:?}\", path))?;\n\n Err(err_string)\n\n }\n\n }\n\n}\n\n\n", "file_path": "fortress/src/lib/render/shader.rs", "rank": 51, "score": 79061.19013914806 }, { "content": "use crate::render::{\n\n NamedSpriteSheet,\n\n TextureStyle,\n\n};\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Clone, Deserialize)]\n\npub struct SheetConfig {\n\n pub width: usize,\n\n pub height: usize,\n\n pub style: TextureStyle,\n\n pub sprites: HashMap<String, SpriteConfig>,\n\n}\n\n\n\n#[derive(Clone, Deserialize)]\n\npub struct SpriteConfig {\n\n pub frame_width: usize,\n\n pub frame_height: usize,\n\n}\n\n\n\n#[derive(Clone, Deserialize)]\n\npub struct SpriteSheetConfig {\n\n pub sheets: HashMap<NamedSpriteSheet, SheetConfig>\n\n}\n", "file_path": "fortress_bake/src/render/sprites/sprite_sheet_config.rs", "rank": 52, "score": 78548.43875826232 }, { "content": "#[repr(C)]\n\nstruct PositionAttr {\n\n position: glm::Vec3,\n\n}\n\n\n\nimpl attribute::KnownComponent for PositionAttr {\n\n fn component() -> (attribute::NumComponents, attribute::ComponentType) {\n\n (attribute::NumComponents::S3, attribute::ComponentType::Float)\n\n }\n\n}\n\n\n", "file_path": "fortress/src/lib/text/screen_text_renderer.rs", "rank": 53, "score": 77257.82956429395 }, { "content": "#[repr(C)]\n\nstruct PositionAttr {\n\n position: glm::Vec3,\n\n}\n\n\n\nimpl attribute::KnownComponent for PositionAttr {\n\n fn component() -> (attribute::NumComponents, attribute::ComponentType) {\n\n (attribute::NumComponents::S3, attribute::ComponentType::Float)\n\n }\n\n}\n\n\n", "file_path": "fortress/src/lib/text/world_text_renderer.rs", "rank": 54, "score": 77257.82956429395 }, { "content": "use crate::render::{\n\n FramesInfo,\n\n NamedSpriteSheet,\n\n SpriteSheetConfig,\n\n SpriteSheetFrameId,\n\n TextureStyle,\n\n};\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Serialize, Deserialize)]\n\npub struct BakedSpriteSheetConfig {\n\n pub images: Vec<(NamedSpriteSheet, 
TextureStyle)>,\n\n pub frames: Vec<(SpriteSheetFrameId, FramesInfo)>,\n\n}\n\n\n\nimpl BakedSpriteSheetConfig {\n\n pub fn new(config: SpriteSheetConfig, frames: HashMap<SpriteSheetFrameId, FramesInfo>) -> Self {\n\n let images = config.sheets\n\n .into_iter()\n\n .map(|(named_sprite_sheet, sheet_config)| {\n", "file_path": "fortress_bake/src/render/sprites/baked_sprite_sheet_config.rs", "rank": 55, "score": 77007.80394999386 }, { "content": " (named_sprite_sheet, sheet_config.style)\n\n })\n\n .collect();\n\n\n\n let frames = frames.into_iter().collect();\n\n\n\n BakedSpriteSheetConfig {\n\n images,\n\n frames,\n\n }\n\n }\n\n}\n", "file_path": "fortress_bake/src/render/sprites/baked_sprite_sheet_config.rs", "rank": 56, "score": 76990.50302558327 }, { "content": "#[derive(Deserialize)]\n\nstruct PointLightsConfig {\n\n initial_capacity: usize\n\n}\n\n\n\npub struct PointLights {\n\n max_num_lights: usize,\n\n lights: Vec<PointLight>,\n\n camera_stream_info: Option<CameraStreamInfo>,\n\n}\n\n\n\nimpl PointLights {\n\n pub fn new() -> StatusOr<Self> {\n\n let config_path = file::util::resource_path(\"config\", \"lights.conf\");\n\n let config = PointLightsConfig::from_path(&config_path)?;\n\n\n\n Ok(PointLights {\n\n max_num_lights: config.initial_capacity,\n\n lights: Vec::with_capacity(config.initial_capacity),\n\n camera_stream_info: None,\n\n })\n", "file_path": "fortress/src/lib/render/point_light.rs", "rank": 57, "score": 76985.93667541492 }, { "content": "#[repr(C)]\n\nstruct BlurPositionAttr {\n\n position: glm::Vec3,\n\n}\n\n\n\nimpl attribute::KnownComponent for BlurPositionAttr {\n\n fn component() -> (attribute::NumComponents, attribute::ComponentType) {\n\n (attribute::NumComponents::S3, attribute::ComponentType::Float)\n\n }\n\n}\n\n\n", "file_path": "fortress/src/lib/render/bloom/blur_shader.rs", "rank": 58, "score": 75767.93128297431 }, { "content": "#[repr(C)]\n\nstruct BloomPositionAttr {\n\n position: glm::Vec3,\n\n}\n\n\n\nimpl 
attribute::KnownComponent for BloomPositionAttr {\n\n fn component() -> (attribute::NumComponents, attribute::ComponentType) {\n\n (attribute::NumComponents::S3, attribute::ComponentType::Float)\n\n }\n\n}\n\n\n", "file_path": "fortress/src/lib/render/bloom/bloom_compositor_shader.rs", "rank": 59, "score": 74334.41042229673 }, { "content": "enum GamepadControl {\n\n ButtonPress(sdl2::controller::Button),\n\n AxisAboveThreshold(sdl2::controller::Axis, f32),\n\n AxisBelowThreshold(sdl2::controller::Axis, f32),\n\n}\n\n\n", "file_path": "fortress/src/lib/control/gamepad.rs", "rank": 60, "score": 64315.997366007694 }, { "content": "fn main() -> StatusOr<()> {\n\n let mut root = PathBuf::new();\n\n root.push(\"..\");\n\n let input_output = InputOutput::new(root)?;\n\n bake::run(input_output)\n\n}", "file_path": "fortress/build.rs", "rank": 61, "score": 63617.594404697855 }, { "content": "#[derive(Clone)]\n\nenum Entry<T> {\n\n Vacant(usize),\n\n Occupied(T, u64),\n\n}\n\n\n\nimpl<T> Slab<T> {\n\n pub fn new() -> Slab<T> {\n\n Slab::with_capacity(10)\n\n }\n\n\n\n pub fn with_capacity(capacity: usize) -> Slab<T> {\n\n Slab {\n\n entries: Vec::with_capacity(capacity),\n\n generation: 0,\n\n next: 0,\n\n len: 0,\n\n }\n\n }\n\n\n\n pub fn capacity(&self) -> usize {\n", "file_path": "generational_slab/src/lib.rs", "rank": 62, "score": 62473.62427064496 }, { "content": "fn find_sdl_gl_driver() -> StatusOr<u32> {\n\n for (index, item) in sdl2::render::drivers().enumerate() {\n\n if item.name == \"opengl\" {\n\n return Ok(index as u32);\n\n }\n\n }\n\n Err(String::from(\"Could not find SDL GL driver.\"))\n\n}\n\npub struct AppContext {\n\n pub events: sdl2::EventPump,\n\n pub canvas: sdl2::render::WindowCanvas,\n\n pub controller_subsystem: sdl2::GameControllerSubsystem,\n\n _video_subsystem: sdl2::VideoSubsystem,\n\n _gl_context: sdl2::video::GLContext,\n\n _sdl_context: sdl2::Sdl,\n\n}\n\n\n\nimpl AppContext {\n\n pub fn new(window_size: (i32, i32)) -> StatusOr<AppContext> 
{\n\n let sdl_context = sdl2::init()?;\n", "file_path": "fortress/src/lib/app/app_context.rs", "rank": 63, "score": 55020.22515060504 }, { "content": "fn try_find_resource_base() -> StatusOr<PathBuf> {\n\n let root = PathBuf::from(\".\");\n\n\n\n let mut path_buf = root.canonicalize()\n\n .map_err(|e| format! (\"Couldn't canonicalize CWD: {}\", e))?;\n\n\n\n // TODO: Apply this call recursively instead of stopping.\n\n if dir_contains_res(path_buf.as_path())? {\n\n path_buf.push(\"res\");\n\n Ok(path_buf)\n\n } else {\n\n Err(String::from(\"Could not find resource folder!\"))\n\n }\n\n}\n", "file_path": "fortress_bake/src/file/util.rs", "rank": 64, "score": 55020.22515060504 }, { "content": "}\n\n\n\nimpl BackgroundRenderer {\n\n pub fn new(config_watcher: &mut ConfigWatcher) -> StatusOr<BackgroundRenderer> {\n\n let config_manager = SimpleConfigManager::from_config_resource(config_watcher, \"background_renderer.conf\")?;\n\n\n\n let vertex = file::util::resource_path(\"shaders\", \"background_vert.glsl\");\n\n let fragment = file::util::resource_path(\"shaders\", \"background_frag.glsl\");\n\n let shader_program = ShaderProgram::from_short_pipeline(&vertex, &fragment)?;\n\n\n\n let mut attribute_program_builder = AttributeProgram::builder();\n\n let mut attr_vertex = attribute_program_builder.add_attribute_with_advance(AttributeAdvance::PerVertex);\n\n let attr_texel = attribute_program_builder.add_attribute_with_advance(AttributeAdvance::PerVertex);\n\n let attribute_program = attribute_program_builder.build();\n\n\n\n for vertex in [\n\n glm::vec2(-1.0, 1.0),\n\n glm::vec2(-1.0, -1.0),\n\n glm::vec2( 1.0, 1.0),\n\n glm::vec2( 1.0, -1.0)].iter() {\n", "file_path": "fortress/src/lib/render/renderer/background_renderer.rs", "rank": 65, "score": 52000.91636063964 }, { "content": "use crate::{\n\n app::StatusOr,\n\n file::{\n\n self,\n\n ConfigWatcher,\n\n SimpleConfigManager,\n\n },\n\n render::{\n\n attribute,\n\n Attribute,\n\n AttributeAdvance,\n\n 
AttributeProgram,\n\n CameraGeometry,\n\n NamedSpriteSheet,\n\n ShaderProgram,\n\n ShaderUniformKey,\n\n SpriteSheetTextureManager,\n\n PngTexture,\n\n TextureUnit,\n\n }\n\n};\n\nuse std::ffi::CString;\n\n\n\n#[derive(Deserialize)]\n", "file_path": "fortress/src/lib/render/renderer/background_renderer.rs", "rank": 66, "score": 51995.09077848272 }, { "content": " attr_vertex.data.push( VertexAttr {\n\n vertex: *vertex\n\n });\n\n }\n\n\n\n Ok(BackgroundRenderer {\n\n config_manager,\n\n shader_program,\n\n attribute_program,\n\n attr_vertex,\n\n attr_texel,\n\n })\n\n }\n\n\n\n pub fn pre_update(&mut self) {\n\n self.config_manager.update();\n\n }\n\n\n\n pub fn draw(&mut self, textures: &SpriteSheetTextureManager, camera_geometry: &CameraGeometry) {\n\n let texture = textures.texture(NamedSpriteSheet::GalaxyGround);\n", "file_path": "fortress/src/lib/render/renderer/background_renderer.rs", "rank": 67, "score": 51994.93106942326 }, { "content": "\n\n let config = self.config_manager.get();\n\n let (image_width, image_height) = texture.dimensions();\n\n let texel_width = config.zoom / image_width as f32;\n\n let texel_height = config.zoom / image_height as f32;\n\n\n\n let bottom_left = glm::vec2(camera_pos.x, -camera_pos.z) * config.camera_speed;\n\n let top_right = bottom_left + glm::vec2(texel_width, texel_height);\n\n\n\n for texel in [\n\n glm::vec2(bottom_left.x, top_right.y),\n\n bottom_left,\n\n top_right,\n\n glm::vec2(top_right.x, bottom_left.y)].iter() {\n\n self.attr_texel.data.push( TexelAttr {\n\n texel: *texel,\n\n });\n\n }\n\n }\n\n}\n\n\n", "file_path": "fortress/src/lib/render/renderer/background_renderer.rs", "rank": 68, "score": 51989.913464404104 }, { "content": " self.set_texels(texture, camera_geometry.world_position);\n\n\n\n self.shader_program.activate();\n\n self.attribute_program.activate();\n\n self.attr_vertex.prepare_buffer();\n\n self.attr_texel.prepare_buffer();\n\n\n\n let texture_unit = texture.activate();\n\n 
self.shader_program.set_texture(UniformKey::Texture(texture_unit), texture_unit);\n\n\n\n unsafe {\n\n gl::DrawArrays(gl::TRIANGLE_STRIP, 0, 4);\n\n }\n\n\n\n self.attribute_program.deactivate();\n\n self.shader_program.deactivate();\n\n }\n\n\n\n fn set_texels(&mut self, texture: &PngTexture, camera_pos: glm::Vec3) {\n\n self.attr_texel.data.clear();\n", "file_path": "fortress/src/lib/render/renderer/background_renderer.rs", "rank": 69, "score": 51988.72399681591 }, { "content": "fn dir_contains_res(path: &Path) -> StatusOr<bool> {\n\n for entry in fs::read_dir(path)\n\n .map_err(|e| format!(\"Couldn't read dir {:?}: {}\", path, e))? {\n\n let entry = entry\n\n .map_err(|e| format!(\"Couldn't read entry in {:?}: {}\", path, e))?;\n\n if entry.file_name() == OsString::from(\"res\") {\n\n return Ok(true)\n\n }\n\n }\n\n Ok(false)\n\n}\n\n\n", "file_path": "fortress_bake/src/file/util.rs", "rank": 70, "score": 51959.71900898253 }, { "content": "use crate::{\n\n items::{\n\n ItemConfig,\n\n ItemId,\n\n ItemPickup,\n\n state::{\n\n ItemBody,\n\n ItemState,\n\n ItemStateMachine,\n\n }\n\n },\n\n physics::PhysicsSimulation,\n\n render::{\n\n FullyIlluminatedSpriteRenderer,\n\n PointLight,\n\n },\n\n};\n\nuse nalgebra::Point2;\n\n\n\npub struct Item {\n", "file_path": "fortress/src/lib/items/item.rs", "rank": 71, "score": 51623.47870964306 }, { "content": " item_state: ItemState,\n\n item_state_machine: ItemStateMachine,\n\n}\n\n\n\nimpl Item {\n\n pub fn new(config: &ItemConfig, item_id: ItemId, item_pickup: ItemPickup, spawn: Point2<f64>, physics_sim: &mut PhysicsSimulation) -> Item {\n\n let item_body = ItemBody::new(config, item_id, spawn, physics_sim);\n\n let item_state = ItemState::new(item_body, item_pickup);\n\n let item_state_machine = ItemStateMachine::default();\n\n Item {\n\n item_state,\n\n item_state_machine\n\n }\n\n }\n\n\n\n pub fn point_light(&self, config: &ItemConfig) -> Option<PointLight> {\n\n self.item_state_machine.point_light(config, 
&self.item_state)\n\n }\n\n\n\n pub fn queue_draw(&self, config: &ItemConfig, sprite_renderer: &mut FullyIlluminatedSpriteRenderer) {\n", "file_path": "fortress/src/lib/items/item.rs", "rank": 72, "score": 51616.89934667133 }, { "content": " self.item_state_machine.queue_draw(config, &self.item_state, sprite_renderer);\n\n }\n\n\n\n pub fn collect(&mut self) {\n\n self.item_state_machine.collect();\n\n }\n\n\n\n pub fn collected(&self) -> bool {\n\n self.item_state_machine.collected()\n\n }\n\n\n\n pub fn item_pickup(&self) -> ItemPickup {\n\n self.item_state_machine.item_pickup(&self.item_state)\n\n }\n\n}\n", "file_path": "fortress/src/lib/items/item.rs", "rank": 73, "score": 51611.782832001954 }, { "content": "use crate::{\n\n audio::AudioPlayer,\n\n dimensions::{\n\n Attack,\n\n time::DeltaTime\n\n },\n\n enemies::{\n\n DamageTextWriter,\n\n EnemySystemConfig,\n\n EnemyConfig,\n\n EnemyGeneratorId,\n\n EnemyId,\n\n state::{\n\n EnemyBody,\n\n EnemyState,\n\n EnemyStateMachine,\n\n }\n\n },\n\n items::ItemSystem,\n\n particles::ParticleSystem,\n", "file_path": "fortress/src/lib/enemies/enemy.rs", "rank": 74, "score": 51515.59976598293 }, { "content": "\n\n pub fn take_attack(&mut self,\n\n config: &EnemySystemConfig,\n\n attack: Attack,\n\n bullet_direction: Option<Vector2<f64>>,\n\n particles: &mut ParticleSystem,\n\n damage_text: &mut DamageTextWriter) {\n\n self.enemy_state_machine.take_attack(config, attack, bullet_direction, &mut self.enemy_state, particles, damage_text);\n\n }\n\n\n\n pub fn dead(&self) -> bool {\n\n self.enemy_state_machine.dead()\n\n }\n\n\n\n pub fn generator_id(&self) -> EnemyGeneratorId {\n\n self.enemy_state.generator_id()\n\n }\n\n}\n", "file_path": "fortress/src/lib/enemies/enemy.rs", "rank": 75, "score": 51514.223719522866 }, { "content": " enemy_state,\n\n enemy_state_machine\n\n }\n\n }\n\n\n\n pub fn pre_update(&mut self, config: &EnemyConfig, dt: DeltaTime, player_locs: &Vec<Point2<f64>>) {\n\n if let Some(enemy_state_machine) 
= self.enemy_state_machine.pre_update(config, dt, player_locs, &mut self.enemy_state) {\n\n self.enemy_state_machine = enemy_state_machine;\n\n }\n\n }\n\n\n\n pub fn post_update(&mut self, config: &EnemyConfig, audio: &AudioPlayer, items: &mut ItemSystem, physics_sim: &mut PhysicsSimulation) {\n\n if let Some(enemy_state_machine) = self.enemy_state_machine.post_update(config, audio, &self.enemy_state, items, physics_sim) {\n\n self.enemy_state_machine = enemy_state_machine;\n\n }\n\n }\n\n\n\n pub fn queue_draw(&self, config: &EnemyConfig, sprite_renderer: &mut LightDependentSpriteRenderer) {\n\n self.enemy_state_machine.queue_draw(config, &self.enemy_state, sprite_renderer);\n\n }\n", "file_path": "fortress/src/lib/enemies/enemy.rs", "rank": 76, "score": 51512.23566864943 }, { "content": " physics::PhysicsSimulation,\n\n render::LightDependentSpriteRenderer,\n\n};\n\nuse nalgebra::{\n\n Point2,\n\n Vector2,\n\n};\n\n\n\npub struct Enemy {\n\n enemy_state: EnemyState,\n\n enemy_state_machine: EnemyStateMachine,\n\n}\n\n\n\nimpl Enemy {\n\n pub fn new(config: &EnemyConfig, enemy_id: EnemyId, generator_id: EnemyGeneratorId, spawn: Point2<f64>, physics_sim: &mut PhysicsSimulation) -> Enemy {\n\n let enemy_state = EnemyState::new(config, generator_id);\n\n let enemy_body = EnemyBody::new(config, enemy_id, spawn, physics_sim);\n\n let enemy_state_machine = EnemyStateMachine::new(enemy_body);\n\n\n\n Enemy {\n", "file_path": "fortress/src/lib/enemies/enemy.rs", "rank": 77, "score": 51507.381407656045 }, { "content": "use crate::{\n\n app::StatusOr,\n\n audio::{\n\n AudioConfig,\n\n Sound\n\n },\n\n file::{\n\n ConfigWatcher,\n\n SimpleConfigManager\n\n },\n\n};\n\nuse sdl2::mixer::{\n\n Chunk,\n\n self,\n\n};\n\nuse std::collections::HashMap;\n\n\n\nconst CHUNK_SIZE: i32 = 2048;\n\n\n\npub struct AudioPlayer {\n", "file_path": "fortress/src/lib/audio/audio_player.rs", "rank": 78, "score": 51461.019458744246 }, { "content": " config,\n\n chunks: sound_chunks?\n\n })\n\n 
}\n\n\n\n pub fn update(&mut self) {\n\n if self.config.update() {\n\n Self::set_volume(self.config.get());\n\n }\n\n }\n\n\n\n pub fn play_sound(&self, sound: Sound) {\n\n if let Some(chunk) = self.chunks.get(&sound) {\n\n let channel = mixer::Channel::all();\n\n match channel.play(chunk, 0) {\n\n _ => {}\n\n }\n\n }\n\n }\n\n\n", "file_path": "fortress/src/lib/audio/audio_player.rs", "rank": 79, "score": 51460.69849780525 }, { "content": " config: SimpleConfigManager<AudioConfig>,\n\n chunks: HashMap<Sound, Chunk>\n\n}\n\n\n\nimpl AudioPlayer {\n\n pub fn new(config_watcher: &mut ConfigWatcher) -> StatusOr<AudioPlayer> {\n\n mixer::open_audio(mixer::DEFAULT_FREQUENCY, mixer::DEFAULT_FORMAT, mixer::DEFAULT_CHANNELS, CHUNK_SIZE)\n\n .map_err(|e| format!(\"Error opening audio: {}\", e))?;\n\n\n\n let sound_chunks: StatusOr<HashMap<Sound, Chunk>> = Sound::all_sounds()\n\n .map(|sound| {\n\n let path_buf = sound.to_path_buf();\n\n Chunk::from_file(path_buf).map(|chunk| (sound, chunk))\n\n })\n\n .collect();\n\n\n\n let config = SimpleConfigManager::from_config_resource(config_watcher, \"audio.conf\")?;\n\n Self::set_volume(config.get());\n\n\n\n Ok(AudioPlayer {\n", "file_path": "fortress/src/lib/audio/audio_player.rs", "rank": 80, "score": 51460.38927273811 }, { "content": " fn set_volume(config: &AudioConfig) {\n\n let channel = mixer::Channel::all();\n\n channel.set_volume((config.sound_volume * mixer::MAX_VOLUME as f64) as i32);\n\n }\n\n}\n\n\n\nimpl Drop for AudioPlayer {\n\n fn drop(&mut self) {\n\n mixer::close_audio();\n\n }\n\n}", "file_path": "fortress/src/lib/audio/audio_player.rs", "rank": 81, "score": 51456.56536940626 }, { "content": "use nalgebra::Point2;\n\n\n\npub struct BloodParticleEvent {\n\n pub position: Point2<f64>,\n\n pub color: glm::Vec3,\n\n pub num_particles_to_generate: u32,\n\n}\n\n\n\npub struct HeroSwitchParticleEvent {\n\n pub position: Point2<f64>,\n\n}\n\n\n\npub enum ParticleEvent {\n\n Blood(BloodParticleEvent),\n\n 
HeroSwitch(HeroSwitchParticleEvent),\n\n}\n\n\n\nimpl ParticleEvent {\n\n pub fn blood(position: Point2<f64>, color: glm::Vec3, num_particles_to_generate: u32) -> Self {\n\n Self::Blood(BloodParticleEvent {\n", "file_path": "fortress/src/lib/particles/particle_event.rs", "rank": 82, "score": 51095.558918043476 }, { "content": "use crate::{\n\n app::StatusOr,\n\n dimensions::time::DeltaTime,\n\n file::{\n\n self,\n\n ConfigWatcher,\n\n SimpleConfigManager,\n\n },\n\n math::RandGen,\n\n particles::{\n\n BloodParticles,\n\n ParticleConfig,\n\n ParticleEvent,\n\n ParticleRenderView,\n\n particle_render_view::{\n\n BloomAttr,\n\n FloatAttr,\n\n Vec3Attr,\n\n },\n\n HeroSwitchParticles,\n", "file_path": "fortress/src/lib/particles/particle_system.rs", "rank": 83, "score": 51085.64115701753 }, { "content": " position,\n\n color,\n\n num_particles_to_generate,\n\n })\n\n }\n\n\n\n pub fn hero_switch(position: Point2<f64>) -> Self {\n\n Self::HeroSwitch(HeroSwitchParticleEvent {\n\n position\n\n })\n\n }\n\n}\n\n\n", "file_path": "fortress/src/lib/particles/particle_event.rs", "rank": 84, "score": 51082.67069019984 }, { "content": " self.attr_pos.data.clear();\n\n self.attr_color.data.clear();\n\n self.attr_bloom.data.clear();\n\n self.attr_alpha.data.clear();\n\n self.attr_size.data.clear();\n\n self.queued_events.clear();\n\n\n\n self.blood_particles.respawn();\n\n self.snow_particles.respawn();\n\n self.hero_switch_particles.respawn();\n\n }\n\n\n\n pub fn pre_update(&mut self, dt: DeltaTime) {\n\n self.config.update();\n\n let config = self.config.get();\n\n self.blood_particles.pre_update(&config.blood, dt);\n\n self.snow_particles.pre_update(&config.snow, dt);\n\n self.hero_switch_particles.pre_update(&config.hero_switch, dt);\n\n }\n\n\n", "file_path": "fortress/src/lib/particles/particle_system.rs", "rank": 85, "score": 51082.556951595594 }, { "content": "\n\n pub fn draw(&mut self, camera_geometry: &CameraGeometry) {\n\n let config = self.config.get();\n\n {\n\n 
let render_view = ParticleRenderView {\n\n attr_pos: &mut self.attr_pos.data,\n\n attr_color: &mut self.attr_color.data,\n\n attr_bloom: &mut self.attr_bloom.data,\n\n attr_alpha: &mut self.attr_alpha.data,\n\n attr_size: &mut self.attr_size.data,\n\n };\n\n self.blood_particles.queue_draw(&config.blood, render_view);\n\n }\n\n {\n\n let render_view = ParticleRenderView {\n\n attr_pos: &mut self.attr_pos.data,\n\n attr_color: &mut self.attr_color.data,\n\n attr_bloom: &mut self.attr_bloom.data,\n\n attr_alpha: &mut self.attr_alpha.data,\n\n attr_size: &mut self.attr_size.data,\n", "file_path": "fortress/src/lib/particles/particle_system.rs", "rank": 86, "score": 51080.0216461962 }, { "content": "use gl::{\n\n self,\n\n types::*\n\n};\n\nuse glm;\n\nuse nalgebra::{\n\n self,\n\n Point2,\n\n};\n\nuse std::{\n\n collections::HashMap,\n\n ffi::CString,\n\n};\n\n\n\n#[derive(Clone)]\n\npub struct LightDependentSpriteData {\n\n pub world_center_position: glm::Vec3,\n\n pub world_half_size: glm::Vec2,\n\n pub sprite_frame_id: SpriteSheetFrameId,\n\n pub frame: usize,\n\n pub unit_world_rotation: nalgebra::Vector2<f64>,\n\n pub reverse: Reverse,\n\n}\n\n\n\n#[derive(Copy, Clone, PartialEq, Eq, Hash)]\n", "file_path": "fortress/src/lib/render/renderer/light_dependent_sprite_renderer.rs", "rank": 87, "score": 51079.6584780239 }, { "content": " pub fn queue_event(&mut self, event: ParticleEvent) {\n\n self.queued_events.push(event);\n\n }\n\n\n\n pub fn post_update(&mut self, camera_stream_info: &CameraStreamInfo, rng: &mut RandGen) {\n\n let config = self.config.get();\n\n for event in self.queued_events.iter() {\n\n match event {\n\n ParticleEvent::Blood(ref event) => {\n\n self.blood_particles.add_event(&config.blood, event, rng);\n\n },\n\n ParticleEvent::HeroSwitch(ref event) => {\n\n self.hero_switch_particles.add_event(&config.hero_switch, event, rng);\n\n }\n\n }\n\n }\n\n\n\n self.queued_events.clear();\n\n self.snow_particles.post_update(&config.snow, 
camera_stream_info, rng);\n\n }\n", "file_path": "fortress/src/lib/particles/particle_system.rs", "rank": 88, "score": 51079.57569646223 }, { "content": " types::*\n\n};\n\nuse glm;\n\nuse nalgebra;\n\nuse std::{\n\n collections::HashMap,\n\n ffi::CString\n\n};\n\n\n\n#[derive(Clone)]\n\npub struct FullyIlluminatedSpriteData {\n\n pub world_center_position: glm::Vec3,\n\n pub world_half_size: glm::Vec2,\n\n pub sprite_frame_id: SpriteSheetFrameId,\n\n pub frame: usize,\n\n pub unit_world_rotation: nalgebra::Vector2<f64>,\n\n pub reverse: Reverse,\n\n pub bloom_intensity: f32,\n\n}\n\n\n\n#[derive(Copy, Clone, PartialEq, Eq, Hash)]\n", "file_path": "fortress/src/lib/render/renderer/fully_illuminated_sprite_renderer.rs", "rank": 89, "score": 51076.54313504937 }, { "content": " attribute_program: AttributeProgram,\n\n attr_pos: Attribute<Vec3Attr>,\n\n attr_color: Attribute<Vec3Attr>,\n\n attr_bloom: Attribute<BloomAttr>,\n\n attr_alpha: Attribute<FloatAttr>,\n\n attr_size: Attribute<FloatAttr>,\n\n queued_events: Vec<ParticleEvent>,\n\n blood_particles: BloodParticles,\n\n snow_particles: SnowParticles,\n\n hero_switch_particles: HeroSwitchParticles,\n\n}\n\n\n\nimpl ParticleSystem {\n\n pub fn new(config_watcher: &mut ConfigWatcher) -> StatusOr<ParticleSystem> {\n\n let config = SimpleConfigManager::<ParticleConfig>::from_config_resource(config_watcher, \"particle.conf\")?;\n\n let vertex = file::util::resource_path(\"shaders\", \"particle_vert.glsl\");\n\n let geometry = file::util::resource_path(\"shaders\", \"particle_geo.glsl\");\n\n let fragment = file::util::resource_path(\"shaders\", \"particle_frag.glsl\");\n\n let shader_program = ShaderProgram::from_long_pipeline(&vertex, &geometry, &fragment)?;\n\n\n", "file_path": "fortress/src/lib/particles/particle_system.rs", "rank": 90, "score": 51076.258375170735 }, { "content": " };\n\n self.snow_particles.queue_draw(&config.snow, render_view);\n\n }\n\n {\n\n let render_view = ParticleRenderView {\n\n attr_pos: 
&mut self.attr_pos.data,\n\n attr_color: &mut self.attr_color.data,\n\n attr_bloom: &mut self.attr_bloom.data,\n\n attr_alpha: &mut self.attr_alpha.data,\n\n attr_size: &mut self.attr_size.data,\n\n };\n\n self.hero_switch_particles.queue_draw(&config.hero_switch, render_view);\n\n }\n\n\n\n self.shader_program.activate();\n\n self.attribute_program.activate();\n\n\n\n self.shader_program.set_mat4(UniformKey::ProjectionView, &camera_geometry.projection_view);\n\n self.shader_program.set_vec3(UniformKey::CameraRight, &camera_geometry.isometric_right);\n\n self.shader_program.set_vec3(UniformKey::CameraUp, &camera_geometry.isometric_up);\n", "file_path": "fortress/src/lib/particles/particle_system.rs", "rank": 91, "score": 51076.19646147142 }, { "content": " Vec::with_capacity(config.initial_particle_events_limit_guess))\n\n };\n\n\n\n Ok(ParticleSystem {\n\n config,\n\n shader_program,\n\n attribute_program,\n\n attr_pos,\n\n attr_color,\n\n attr_bloom,\n\n attr_alpha,\n\n attr_size,\n\n queued_events,\n\n blood_particles,\n\n snow_particles,\n\n hero_switch_particles,\n\n })\n\n }\n\n\n\n pub fn respawn(&mut self) {\n", "file_path": "fortress/src/lib/particles/particle_system.rs", "rank": 92, "score": 51075.82002298513 }, { "content": " let mut attribute_program_builder = AttributeProgram::builder();\n\n let mut attr_pos = attribute_program_builder.add_attribute();\n\n let mut attr_color = attribute_program_builder.add_attribute();\n\n let mut attr_bloom = attribute_program_builder.add_attribute();\n\n let mut attr_alpha = attribute_program_builder.add_attribute();\n\n let mut attr_size = attribute_program_builder.add_attribute();\n\n let attribute_program = attribute_program_builder.build();\n\n\n\n let (blood_particles, snow_particles, hero_switch_particles, queued_events) = {\n\n let config = config.get();\n\n let total_particle_limit = config.blood.particle_limit + config.snow.particle_limit + config.hero_switch.particle_limit;\n\n 
attr_pos.data.reserve(total_particle_limit);\n\n attr_color.data.reserve(total_particle_limit);\n\n attr_bloom.data.reserve(total_particle_limit);\n\n attr_alpha.data.reserve(total_particle_limit);\n\n attr_size.data.reserve(total_particle_limit);\n\n\n\n (BloodParticles::new(&config.blood),\n\n SnowParticles::new(&config.snow),\n\n HeroSwitchParticles::new(&config.hero_switch),\n", "file_path": "fortress/src/lib/particles/particle_system.rs", "rank": 93, "score": 51074.693694985675 }, { "content": " SnowParticles,\n\n },\n\n render::{\n\n Attribute,\n\n AttributeProgram,\n\n CameraGeometry,\n\n CameraStreamInfo,\n\n ShaderProgram,\n\n ShaderUniformKey,\n\n },\n\n};\n\nuse gl::types::GLsizei;\n\nuse std::ffi::CString;\n\n\n\n#[derive(Copy, Clone, PartialEq, Eq, Hash)]\n", "file_path": "fortress/src/lib/particles/particle_system.rs", "rank": 94, "score": 51071.71724821736 }, { "content": " let attribute_program = attribute_program_builder.build();\n\n\n\n Ok(LightDependentSpriteRenderer {\n\n shader_program,\n\n attribute_program,\n\n attr_pos,\n\n attr_size,\n\n attr_texel,\n\n attr_rot,\n\n per_pack_attrs: HashMap::new(),\n\n camera_stream_info: None,\n\n })\n\n }\n\n\n\n pub fn set_camera_stream_info(&mut self, camera_stream_info: CameraStreamInfo) {\n\n self.camera_stream_info = Some(camera_stream_info);\n\n }\n\n\n\n pub fn queue(&mut self, datum: LightDependentSpriteData) {\n\n if let Some(ref camera_stream_info) = self.camera_stream_info {\n", "file_path": "fortress/src/lib/render/renderer/light_dependent_sprite_renderer.rs", "rank": 95, "score": 51065.98327955043 }, { "content": "\n\npub struct FullyIlluminatedSpriteRenderer {\n\n shader_program: ShaderProgram<UniformKey>,\n\n attribute_program: AttributeProgram,\n\n attr_pos: Attribute<SpritePositionAttr>,\n\n attr_size: Attribute<SpriteSizeAttr>,\n\n attr_texel: Attribute<Texel>,\n\n attr_rot: Attribute<RotationAttr>,\n\n attr_bloom_intensity: Attribute<BloomIntensityAttr>,\n\n per_pack_attrs: 
HashMap<NamedSpriteSheet, Vec<FullyIlluminatedSpriteData>>,\n\n}\n\n\n\nimpl FullyIlluminatedSpriteRenderer {\n\n pub fn new() -> StatusOr<FullyIlluminatedSpriteRenderer> {\n\n let vertex = file::util::resource_path(\"shaders\", \"full_light_sprite_vert.glsl\");\n\n let geometry = file::util::resource_path(\"shaders\", \"full_light_sprite_geo.glsl\");\n\n let fragment = file::util::resource_path(\"shaders\", \"full_light_sprite_frag.glsl\");\n\n let shader_program = ShaderProgram::from_long_pipeline(&vertex, &geometry, &fragment)?;\n\n\n\n let mut attribute_program_builder = AttributeProgram::builder();\n", "file_path": "fortress/src/lib/render/renderer/fully_illuminated_sprite_renderer.rs", "rank": 96, "score": 51065.80479957095 }, { "content": " let attr_pos = attribute_program_builder.add_attribute();\n\n let attr_size = attribute_program_builder.add_attribute();\n\n let attr_texel = attribute_program_builder.add_attribute();\n\n let attr_rot = attribute_program_builder.add_attribute();\n\n let attr_bloom_intensity = attribute_program_builder.add_attribute();\n\n let attribute_program = attribute_program_builder.build();\n\n\n\n Ok(FullyIlluminatedSpriteRenderer {\n\n shader_program,\n\n attribute_program,\n\n attr_pos,\n\n attr_size,\n\n attr_texel,\n\n attr_rot,\n\n attr_bloom_intensity,\n\n per_pack_attrs: HashMap::new(),\n\n })\n\n }\n\n\n\n pub fn queue(&mut self, data: impl IntoIterator<Item = FullyIlluminatedSpriteData>) {\n", "file_path": "fortress/src/lib/render/renderer/fully_illuminated_sprite_renderer.rs", "rank": 97, "score": 51065.53332496249 }, { "content": "use crate::{\n\n app::StatusOr,\n\n dimensions::Reverse,\n\n file,\n\n render::{\n\n attribute,\n\n Attribute,\n\n AttributeProgram,\n\n CameraGeometry,\n\n NamedSpriteSheet,\n\n SpriteSheetFrameId,\n\n SpriteSheetTextureManager,\n\n ShaderProgram,\n\n ShaderUniformKey,\n\n Texel,\n\n TextureUnit,\n\n }\n\n};\n\nuse gl::{\n\n self,\n", "file_path": 
"fortress/src/lib/render/renderer/fully_illuminated_sprite_renderer.rs", "rank": 98, "score": 51064.88330669005 }, { "content": " let world_position = Point2::new(datum.world_center_position.x as f64, -datum.world_center_position.z as f64);\n\n if !camera_stream_info.is_point_inside(world_position) {\n\n // Ignore points not inside camera stream inner bounds.\n\n return;\n\n }\n\n }\n\n\n\n self.per_pack_attrs\n\n .entry(datum.sprite_frame_id.sprite_sheet())\n\n .or_insert(Vec::new())\n\n .push(datum);\n\n }\n\n\n\n pub fn draw(&mut self, lights: &PointLights, textures: &SpriteSheetTextureManager, camera_geometry: &CameraGeometry) {\n\n self.shader_program.activate();\n\n self.attribute_program.activate();\n\n\n\n self.shader_program.set_mat4(UniformKey::ProjectionView, &camera_geometry.projection_view);\n\n self.shader_program.set_mat4(UniformKey::PositionIndependentView, &camera_geometry.isometric_view);\n\n self.shader_program.set_vec3(UniformKey::CameraRight, &camera_geometry.isometric_right);\n", "file_path": "fortress/src/lib/render/renderer/light_dependent_sprite_renderer.rs", "rank": 99, "score": 51064.45932798878 } ]
Rust
examples/box_game/box_game_p2p.rs
johanhelsing/ggrs
6d1b7d6112692619a8638646cf5229a085bc3986
extern crate freetype as ft; use ggrs::{GGRSEvent, PlayerType, SessionState}; use glutin_window::GlutinWindow as Window; use opengl_graphics::{GlGraphics, OpenGL}; use piston::event_loop::{EventSettings, Events}; use piston::input::{RenderEvent, UpdateEvent}; use piston::window::WindowSettings; use piston::{Button, EventLoop, IdleEvent, Key, PressEvent, ReleaseEvent}; use std::net::SocketAddr; use structopt::StructOpt; const FPS: u64 = 60; const INPUT_SIZE: usize = std::mem::size_of::<u8>(); const WINDOW_HEIGHT: u32 = 800; const WINDOW_WIDTH: u32 = 600; mod box_game; #[derive(StructOpt)] struct Opt { #[structopt(short, long)] local_port: u16, #[structopt(short, long)] players: Vec<String>, #[structopt(short, long)] spectators: Vec<SocketAddr>, } fn main() -> Result<(), Box<dyn std::error::Error>> { let opt = Opt::from_args(); let mut local_handle = 0; let num_players = opt.players.len(); assert!(num_players > 0); let mut sess = ggrs::start_p2p_session(num_players as u32, INPUT_SIZE, opt.local_port)?; sess.set_sparse_saving(true)?; for (i, player_addr) in opt.players.iter().enumerate() { if player_addr == "localhost" { sess.add_player(PlayerType::Local, i)?; local_handle = i; } else { let remote_addr: SocketAddr = player_addr.parse()?; sess.add_player(PlayerType::Remote(remote_addr), i)?; } } for (i, spec_addr) in opt.spectators.iter().enumerate() { sess.add_player(PlayerType::Spectator(*spec_addr), num_players + i)?; } sess.set_frame_delay(4, local_handle)?; sess.set_fps(FPS as u32)?; sess.start_session()?; let opengl = OpenGL::V3_2; let mut window: Window = WindowSettings::new("Box Game", [WINDOW_WIDTH, WINDOW_HEIGHT]) .graphics_api(opengl) .exit_on_esc(true) .build() .unwrap(); let mut game = box_game::BoxGame::new(num_players); let mut gl = GlGraphics::new(opengl); let mut event_settings = EventSettings::new(); event_settings.set_ups(FPS); event_settings.set_max_fps(FPS); let mut events = Events::new(event_settings); let mut frames_to_skip = 0; while let Some(e) 
= events.next(&mut window) { if let Some(args) = e.render_args() { game.render(&mut gl, &args); } if let Some(_) = e.update_args() { if frames_to_skip > 0 { frames_to_skip -= 1; println!("Skipping a frame: WaitRecommendation"); continue; } if sess.current_state() == SessionState::Running { let local_input = game.local_input(0); match sess.advance_frame(local_handle, &local_input) { Ok(requests) => game.handle_requests(requests), Err(ggrs::GGRSError::PredictionThreshold) => { } Err(e) => return Err(Box::new(e)), } if game.current_frame() % 120 == 0 { for i in 0..num_players { if let Ok(stats) = sess.network_stats(i) { println!("NetworkStats to player {}: {:?}", i, stats); } } } } } if let Some(_args) = e.idle_args() { sess.poll_remote_clients(); for event in sess.events() { if let GGRSEvent::WaitRecommendation { skip_frames } = event { frames_to_skip += skip_frames } println!("Event: {:?}", event); } } if let Some(Button::Keyboard(key)) = e.press_args() { match key { Key::W => game.key_states[0] = true, Key::A => game.key_states[1] = true, Key::S => game.key_states[2] = true, Key::D => game.key_states[3] = true, _ => (), } } if let Some(Button::Keyboard(key)) = e.release_args() { match key { Key::W => game.key_states[0] = false, Key::A => game.key_states[1] = false, Key::S => game.key_states[2] = false, Key::D => game.key_states[3] = false, _ => (), } } } Ok(()) }
extern crate freetype as ft; use ggrs::{GGRSEvent, PlayerType, SessionState}; use glutin_window::GlutinWindow as Window; use opengl_graphics::{GlGraphics, OpenGL}; use piston::event_loop::{EventSettings, Events}; use piston::input::{RenderEvent, UpdateEvent}; use piston::window::WindowSettings; use piston::{Button, EventLoop, IdleEvent, Key, PressEvent, ReleaseEvent}; use std::net::SocketAddr; use structopt::StructOpt; const FPS: u64 = 60; const INPUT_SIZE: usize = std::mem::size_of::<u8>(); const WINDOW_HEIGHT: u32 = 800; const WINDOW_WIDTH: u32 = 600; mod box_game; #[derive(StructOpt)] struct Opt { #[structopt(short, long)] local_port: u16, #[structopt(short, long)] players: Vec<String>, #[structopt(short, long)] spectators: Vec<SocketAddr>, } fn main() -> Result<(), Box<dyn std::error::Error>> { let opt = Opt::from_args(); let mut local_handle = 0; let num_players = opt.players.len(); assert!(num_players > 0); let mut sess = ggrs::start_p2p_session(num_players as u32, INPUT_SIZE, opt.local_port)?; sess.set_sparse_saving(true)?; for (i, player_addr) in opt.players.iter().enumerate() { if player_addr == "localhost" { sess.add_player(PlayerType::Local, i)?; local_handle = i; } else { let remote_addr: SocketAddr = player_addr.parse()?; sess.add_player(PlayerType::Remote(remote_addr), i)?; } } for (i, spec_addr) in opt.spectators.iter().enumerate() { sess.add_player(PlayerType::Spectator(*spec_addr), num_players + i)?; } sess.set_frame_delay(4, local_handle)?; sess.set_fps(FPS as u32)?; sess.start_session()?; let opengl = OpenGL::V3_2; let mut window: Window = WindowSettings::new("Box Game", [WINDOW_WIDTH, WINDOW_HEIGHT]) .graphics_api(opengl) .exit_on_esc(true) .build() .unwrap(); let mut game = box_game::BoxGame::new(num_players); let mut gl = GlGraphics::new(opengl);
let mut event_settings = EventSettings::new(); event_settings.set_ups(FPS); event_settings.set_max_fps(FPS); let mut events = Events::new(event_settings); let mut frames_to_skip = 0; while let Some(e) = events.next(&mut window) { if let Some(args) = e.render_args() { game.render(&mut gl, &args); } if let Some(_) = e.update_args() { if frames_to_skip > 0 { frames_to_skip -= 1; println!("Skipping a frame: WaitRecommendation"); continue; } if sess.current_state() == SessionState::Running { let local_input = game.local_input(0); match sess.advance_frame(local_handle, &local_input) { Ok(requests) => game.handle_requests(requests), Err(ggrs::GGRSError::PredictionThreshold) => { } Err(e) => return Err(Box::new(e)), } if game.current_frame() % 120 == 0 { for i in 0..num_players { if let Ok(stats) = sess.network_stats(i) { println!("NetworkStats to player {}: {:?}", i, stats); } } } } } if let Some(_args) = e.idle_args() { sess.poll_remote_clients(); for event in sess.events() { if let GGRSEvent::WaitRecommendation { skip_frames } = event { frames_to_skip += skip_frames } println!("Event: {:?}", event); } } if let Some(Button::Keyboard(key)) = e.press_args() { match key { Key::W => game.key_states[0] = true, Key::A => game.key_states[1] = true, Key::S => game.key_states[2] = true, Key::D => game.key_states[3] = true, _ => (), } } if let Some(Button::Keyboard(key)) = e.release_args() { match key { Key::W => game.key_states[0] = false, Key::A => game.key_states[1] = false, Key::S => game.key_states[2] = false, Key::D => game.key_states[3] = false, _ => (), } } } Ok(()) }
function_block-function_prefix_line
[ { "content": "#[derive(StructOpt)]\n\nstruct Opt {\n\n #[structopt(short, long)]\n\n local_port: u16,\n\n #[structopt(short, long)]\n\n num_players: usize,\n\n #[structopt(short, long)]\n\n host: SocketAddr,\n\n}\n\n\n", "file_path": "examples/box_game/box_game_spectator.rs", "rank": 0, "score": 155777.40958452603 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n // read cmd line arguments\n\n let opt = Opt::from_args();\n\n\n\n // create a GGRS session for a spectator\n\n let mut sess = ggrs::start_p2p_spectator_session(\n\n opt.num_players as u32,\n\n INPUT_SIZE,\n\n opt.local_port,\n\n opt.host,\n\n )?;\n\n\n\n // change catch-up parameters, if desired\n\n sess.set_max_frames_behind(5)?; // when the spectator is more than this amount of frames behind, it will catch up\n\n sess.set_catchup_speed(2)?; // set this to 1 if you don't want any catch-ups\n\n\n\n // start the GGRS session\n\n sess.start_session()?;\n\n\n\n // Change this to OpenGL::V2_1 if not working\n", "file_path": "examples/box_game/box_game_spectator.rs", "rank": 1, "score": 145593.90640010286 }, { "content": "#[derive(StructOpt)]\n\nstruct Opt {\n\n #[structopt(short, long)]\n\n num_players: usize,\n\n #[structopt(short, long)]\n\n check_distance: u32,\n\n}\n\n\n", "file_path": "examples/box_game/box_game_synctest.rs", "rank": 2, "score": 129386.85011903301 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n // read cmd line arguments\n\n let opt = Opt::from_args();\n\n\n\n // create a GGRS session\n\n let mut sess =\n\n ggrs::start_synctest_session(opt.num_players as u32, INPUT_SIZE, opt.check_distance)?;\n\n\n\n // set input delay for any player you want\n\n for i in 0..opt.num_players {\n\n sess.set_frame_delay(2, i)?;\n\n }\n\n\n\n // Change this to OpenGL::V2_1 if not working\n\n let opengl = OpenGL::V3_2;\n\n\n\n // Create a Glutin window\n\n let mut window: Window =\n\n WindowSettings::new(\"Box Game Synctest\", [WINDOW_WIDTH, 
WINDOW_HEIGHT])\n\n .graphics_api(opengl)\n", "file_path": "examples/box_game/box_game_synctest.rs", "rank": 5, "score": 124552.30220864451 }, { "content": "fn glyphs(face: &mut ft::Face, text: &str) -> Vec<(Texture, [f64; 2])> {\n\n let mut x = 10;\n\n let mut y = 0;\n\n let mut res = vec![];\n\n for ch in text.chars() {\n\n face.load_char(ch as usize, ft::face::LoadFlag::RENDER)\n\n .unwrap();\n\n let g = face.glyph();\n\n\n\n let bitmap = g.bitmap();\n\n let texture = Texture::from_memory_alpha(\n\n bitmap.buffer(),\n\n bitmap.width() as u32,\n\n bitmap.rows() as u32,\n\n &TextureSettings::new(),\n\n )\n\n .unwrap();\n\n res.push((\n\n texture,\n\n [(x + g.bitmap_left()) as f64, (y - g.bitmap_top()) as f64],\n\n ));\n\n\n\n x += (g.advance().x >> 6) as i32;\n\n y += (g.advance().y >> 6) as i32;\n\n }\n\n res\n\n}\n\n\n", "file_path": "examples/box_game/box_game.rs", "rank": 6, "score": 112045.46801294919 }, { "content": "fn render_text<G, T>(glyphs: &[(T, [f64; 2])], c: &Context, gl: &mut G)\n\nwhere\n\n G: Graphics<Texture = T>,\n\n T: ImageSize,\n\n{\n\n for &(ref texture, [x, y]) in glyphs {\n\n use graphics::*;\n\n\n\n Image::new_color(color::WHITE).draw(texture, &c.draw_state, c.transform.trans(x, y), gl);\n\n }\n\n}\n\n\n\n// BoxGame will handle rendering, gamestate, inputs and GGRSRequests\n\npub struct BoxGame {\n\n num_players: usize,\n\n game_state: BoxGameState,\n\n pub key_states: [bool; 8],\n\n font: PathBuf,\n\n freetype: Library,\n\n last_checksum: (Frame, u64),\n", "file_path": "examples/box_game/box_game.rs", "rank": 7, "score": 108247.78499270452 }, { "content": "/// computes the fletcher16 checksum, copied from wikipedia: <https://en.wikipedia.org/wiki/Fletcher%27s_checksum>\n\nfn fletcher16(data: &[u8]) -> u16 {\n\n let mut sum1: u16 = 0;\n\n let mut sum2: u16 = 0;\n\n\n\n for index in 0..data.len() {\n\n sum1 = (sum1 + data[index] as u16) % 255;\n\n sum2 = (sum2 + sum1) % 255;\n\n }\n\n\n\n (sum2 << 8) | sum1\n\n}\n\n\n", "file_path": 
"examples/box_game/box_game.rs", "rank": 8, "score": 107932.10899629479 }, { "content": "fn glyphs(face: &mut ft::Face, text: &str) -> Vec<(Texture, [f64; 2])> {\n\n let mut x = 10;\n\n let mut y = 0;\n\n let mut res = vec![];\n\n for ch in text.chars() {\n\n face.load_char(ch as usize, ft::face::LoadFlag::RENDER)\n\n .unwrap();\n\n let g = face.glyph();\n\n\n\n let bitmap = g.bitmap();\n\n let texture = Texture::from_memory_alpha(\n\n bitmap.buffer(),\n\n bitmap.width() as u32,\n\n bitmap.rows() as u32,\n\n &TextureSettings::new(),\n\n )\n\n .unwrap();\n\n res.push((\n\n texture,\n\n [(x + g.bitmap_left()) as f64, (y - g.bitmap_top()) as f64],\n\n ));\n\n\n\n x += (g.advance().x >> 6) as i32;\n\n y += (g.advance().y >> 6) as i32;\n\n }\n\n res\n\n}\n\n\n", "file_path": "examples/rapier/rapier_game.rs", "rank": 9, "score": 105453.48260812738 }, { "content": "/// computes the fletcher16 checksum, copied from wikipedia: <https://en.wikipedia.org/wiki/Fletcher%27s_checksum>\n\nfn fletcher16(data: &[u8]) -> u16 {\n\n let mut sum1: u16 = 0;\n\n let mut sum2: u16 = 0;\n\n\n\n for index in 0..data.len() {\n\n sum1 = (sum1 + data[index] as u16) % 255;\n\n sum2 = (sum2 + sum1) % 255;\n\n }\n\n\n\n (sum2 << 8) | sum1\n\n}\n\n\n", "file_path": "examples/rapier/rapier_game.rs", "rank": 10, "score": 101602.81147550611 }, { "content": "fn render_text<G, T>(glyphs: &[(T, [f64; 2])], c: &Context, gl: &mut G)\n\nwhere\n\n G: Graphics<Texture = T>,\n\n T: ImageSize,\n\n{\n\n for &(ref texture, [x, y]) in glyphs {\n\n use graphics::*;\n\n\n\n Image::new_color(color::WHITE).draw(texture, &c.draw_state, c.transform.trans(x, y), gl);\n\n }\n\n}\n\n\n\n// RapierGame will handle rendering, gamestate, inputs and GGRSRequests\n\npub struct RapierGame {\n\n num_players: usize,\n\n state: RapierState,\n\n font: PathBuf,\n\n freetype: Library,\n\n last_checksum: (Frame, u64),\n\n periodic_checksum: (Frame, u64),\n", "file_path": "examples/rapier/rapier_game.rs", "rank": 11, "score": 
101554.89539455649 }, { "content": "#[derive(StructOpt)]\n\nstruct Opt {\n\n #[structopt(short, long)]\n\n num_bodies: usize,\n\n #[structopt(short, long)]\n\n check_distance: u32,\n\n}\n\n\n", "file_path": "examples/rapier/rapier_synctest.rs", "rank": 12, "score": 99334.96290072815 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let num_players = 1;\n\n // read cmd line arguments\n\n let opt = Opt::from_args();\n\n\n\n // create a GGRS session\n\n let mut sess =\n\n ggrs::start_synctest_session(num_players as u32, INPUT_SIZE, opt.check_distance)?;\n\n\n\n // set input delay for any player you want\n\n for i in 0..num_players {\n\n sess.set_frame_delay(2, i)?;\n\n }\n\n\n\n // Change this to OpenGL::V2_1 if not working\n\n let opengl = OpenGL::V3_2;\n\n\n\n // Create a Glutin window\n\n let mut window: Window = WindowSettings::new(\"Rapier Synctest\", [WINDOW_WIDTH, WINDOW_HEIGHT])\n\n .graphics_api(opengl)\n", "file_path": "examples/rapier/rapier_synctest.rs", "rank": 13, "score": 99305.88927913297 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct BoxGameState {\n\n pub frame: i32,\n\n pub num_players: usize,\n\n pub positions: Vec<(f64, f64)>,\n\n pub velocities: Vec<(f64, f64)>,\n\n pub rotations: Vec<f64>,\n\n}\n\n\n\nimpl BoxGameState {\n\n pub fn new(num_players: usize) -> Self {\n\n let mut positions = Vec::new();\n\n let mut velocities = Vec::new();\n\n let mut rotations = Vec::new();\n\n\n\n let r = WINDOW_WIDTH as f64 / 4.0;\n\n\n\n for i in 0..num_players as i32 {\n\n let rot = i as f64 / num_players as f64 * 2.0 * std::f64::consts::PI;\n\n let x = WINDOW_WIDTH as f64 / 2.0 + r * rot.cos();\n\n let y = WINDOW_HEIGHT as f64 / 2.0 + r * rot.sin();\n", "file_path": "examples/box_game/box_game.rs", "rank": 14, "score": 84807.56016748221 }, { "content": "/// Computes the fletcher16 checksum, copied from wikipedia: <https://en.wikipedia.org/wiki/Fletcher%27s_checksum>\n\nfn fletcher16(data: &[u8]) -> u16 {\n\n let mut 
sum1: u16 = 0;\n\n let mut sum2: u16 = 0;\n\n\n\n for index in 0..data.len() {\n\n sum1 = (sum1 + data[index] as u16) % 255;\n\n sum2 = (sum2 + sum1) % 255;\n\n }\n\n\n\n (sum2 << 8) | sum1\n\n}\n\n\n\n/// Represents a serialized game state of your game for a single frame. The buffer `buffer` holds your state, `frame` indicates the associated frame number\n\n/// and `checksum` can additionally be provided for use during a `SyncTestSession`. You are expected to return this during `save_game_state()` and use them during `load_game_state()`.\n\n#[derive(Debug, Clone)]\n\npub struct GameState {\n\n /// The frame to which this info belongs to.\n\n pub frame: Frame,\n\n /// The serialized gamestate in bytes.\n\n pub buffer: Option<Vec<u8>>,\n", "file_path": "src/frame_info.rs", "rank": 15, "score": 78978.11562921335 }, { "content": "#[derive(Serialize, Deserialize)]\n\nstruct RapierState {\n\n pub frame: i32,\n\n pub num_players: usize,\n\n\n\n // rapier stuff\n\n bodies: RigidBodySet,\n\n colliders: ColliderSet,\n\n joint_set: JointSet,\n\n broad_phase: BroadPhase,\n\n narrow_phase: NarrowPhase,\n\n island_manager: IslandManager,\n\n\n\n cube_handles: Vec<RigidBodyHandle>,\n\n sphere_handles: Vec<RigidBodyHandle>,\n\n}\n\n\n\nimpl RapierState {\n\n pub fn new(num_players: usize, num_bodies: usize) -> Self {\n\n /*\n\n * World\n", "file_path": "examples/rapier/rapier_game.rs", "rank": 16, "score": 75191.98532758183 }, { "content": "#[test]\n\n#[serial]\n\nfn test_disconnect_player() {\n\n let mut sess = ggrs::start_p2p_session(2, stubs::INPUT_SIZE, 7777).unwrap();\n\n let addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 8080);\n\n assert!(sess.add_player(ggrs::PlayerType::Local, 0).is_ok());\n\n assert!(sess.add_player(ggrs::PlayerType::Remote(addr), 1).is_ok());\n\n assert!(sess.start_session().is_ok());\n\n assert!(sess.disconnect_player(0).is_err()); // for now, local players cannot be disconnected\n\n assert!(sess.disconnect_player(1).is_ok());\n\n 
assert!(sess.disconnect_player(1).is_err()); // already disconnected\n\n}\n\n\n", "file_path": "tests/test_p2p_session.rs", "rank": 17, "score": 69211.16527678615 }, { "content": "#[test]\n\n#[serial]\n\nfn test_add_player() {\n\n let mut sess = ggrs::start_p2p_session(2, stubs::INPUT_SIZE, 7777).unwrap();\n\n let addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 8080);\n\n assert!(sess.add_player(PlayerType::Local, 0).is_ok());\n\n assert!(sess.add_player(PlayerType::Remote(addr), 1).is_ok());\n\n assert!(sess.add_player(PlayerType::Remote(addr), 1).is_err()); // handle already registered\n\n assert!(sess.add_player(PlayerType::Remote(addr), 2).is_err()); // invalid handle\n\n assert!(sess.add_player(PlayerType::Spectator(addr), 2).is_ok());\n\n assert!(sess.add_player(PlayerType::Spectator(addr), 2).is_err()); // specatator handle already registered\n\n assert!(sess.start_session().is_ok());\n\n assert!(sess.add_player(ggrs::PlayerType::Remote(addr), 1).is_err()); // cannot add player after starting\n\n}\n\n\n", "file_path": "tests/test_p2p_session.rs", "rank": 18, "score": 69211.16527678615 }, { "content": "/// Used to create a new `P2PSpectatorSession` for a spectator.\n\n/// The session will receive inputs from all players from the given host directly.\n\n/// # Example\n\n///\n\n/// ```\n\n/// # use std::net::SocketAddr;\n\n/// # fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n/// let local_port: u16 = 7777;\n\n/// let num_players : u32 = 2;\n\n/// let input_size : usize = std::mem::size_of::<u32>();\n\n/// let host_addr: SocketAddr = \"127.0.0.1:8888\".parse()?;\n\n/// let mut sess = ggrs::start_p2p_spectator_session(num_players, input_size, local_port, host_addr)?;\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\n///\n\n/// # Errors\n\n/// - Will return a `InvalidRequest` if the number of players is higher than the allowed maximum (see `MAX_PLAYERS`).\n\n/// - Will return a `InvalidRequest` if `input_size` is higher than the allowed maximum 
(see `MAX_INPUT_BYTES`).\n\n/// - Will return `SocketCreationFailed` if the UPD socket could not be created.\n\npub fn start_p2p_spectator_session(\n\n num_players: u32,\n\n input_size: usize,\n\n local_port: u16,\n\n host_addr: SocketAddr,\n\n) -> Result<P2PSpectatorSession, GGRSError> {\n\n if num_players > MAX_PLAYERS {\n\n return Err(GGRSError::InvalidRequest {\n\n info: \"Too many players.\".to_owned(),\n\n });\n\n }\n\n if input_size > MAX_INPUT_BYTES {\n\n return Err(GGRSError::InvalidRequest {\n\n info: \"Input size too big.\".to_owned(),\n\n });\n\n }\n\n P2PSpectatorSession::new(num_players, input_size, local_port, host_addr)\n\n .map_err(|_| GGRSError::SocketCreationFailed)\n\n}\n", "file_path": "src/lib.rs", "rank": 19, "score": 66818.83271968122 }, { "content": "#[test]\n\n#[serial]\n\nfn test_start_session() {\n\n let host_addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 7777);\n\n let mut spec_sess =\n\n ggrs::start_p2p_spectator_session(1, stubs::INPUT_SIZE, 9999, host_addr).unwrap();\n\n assert!(spec_sess.start_session().is_ok());\n\n assert!(spec_sess.current_state() == SessionState::Synchronizing);\n\n}\n\n\n", "file_path": "tests/test_p2p_spectator_session.rs", "rank": 20, "score": 66619.534105492 }, { "content": "#[test]\n\n#[serial]\n\nfn test_synchronize_with_host() {\n\n let host_addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 7777);\n\n let spec_addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 8888);\n\n\n\n let mut host_sess = ggrs::start_p2p_session(1, stubs::INPUT_SIZE, 7777).unwrap();\n\n let mut spec_sess =\n\n ggrs::start_p2p_spectator_session(1, stubs::INPUT_SIZE, 8888, host_addr).unwrap();\n\n\n\n host_sess.add_player(PlayerType::Local, 0).unwrap();\n\n host_sess\n\n .add_player(PlayerType::Spectator(spec_addr), 2)\n\n .unwrap();\n\n\n\n host_sess.start_session().unwrap();\n\n spec_sess.start_session().unwrap();\n\n\n\n assert_eq!(spec_sess.current_state(), 
SessionState::Synchronizing);\n\n assert_eq!(host_sess.current_state(), SessionState::Synchronizing);\n\n\n\n for _ in 0..10 {\n\n spec_sess.poll_remote_clients();\n\n host_sess.poll_remote_clients();\n\n }\n\n\n\n assert_eq!(spec_sess.current_state(), SessionState::Running);\n\n assert_eq!(host_sess.current_state(), SessionState::Running);\n\n}\n", "file_path": "tests/test_p2p_spectator_session.rs", "rank": 21, "score": 66619.534105492 }, { "content": "#[test]\n\n#[serial]\n\nfn test_create_session() {\n\n let host_addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 7777);\n\n assert!(ggrs::start_p2p_spectator_session(1, stubs::INPUT_SIZE, 9999, host_addr).is_ok());\n\n}\n\n\n", "file_path": "tests/test_p2p_spectator_session.rs", "rank": 22, "score": 66619.534105492 }, { "content": "extern crate freetype as ft;\n\n\n\nuse ggrs::{GGRSError, GGRSEvent, SessionState};\n\nuse glutin_window::GlutinWindow as Window;\n\nuse opengl_graphics::{GlGraphics, OpenGL};\n\nuse piston::event_loop::{EventSettings, Events};\n\nuse piston::input::{RenderEvent, UpdateEvent};\n\nuse piston::window::WindowSettings;\n\nuse piston::{EventLoop, IdleEvent};\n\nuse std::net::SocketAddr;\n\nuse structopt::StructOpt;\n\n\n\nconst FPS: u64 = 60;\n\nconst INPUT_SIZE: usize = std::mem::size_of::<u8>();\n\n\n\nconst WINDOW_HEIGHT: u32 = 800;\n\nconst WINDOW_WIDTH: u32 = 600;\n\n\n\nmod box_game;\n\n\n\n#[derive(StructOpt)]\n", "file_path": "examples/box_game/box_game_spectator.rs", "rank": 23, "score": 63650.196358937494 }, { "content": " let opengl = OpenGL::V3_2;\n\n\n\n // Create a Glutin window\n\n let mut window: Window =\n\n WindowSettings::new(\"Box Game Spectator\", [WINDOW_WIDTH, WINDOW_HEIGHT])\n\n .graphics_api(opengl)\n\n .exit_on_esc(true)\n\n .build()\n\n .unwrap();\n\n\n\n // Create a new box game\n\n let mut game = box_game::BoxGame::new(opt.num_players);\n\n let mut gl = GlGraphics::new(opengl);\n\n\n\n // event settings\n\n let mut event_settings = 
EventSettings::new();\n\n event_settings.set_ups(FPS);\n\n event_settings.set_max_fps(FPS);\n\n let mut events = Events::new(event_settings);\n\n\n", "file_path": "examples/box_game/box_game_spectator.rs", "rank": 24, "score": 63640.84183868885 }, { "content": " // event loop\n\n while let Some(e) = events.next(&mut window) {\n\n // render update\n\n if let Some(args) = e.render_args() {\n\n game.render(&mut gl, &args);\n\n }\n\n\n\n // game update - tell GGRS it is time to advance the frame and handle the requests\n\n if let Some(_) = e.update_args() {\n\n if sess.current_state() == SessionState::Running {\n\n match sess.advance_frame() {\n\n Ok(requests) => game.handle_requests(requests),\n\n Err(GGRSError::PredictionThreshold) => {\n\n println!(\"Skipping a frame: Waiting for input from host.\");\n\n }\n\n Err(e) => return Err(Box::new(e)),\n\n }\n\n }\n\n }\n\n\n", "file_path": "examples/box_game/box_game_spectator.rs", "rank": 25, "score": 63624.83369786789 }, { "content": " // idle\n\n if let Some(_args) = e.idle_args() {\n\n sess.poll_remote_clients();\n\n\n\n // handle GGRS events\n\n for event in sess.events() {\n\n println!(\"Event: {:?}\", event);\n\n if let GGRSEvent::Disconnected { .. 
} = event {\n\n println!(\"Disconnected from host.\");\n\n return Ok(());\n\n }\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/box_game/box_game_spectator.rs", "rank": 26, "score": 63616.244769952 }, { "content": "#[derive(Debug, PartialEq, Eq)]\n\nenum Player {\n\n Local,\n\n Remote(Box<UdpProtocol>),\n\n Spectator(Box<UdpProtocol>),\n\n}\n\n\n\nimpl Player {\n\n #[allow(dead_code)]\n\n const fn as_endpoint(&self) -> Option<&UdpProtocol> {\n\n match self {\n\n Player::Remote(endpoint) => Some(endpoint),\n\n Player::Spectator(endpoint) => Some(endpoint),\n\n Player::Local => None,\n\n }\n\n }\n\n\n\n fn as_endpoint_mut(&mut self) -> Option<&mut UdpProtocol> {\n\n match self {\n\n Player::Remote(endpoint) => Some(endpoint),\n\n Player::Spectator(endpoint) => Some(endpoint),\n", "file_path": "src/sessions/p2p_session.rs", "rank": 27, "score": 50258.239183046484 }, { "content": "#[test]\n\nfn test_create_session() {\n\n assert!(ggrs::start_synctest_session(2, stubs::INPUT_SIZE, 2).is_ok());\n\n}\n\n\n", "file_path": "tests/test_synctest_session.rs", "rank": 28, "score": 42375.07032934912 }, { "content": "#[test]\n\n#[serial]\n\nfn test_start_session() {\n\n let mut sess = ggrs::start_p2p_session(2, stubs::INPUT_SIZE, 7777).unwrap();\n\n let addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 8080);\n\n assert!(sess.add_player(ggrs::PlayerType::Local, 0).is_ok());\n\n assert!(sess.start_session().is_err()); // not enough players\n\n assert!(sess.add_player(ggrs::PlayerType::Remote(addr), 1).is_ok());\n\n assert!(sess.start_session().is_ok()); // works\n\n assert!(sess.start_session().is_err()); // cannot start twice\n\n}\n\n\n", "file_path": "tests/test_p2p_session.rs", "rank": 29, "score": 42375.07032934912 }, { "content": "#[test]\n\n#[serial]\n\nfn test_create_session() {\n\n assert!(ggrs::start_p2p_session(2, stubs::INPUT_SIZE, 7777).is_ok());\n\n}\n\n\n", "file_path": "tests/test_p2p_session.rs", "rank": 30, "score": 42375.07032934912 
}, { "content": "/// Used to create a new `P2PSession` for players who participate on the game input. After creating the session, add local and remote players,\n\n/// set input delay for local players and then start the session.\n\n/// # Example\n\n///\n\n/// ```\n\n/// # use ggrs::GGRSError;\n\n/// # fn main() -> Result<(), GGRSError> {\n\n/// let local_port: u16 = 7777;\n\n/// let num_players : u32 = 2;\n\n/// let input_size : usize = std::mem::size_of::<u32>();\n\n/// let mut sess = ggrs::start_p2p_session(num_players, input_size, local_port)?;\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\n///\n\n/// # Errors\n\n/// - Will return a `InvalidRequest` if the number of players is higher than the allowed maximum (see `MAX_PLAYERS`).\n\n/// - Will return a `InvalidRequest` if `input_size` is higher than the allowed maximum (see `MAX_INPUT_BYTES`).\n\n/// - Will return `SocketCreationFailed` if the UPD socket could not be created.\n\npub fn start_p2p_session(\n\n num_players: u32,\n\n input_size: usize,\n\n local_port: u16,\n\n) -> Result<P2PSession, GGRSError> {\n\n if num_players > MAX_PLAYERS {\n\n return Err(GGRSError::InvalidRequest {\n\n info: \"Too many players.\".to_owned(),\n\n });\n\n }\n\n if input_size > MAX_INPUT_BYTES {\n\n return Err(GGRSError::InvalidRequest {\n\n info: \"Input size too big.\".to_owned(),\n\n });\n\n }\n\n P2PSession::new(num_players, input_size, local_port)\n\n .map_err(|_| GGRSError::SocketCreationFailed)\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 31, "score": 41595.93516382153 }, { "content": "/// Used to create a new `SyncTestSession`. 
During a sync test, GGRS will simulate a rollback every frame and resimulate the last n states, where n is the given `check_distance`.\n\n/// During a `SyncTestSession`, GGRS will simulate a rollback every frame and resimulate the last n states, where n is the given check distance.\n\n/// The resimulated checksums will be compared with the original checksums and report if there was a mismatch.\n\n/// Due to the decentralized nature of saving and loading gamestates, checksum comparisons can only be made if `check_distance` is 2 or higher.\n\n/// This is a great way to test if your system runs deterministically. After creating the session, add a local player, set input delay for them and then start the session.\n\n/// # Example\n\n///\n\n/// ```\n\n/// # use ggrs::GGRSError;\n\n/// # fn main() -> Result<(), GGRSError> {\n\n/// let check_distance : u32 = 7;\n\n/// let num_players : u32 = 2;\n\n/// let input_size : usize = std::mem::size_of::<u32>();\n\n/// let mut sess = ggrs::start_synctest_session(num_players, input_size, check_distance)?;\n\n/// # Ok(())\n\n/// # }\n\n/// ```\n\n///\n\n/// # Errors\n\n/// - Will return a `InvalidRequestError` if the number of players is higher than the allowed maximum (see `MAX_PLAYERS`).\n\n/// - Will return a `InvalidRequestError` if `input_size` is higher than the allowed maximum (see `MAX_INPUT_BYTES`).\n\n/// - Will return a `InvalidRequestError` if the `check_distance is` higher than or equal to `MAX_PREDICTION_FRAMES`.\n\npub fn start_synctest_session(\n\n num_players: u32,\n\n input_size: usize,\n\n check_distance: u32,\n\n) -> Result<SyncTestSession, GGRSError> {\n\n if num_players > MAX_PLAYERS {\n\n return Err(GGRSError::InvalidRequest {\n\n info: \"Too many players.\".to_owned(),\n\n });\n\n }\n\n if input_size > MAX_INPUT_BYTES {\n\n return Err(GGRSError::InvalidRequest {\n\n info: \"Input size too big.\".to_owned(),\n\n });\n\n }\n\n if check_distance >= MAX_PREDICTION_FRAMES {\n\n return Err(GGRSError::InvalidRequest 
{\n\n info: \"Check distance too big.\".to_owned(),\n\n });\n\n }\n\n Ok(SyncTestSession::new(\n\n num_players,\n\n input_size,\n\n check_distance,\n\n ))\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 32, "score": 41588.15260336756 }, { "content": "#[test]\n\n#[serial]\n\nfn test_synchronize_p2p_sessions() {\n\n let mut sess1 = ggrs::start_p2p_session(2, stubs::INPUT_SIZE, 7777).unwrap();\n\n let mut sess2 = ggrs::start_p2p_session(2, stubs::INPUT_SIZE, 8888).unwrap();\n\n let addr1 = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 7777);\n\n let addr2 = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 8888);\n\n\n\n assert!(sess1.current_state() == SessionState::Initializing);\n\n assert!(sess2.current_state() == SessionState::Initializing);\n\n\n\n assert!(sess1.add_player(ggrs::PlayerType::Local, 0).is_ok());\n\n assert!(sess1.add_player(ggrs::PlayerType::Remote(addr2), 1).is_ok());\n\n assert!(sess1.start_session().is_ok());\n\n\n\n assert!(sess2.add_player(ggrs::PlayerType::Local, 1).is_ok());\n\n assert!(sess2.add_player(ggrs::PlayerType::Remote(addr1), 0).is_ok());\n\n assert!(sess2.start_session().is_ok());\n\n\n\n assert!(sess1.current_state() == SessionState::Synchronizing);\n\n assert!(sess2.current_state() == SessionState::Synchronizing);\n\n\n\n for _ in 0..10 {\n\n sess1.poll_remote_clients();\n\n sess2.poll_remote_clients();\n\n }\n\n\n\n assert!(sess1.current_state() == SessionState::Running);\n\n assert!(sess2.current_state() == SessionState::Running);\n\n}\n\n\n", "file_path": "tests/test_p2p_session.rs", "rank": 33, "score": 41301.64519453055 }, { "content": "#[test]\n\nfn test_advance_frame_with_rollbacks() {\n\n let check_distance = 7;\n\n let mut stub = stubs::GameStub::new();\n\n let mut sess = ggrs::start_synctest_session(2, stubs::INPUT_SIZE, check_distance).unwrap();\n\n\n\n for i in 0..200 {\n\n let input: u32 = i;\n\n let mut serialized_input = Vec::new();\n\n 
serialized_input.push(bincode::serialize(&input).unwrap());\n\n serialized_input.push(bincode::serialize(&input).unwrap());\n\n let requests = sess.advance_frame(&serialized_input).unwrap();\n\n stub.handle_requests(requests);\n\n assert_eq!(stub.gs.frame, i as i32 + 1); // frame should have advanced\n\n }\n\n}\n\n\n", "file_path": "tests/test_synctest_session.rs", "rank": 34, "score": 41301.64519453055 }, { "content": "#[test]\n\n#[serial]\n\nfn test_advance_frame_p2p_sessions() {\n\n let mut stub1 = stubs::GameStub::new();\n\n let mut stub2 = stubs::GameStub::new();\n\n let mut sess1 = ggrs::start_p2p_session(2, stubs::INPUT_SIZE, 7777).unwrap();\n\n let mut sess2 = ggrs::start_p2p_session(2, stubs::INPUT_SIZE, 8888).unwrap();\n\n let addr1 = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 7777);\n\n let addr2 = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(127, 0, 0, 1)), 8888);\n\n\n\n assert!(sess1.current_state() == SessionState::Initializing);\n\n assert!(sess2.current_state() == SessionState::Initializing);\n\n\n\n assert!(sess1.add_player(ggrs::PlayerType::Local, 0).is_ok());\n\n assert!(sess1.add_player(ggrs::PlayerType::Remote(addr2), 1).is_ok());\n\n assert!(sess1.start_session().is_ok());\n\n\n\n assert!(sess2.add_player(ggrs::PlayerType::Local, 1).is_ok());\n\n assert!(sess2.add_player(ggrs::PlayerType::Remote(addr1), 0).is_ok());\n\n assert!(sess2.start_session().is_ok());\n\n\n\n assert!(sess1.current_state() == SessionState::Synchronizing);\n", "file_path": "tests/test_p2p_session.rs", "rank": 35, "score": 40312.07265343635 }, { "content": "#[test]\n\nfn test_advance_frames_with_delayed_input() {\n\n let handle = 1;\n\n let check_distance = 7;\n\n let mut stub = stubs::GameStub::new();\n\n let mut sess = ggrs::start_synctest_session(2, stubs::INPUT_SIZE, check_distance).unwrap();\n\n assert!(sess.set_frame_delay(2, handle).is_ok());\n\n\n\n for i in 0..200 {\n\n let input: u32 = i;\n\n let mut serialized_input = Vec::new();\n\n 
serialized_input.push(bincode::serialize(&input).unwrap());\n\n serialized_input.push(bincode::serialize(&input).unwrap());\n\n let requests = sess.advance_frame(&serialized_input).unwrap();\n\n stub.handle_requests(requests);\n\n assert_eq!(stub.gs.frame, i as i32 + 1); // frame should have advanced\n\n }\n\n}\n", "file_path": "tests/test_synctest_session.rs", "rank": 36, "score": 40312.07265343635 }, { "content": "fn millis_since_epoch() -> u128 {\n\n SystemTime::now()\n\n .duration_since(UNIX_EPOCH)\n\n .expect(\"Time went backwards\")\n\n .as_millis()\n\n}\n\n\n", "file_path": "src/network/udp_protocol.rs", "rank": 37, "score": 39327.70991979196 }, { "content": "extern crate freetype as ft;\n\n\n\nuse ft::Library;\n\nuse ggrs::{\n\n Frame, GGRSRequest, GameInput, GameState, GameStateCell, PlayerHandle, MAX_PLAYERS, NULL_FRAME,\n\n};\n\nuse graphics::{Context, Graphics, ImageSize};\n\nuse opengl_graphics::{GlGraphics, Texture, TextureSettings};\n\nuse piston::input::RenderArgs;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::path::PathBuf;\n\n\n\nconst FPS: u64 = 60;\n\nconst CHECKSUM_PERIOD: i32 = 100;\n\n\n\nconst BLACK: [f32; 4] = [0.0, 0.0, 0.0, 1.0];\n\nconst BLUE: [f32; 4] = [0.0, 0.35, 0.78, 1.0];\n\nconst ORANGE: [f32; 4] = [0.78, 0.59, 0.2, 1.0];\n\nconst MAGENTA: [f32; 4] = [0.9, 0.2, 0.2, 1.0];\n\nconst GREEN: [f32; 4] = [0.35, 0.7, 0.35, 1.0];\n", "file_path": "examples/box_game/box_game.rs", "rank": 38, "score": 37173.58538884045 }, { "content": " periodic_checksum: (Frame, u64),\n\n}\n\n\n\nimpl BoxGame {\n\n pub fn new(num_players: usize) -> Self {\n\n // load a font to render text\n\n let assets = find_folder::Search::ParentsThenKids(3, 3)\n\n .for_folder(\"assets\")\n\n .unwrap();\n\n assert!(num_players <= MAX_PLAYERS as usize);\n\n Self {\n\n num_players,\n\n game_state: BoxGameState::new(num_players),\n\n key_states: [false; 8],\n\n font: assets.join(\"FiraSans-Regular.ttf\"),\n\n freetype: ft::Library::init().unwrap(),\n\n last_checksum: 
(NULL_FRAME, 0),\n\n periodic_checksum: (NULL_FRAME, 0),\n\n }\n\n }\n", "file_path": "examples/box_game/box_game.rs", "rank": 39, "score": 37169.160369691104 }, { "content": " }\n\n\n\n // renders the game to the window\n\n pub fn render(&mut self, gl: &mut GlGraphics, args: &RenderArgs) {\n\n use graphics::*;\n\n\n\n // preparation for last checksum rendering\n\n let mut face = self.freetype.new_face(&self.font, 0).unwrap();\n\n face.set_pixel_sizes(0, 40).unwrap();\n\n let checksum_string = format!(\n\n \"Frame {}: Checksum {}\",\n\n self.last_checksum.0, self.last_checksum.1\n\n );\n\n let checksum_glyphs = glyphs(&mut face, &checksum_string);\n\n // preparation for periodic checksum rendering\n\n let periodic_string = format!(\n\n \"Frame {}: Checksum {}\",\n\n self.periodic_checksum.0, self.periodic_checksum.1\n\n );\n\n let periodic_glyphs = glyphs(&mut face, &periodic_string);\n", "file_path": "examples/box_game/box_game.rs", "rank": 40, "score": 37165.491255107656 }, { "content": " .trans(-PLAYER_SIZE / 2.0, -PLAYER_SIZE / 2.0);\n\n rectangle(PLAYER_COLORS[i], square, transform, gl);\n\n }\n\n });\n\n }\n\n\n\n #[allow(dead_code)]\n\n // creates a compact representation of currently pressed keys and serializes it\n\n pub fn local_input(&self, handle: PlayerHandle) -> Vec<u8> {\n\n let mut input: u8 = 0;\n\n\n\n // ugly, but it works...\n\n if handle == 0 {\n\n if self.key_states[0] {\n\n input |= INPUT_UP;\n\n }\n\n if self.key_states[1] {\n\n input |= INPUT_LEFT;\n\n }\n\n if self.key_states[2] {\n", "file_path": "examples/box_game/box_game.rs", "rank": 41, "score": 37160.155462004805 }, { "content": " }\n\n\n\n // deserialize gamestate to load and overwrite current gamestate\n\n fn load_game_state(&mut self, cell: GameStateCell) {\n\n let state_to_load = cell.load();\n\n self.game_state = bincode::deserialize(&state_to_load.buffer.unwrap()).unwrap();\n\n }\n\n\n\n fn advance_frame(&mut self, inputs: Vec<GameInput>) {\n\n // advance the game state\n\n 
self.game_state.advance(inputs);\n\n\n\n // remember checksum to render it later\n\n // it is very inefficient to serialize the gamestate here just for the checksum\n\n let buffer = bincode::serialize(&self.game_state).unwrap();\n\n let checksum = fletcher16(&buffer) as u64;\n\n self.last_checksum = (self.game_state.frame, checksum);\n\n if self.game_state.frame % CHECKSUM_PERIOD == 0 {\n\n self.periodic_checksum = (self.game_state.frame, checksum);\n\n }\n", "file_path": "examples/box_game/box_game.rs", "rank": 42, "score": 37159.586824293125 }, { "content": "const PLAYER_COLORS: [[f32; 4]; MAX_PLAYERS as usize] = [BLUE, ORANGE, MAGENTA, GREEN];\n\n\n\nconst PLAYER_SIZE: f64 = 50.0;\n\nconst WINDOW_HEIGHT: u32 = 800;\n\nconst WINDOW_WIDTH: u32 = 600;\n\n\n\nconst INPUT_UP: u8 = 1 << 0;\n\nconst INPUT_DOWN: u8 = 1 << 1;\n\nconst INPUT_LEFT: u8 = 1 << 2;\n\nconst INPUT_RIGHT: u8 = 1 << 3;\n\n\n\nconst MOVEMENT_SPEED: f64 = 15.0 / FPS as f64;\n\nconst ROTATION_SPEED: f64 = 2.5 / FPS as f64;\n\nconst MAX_SPEED: f64 = 7.0;\n\nconst FRICTION: f64 = 0.98;\n\n\n\n/// computes the fletcher16 checksum, copied from wikipedia: <https://en.wikipedia.org/wiki/Fletcher%27s_checksum>\n", "file_path": "examples/box_game/box_game.rs", "rank": 43, "score": 37159.345266074255 }, { "content": "\n\n // start drawing\n\n gl.draw(args.viewport(), |c, gl| {\n\n // clear the screen\n\n clear(BLACK, gl);\n\n\n\n // render checksums\n\n render_text(&checksum_glyphs, &c.trans(0.0, 40.0), gl);\n\n render_text(&periodic_glyphs, &c.trans(0.0, 80.0), gl);\n\n\n\n // draw the player rectangles\n\n for i in 0..self.num_players {\n\n let square = rectangle::square(0.0, 0.0, PLAYER_SIZE);\n\n let (x, y) = self.game_state.positions[i];\n\n let rotation = self.game_state.rotations[i];\n\n\n\n let transform = c\n\n .transform\n\n .trans(x, y)\n\n .rot_rad(rotation)\n", "file_path": "examples/box_game/box_game.rs", "rank": 44, "score": 37158.31221056718 }, { "content": "\n\n // for each request, call the 
appropriate function\n\n pub fn handle_requests(&mut self, requests: Vec<GGRSRequest>) {\n\n for request in requests {\n\n match request {\n\n GGRSRequest::LoadGameState { cell } => self.load_game_state(cell),\n\n GGRSRequest::SaveGameState { cell, frame } => self.save_game_state(cell, frame),\n\n GGRSRequest::AdvanceFrame { inputs } => self.advance_frame(inputs),\n\n }\n\n }\n\n }\n\n\n\n // serialize current gamestate, create a checksum\n\n // creating a checksum here is only relevant for SyncTestSessions\n\n fn save_game_state(&mut self, cell: GameStateCell, frame: Frame) {\n\n assert_eq!(self.game_state.frame, frame);\n\n let buffer = bincode::serialize(&self.game_state).unwrap();\n\n let checksum = fletcher16(&buffer) as u64;\n\n\n\n cell.save(GameState::new(frame, Some(buffer), Some(checksum)));\n", "file_path": "examples/box_game/box_game.rs", "rank": 45, "score": 37157.422261384905 }, { "content": " positions.push((x as f64, y as f64));\n\n velocities.push((0.0, 0.0));\n\n rotations.push((rot + std::f64::consts::PI) % (2.0 * std::f64::consts::PI));\n\n }\n\n\n\n Self {\n\n frame: 0,\n\n num_players,\n\n positions,\n\n velocities,\n\n rotations,\n\n }\n\n }\n\n\n\n pub fn advance(&mut self, inputs: Vec<GameInput>) {\n\n // increase the frame counter\n\n self.frame += 1;\n\n\n\n for i in 0..self.num_players {\n\n // get input of that player\n", "file_path": "examples/box_game/box_game.rs", "rank": 46, "score": 37155.86917028492 }, { "content": " let input = if inputs[i].frame == NULL_FRAME {\n\n // disconnected players spin\n\n 4\n\n } else {\n\n // otherwise deserialize the input\n\n bincode::deserialize(inputs[i].input()).unwrap()\n\n };\n\n\n\n // old values\n\n let (old_x, old_y) = self.positions[i];\n\n let (old_vel_x, old_vel_y) = self.velocities[i];\n\n let mut rot = self.rotations[i];\n\n\n\n // slow down\n\n let mut vel_x = old_vel_x * FRICTION;\n\n let mut vel_y = old_vel_y * FRICTION;\n\n\n\n // thrust\n\n if input & INPUT_UP != 0 && input & 
INPUT_DOWN == 0 {\n\n vel_x += MOVEMENT_SPEED * rot.cos();\n", "file_path": "examples/box_game/box_game.rs", "rank": 47, "score": 37155.280167791294 }, { "content": " vel_y = (vel_y * MAX_SPEED) / magnitude;\n\n }\n\n\n\n // compute new position\n\n let mut x = old_x + vel_x;\n\n let mut y = old_y + vel_y;\n\n\n\n // constrain boxes to canvas borders\n\n x = x.max(0.0);\n\n x = x.min(WINDOW_WIDTH as f64);\n\n y = y.max(0.0);\n\n y = y.min(WINDOW_HEIGHT as f64);\n\n\n\n // update all state\n\n self.positions[i] = (x, y);\n\n self.velocities[i] = (vel_x, vel_y);\n\n self.rotations[i] = rot;\n\n }\n\n }\n\n}\n", "file_path": "examples/box_game/box_game.rs", "rank": 48, "score": 37154.18949189238 }, { "content": " }\n\n\n\n // serialization is completely unnecessary here, since the data is already u8\n\n // this is for demonstration\n\n bincode::serialize(&input).unwrap()\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub const fn current_frame(&self) -> i32 {\n\n self.game_state.frame\n\n }\n\n}\n\n\n\n// BoxGameState holds all relevant information about the game state\n", "file_path": "examples/box_game/box_game.rs", "rank": 49, "score": 37154.055592385754 }, { "content": " input |= INPUT_DOWN;\n\n }\n\n if self.key_states[3] {\n\n input |= INPUT_RIGHT;\n\n }\n\n }\n\n\n\n if handle == 1 {\n\n if self.key_states[4] {\n\n input |= INPUT_UP;\n\n }\n\n if self.key_states[5] {\n\n input |= INPUT_LEFT;\n\n }\n\n if self.key_states[6] {\n\n input |= INPUT_DOWN;\n\n }\n\n if self.key_states[7] {\n\n input |= INPUT_RIGHT;\n\n }\n", "file_path": "examples/box_game/box_game.rs", "rank": 50, "score": 37151.97069288095 }, { "content": " vel_y += MOVEMENT_SPEED * rot.sin();\n\n }\n\n // break\n\n if input & INPUT_UP == 0 && input & INPUT_DOWN != 0 {\n\n vel_x -= MOVEMENT_SPEED * rot.cos();\n\n vel_y -= MOVEMENT_SPEED * rot.sin();\n\n }\n\n // turn left\n\n if input & INPUT_LEFT != 0 && input & INPUT_RIGHT == 0 {\n\n rot = (rot - ROTATION_SPEED).rem_euclid(2.0 * std::f64::consts::PI);\n\n 
}\n\n // turn right\n\n if input & INPUT_LEFT == 0 && input & INPUT_RIGHT != 0 {\n\n rot = (rot + ROTATION_SPEED).rem_euclid(2.0 * std::f64::consts::PI);\n\n }\n\n\n\n // limit speed\n\n let magnitude = (vel_x * vel_x + vel_y * vel_y).sqrt();\n\n if magnitude > MAX_SPEED {\n\n vel_x = (vel_x * MAX_SPEED) / magnitude;\n", "file_path": "examples/box_game/box_game.rs", "rank": 51, "score": 37147.18980463221 }, { "content": "extern crate freetype as ft;\n\n\n\nuse glutin_window::GlutinWindow as Window;\n\nuse opengl_graphics::{GlGraphics, OpenGL};\n\nuse piston::event_loop::{EventSettings, Events};\n\nuse piston::input::{RenderEvent, UpdateEvent};\n\nuse piston::window::WindowSettings;\n\nuse piston::{Button, EventLoop, Key, PressEvent, ReleaseEvent};\n\nuse structopt::StructOpt;\n\n\n\nconst FPS: u64 = 60;\n\nconst INPUT_SIZE: usize = std::mem::size_of::<u8>();\n\n\n\nconst WINDOW_HEIGHT: u32 = 800;\n\nconst WINDOW_WIDTH: u32 = 600;\n\n\n\nmod box_game;\n\n\n\n#[derive(StructOpt)]\n", "file_path": "examples/box_game/box_game_synctest.rs", "rank": 52, "score": 36095.84071381486 }, { "content": " .exit_on_esc(true)\n\n .build()\n\n .unwrap();\n\n\n\n // Create a new box game\n\n let mut game = box_game::BoxGame::new(opt.num_players);\n\n let mut gl = GlGraphics::new(opengl);\n\n\n\n // event settings\n\n let mut event_settings = EventSettings::new();\n\n event_settings.set_ups(FPS);\n\n event_settings.set_max_fps(FPS);\n\n let mut events = Events::new(event_settings);\n\n\n\n // event loop\n\n while let Some(e) = events.next(&mut window) {\n\n // render update\n\n if let Some(args) = e.render_args() {\n\n game.render(&mut gl, &args);\n\n }\n", "file_path": "examples/box_game/box_game_synctest.rs", "rank": 55, "score": 36078.18966345755 }, { "content": "\n\n // game update\n\n if let Some(_) = e.update_args() {\n\n // create inputs for all players\n\n let mut all_inputs = Vec::new();\n\n for i in 0..opt.num_players {\n\n all_inputs.push(game.local_input(i));\n\n }\n\n // 
tell GGRS it is time to advance the frame and handle the requests\n\n let requests = sess.advance_frame(&all_inputs)?;\n\n game.handle_requests(requests);\n\n }\n\n\n\n // key state update\n\n if let Some(Button::Keyboard(key)) = e.press_args() {\n\n match key {\n\n Key::W => game.key_states[0] = true,\n\n Key::A => game.key_states[1] = true,\n\n Key::S => game.key_states[2] = true,\n\n Key::D => game.key_states[3] = true,\n", "file_path": "examples/box_game/box_game_synctest.rs", "rank": 56, "score": 36067.26303927747 }, { "content": " Key::Up => game.key_states[4] = true,\n\n Key::Left => game.key_states[5] = true,\n\n Key::Down => game.key_states[6] = true,\n\n Key::Right => game.key_states[7] = true,\n\n _ => (),\n\n }\n\n }\n\n\n\n // key state update\n\n if let Some(Button::Keyboard(key)) = e.release_args() {\n\n match key {\n\n Key::W => game.key_states[0] = false,\n\n Key::A => game.key_states[1] = false,\n\n Key::S => game.key_states[2] = false,\n\n Key::D => game.key_states[3] = false,\n\n Key::Up => game.key_states[4] = false,\n\n Key::Left => game.key_states[5] = false,\n\n Key::Down => game.key_states[6] = false,\n\n Key::Right => game.key_states[7] = false,\n\n _ => (),\n\n }\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n", "file_path": "examples/box_game/box_game_synctest.rs", "rank": 61, "score": 36058.92015535854 }, { "content": "extern crate freetype as ft;\n\nuse nalgebra::{vector, Vector2};\n\nuse rapier2d::na::ComplexField;\n\nuse rapier2d::prelude::*;\n\n\n\nuse ft::Library;\n\nuse ggrs::{\n\n Frame, GGRSRequest, GameInput, GameState, GameStateCell, PlayerHandle, MAX_PLAYERS, NULL_FRAME,\n\n};\n\nuse graphics::{Context, Graphics, ImageSize};\n\nuse opengl_graphics::{GlGraphics, Texture, TextureSettings};\n\nuse piston::input::RenderArgs;\n\nuse serde::{Deserialize, Serialize};\n\nuse std::path::PathBuf;\n\n\n\nconst CHECKSUM_PERIOD: i32 = 100;\n\n\n\nconst BLACK: [f32; 4] = [0.0, 0.0, 0.0, 1.0];\n\nconst ORANGE: [f32; 4] = [0.78, 0.59, 0.2, 1.0];\n\nconst 
BLUE: [f32; 4] = [0.0, 0.35, 0.78, 1.0];\n\n\n\nconst WINDOW_HEIGHT: u32 = 800;\n\nconst WINDOW_WIDTH: u32 = 600;\n\nconst SCALE: f64 = 10.;\n\n\n\n//const GRAVITY: Vec<f32> = vector![0.0, -9.81];\n\n\n\n/// computes the fletcher16 checksum, copied from wikipedia: <https://en.wikipedia.org/wiki/Fletcher%27s_checksum>\n", "file_path": "examples/rapier/rapier_game.rs", "rank": 63, "score": 29628.082826468744 }, { "content": " }\n\n\n\n // physics update\n\n self.physics_pipeline.step(\n\n &self.gravity,\n\n &self.integration_parameters,\n\n &mut self.state.island_manager,\n\n &mut self.state.broad_phase,\n\n &mut self.state.narrow_phase,\n\n &mut self.state.bodies,\n\n &mut self.state.colliders,\n\n &mut self.state.joint_set,\n\n &mut self.ccd_solver,\n\n &self.physics_hooks,\n\n &self.event_handler,\n\n );\n\n }\n\n\n\n // renders the game to the window\n\n pub fn render(&mut self, gl: &mut GlGraphics, args: &RenderArgs) {\n", "file_path": "examples/rapier/rapier_game.rs", "rank": 64, "score": 29616.47222455282 }, { "content": " use graphics::*;\n\n\n\n // preparation for last checksum rendering\n\n let mut face = self.freetype.new_face(&self.font, 0).unwrap();\n\n face.set_pixel_sizes(0, 40).unwrap();\n\n let checksum_string = format!(\n\n \"Frame {}: Checksum {}\",\n\n self.last_checksum.0, self.last_checksum.1\n\n );\n\n let checksum_glyphs = glyphs(&mut face, &checksum_string);\n\n // preparation for periodic checksum rendering\n\n let periodic_string = format!(\n\n \"Frame {}: Checksum {}\",\n\n self.periodic_checksum.0, self.periodic_checksum.1\n\n );\n\n let periodic_glyphs = glyphs(&mut face, &periodic_string);\n\n\n\n // start drawing\n\n gl.draw(args.viewport(), |c, gl| {\n\n // clear the screen\n", "file_path": "examples/rapier/rapier_game.rs", "rank": 65, "score": 29615.638144059132 }, { "content": "\n\n // rapier stuff\n\n physics_pipeline: PhysicsPipeline,\n\n integration_parameters: IntegrationParameters,\n\n gravity: Vector2<f32>,\n\n ccd_solver: 
CCDSolver,\n\n physics_hooks: (),\n\n event_handler: (),\n\n}\n\n\n\nimpl RapierGame {\n\n pub fn new(num_players: usize, num_bodies: usize) -> Self {\n\n // load a font to render text\n\n let assets = find_folder::Search::ParentsThenKids(3, 3)\n\n .for_folder(\"assets\")\n\n .unwrap();\n\n assert!(num_players <= MAX_PLAYERS as usize);\n\n Self {\n\n num_players,\n\n state: RapierState::new(num_players, num_bodies),\n", "file_path": "examples/rapier/rapier_game.rs", "rank": 66, "score": 29615.453365847792 }, { "content": "\n\n // deserialize gamestate to load and overwrite current gamestate\n\n fn load_game_state(&mut self, cell: GameStateCell) {\n\n let state_to_load = cell.load();\n\n self.state = bincode::deserialize(&state_to_load.buffer.unwrap()).unwrap();\n\n }\n\n\n\n fn advance_frame(&mut self, inputs: Vec<GameInput>) {\n\n // increase the frame counter\n\n self.state.frame += 1;\n\n\n\n for i in 0..self.num_players {\n\n // get input of that player\n\n let _input: u8 = if inputs[i].frame == NULL_FRAME {\n\n // disconnected players spin\n\n 4\n\n } else {\n\n // otherwise deserialize the input\n\n bincode::deserialize(inputs[i].input()).unwrap()\n\n };\n", "file_path": "examples/rapier/rapier_game.rs", "rank": 67, "score": 29615.040046990744 }, { "content": " font: assets.join(\"FiraSans-Regular.ttf\"),\n\n freetype: ft::Library::init().unwrap(),\n\n last_checksum: (NULL_FRAME, 0),\n\n periodic_checksum: (NULL_FRAME, 0),\n\n\n\n physics_pipeline: PhysicsPipeline::new(),\n\n gravity: vector![0.0, -9.81],\n\n integration_parameters: IntegrationParameters::default(),\n\n ccd_solver: CCDSolver::new(),\n\n physics_hooks: (),\n\n event_handler: (),\n\n }\n\n }\n\n\n\n // for each request, call the appropriate function\n\n pub fn handle_requests(&mut self, requests: Vec<GGRSRequest>) {\n\n for request in requests {\n\n match request {\n\n GGRSRequest::LoadGameState { cell } => self.load_game_state(cell),\n\n GGRSRequest::SaveGameState { cell, frame } => 
self.save_game_state(cell, frame),\n", "file_path": "examples/rapier/rapier_game.rs", "rank": 68, "score": 29614.772706373402 }, { "content": "\n\n for cube_handle in &self.state.cube_handles {\n\n let cube_body = &self.state.bodies[*cube_handle];\n\n let rect = rectangle::square(0.0, 0.0, SCALE);\n\n let transform = c\n\n .transform\n\n .trans(\n\n cube_body.translation().x as f64 * SCALE,\n\n WINDOW_HEIGHT as f64 - cube_body.translation().y as f64 * SCALE,\n\n )\n\n .trans(WINDOW_WIDTH as f64 / 2., -30.)\n\n .rot_rad(cube_body.rotation().angle() as f64)\n\n .trans(-SCALE / 2., -SCALE / 2.);\n\n rectangle(ORANGE, rect, transform, gl);\n\n }\n\n });\n\n }\n\n\n\n #[allow(dead_code)]\n\n // creates a compact representation of currently pressed keys and serializes it\n\n pub fn local_input(&self, _handle: PlayerHandle) -> Vec<u8> {\n\n vec![0u8]\n\n }\n\n}\n\n\n\n// BoxGameState holds all relevant information about the game state\n", "file_path": "examples/rapier/rapier_game.rs", "rank": 69, "score": 29613.052629959046 }, { "content": " GGRSRequest::AdvanceFrame { inputs } => self.advance_frame(inputs),\n\n }\n\n }\n\n }\n\n\n\n // serialize current gamestate, create a checksum\n\n // creating a checksum here is only relevant for SyncTestSessions\n\n fn save_game_state(&mut self, cell: GameStateCell, frame: Frame) {\n\n assert_eq!(self.state.frame, frame);\n\n let buffer = bincode::serialize(&self.state).unwrap();\n\n let checksum = fletcher16(&buffer) as u64;\n\n\n\n // remember checksum to render it later\n\n self.last_checksum = (self.state.frame, checksum);\n\n if self.state.frame % CHECKSUM_PERIOD == 0 {\n\n self.periodic_checksum = (self.state.frame, checksum);\n\n }\n\n\n\n cell.save(GameState::new(frame, Some(buffer), Some(checksum)));\n\n }\n", "file_path": "examples/rapier/rapier_game.rs", "rank": 70, "score": 29611.077979893893 }, { "content": " let rigid_body = RigidBodyBuilder::new_static().build();\n\n let handle = bodies.insert(rigid_body);\n\n let 
collider = ColliderBuilder::polyline(points, None).build();\n\n colliders.insert_with_parent(collider, handle, &mut bodies);\n\n\n\n /*\n\n * Create the bodies\n\n */\n\n let rad = 0.5;\n\n\n\n let mut cube_handles = Vec::with_capacity(num_bodies / 2);\n\n let mut sphere_handles = Vec::with_capacity(num_bodies / 2);\n\n\n\n let shift = rad * 2.0;\n\n let centerx = shift * (num_bodies / 2) as f32;\n\n let centery = shift / 2.0 + 20.0;\n\n\n\n for i in 0..num_bodies {\n\n for j in 0usize..num_bodies {\n\n let x = i as f32 * shift - centerx;\n", "file_path": "examples/rapier/rapier_game.rs", "rank": 71, "score": 29610.359792797637 }, { "content": " let y = j as f32 * shift + centery + 3.0;\n\n\n\n // Build the rigid body.\n\n let rigid_body = RigidBodyBuilder::new_dynamic()\n\n .translation(vector![x, y])\n\n .build();\n\n let handle = bodies.insert(rigid_body);\n\n\n\n if j % 2 == 0 {\n\n let collider = ColliderBuilder::cuboid(rad, rad).build();\n\n colliders.insert_with_parent(collider, handle, &mut bodies);\n\n cube_handles.push(handle);\n\n } else {\n\n let collider = ColliderBuilder::ball(rad).build();\n\n colliders.insert_with_parent(collider, handle, &mut bodies);\n\n sphere_handles.push(handle);\n\n }\n\n }\n\n }\n\n\n", "file_path": "examples/rapier/rapier_game.rs", "rank": 72, "score": 29608.73599030933 }, { "content": " clear(BLACK, gl);\n\n\n\n // render checksums\n\n render_text(&checksum_glyphs, &c.trans(0.0, 40.0), gl);\n\n render_text(&periodic_glyphs, &c.trans(0.0, 80.0), gl);\n\n\n\n for sphere_handle in &self.state.sphere_handles {\n\n let sphere_body = &self.state.bodies[*sphere_handle];\n\n let rect = rectangle::square(0.0, 0.0, SCALE);\n\n let transform = c\n\n .transform\n\n .trans(\n\n sphere_body.translation().x as f64 * SCALE,\n\n WINDOW_HEIGHT as f64 - sphere_body.translation().y as f64 * SCALE,\n\n )\n\n .trans(WINDOW_WIDTH as f64 / 2., -30.)\n\n .rot_rad(sphere_body.rotation().angle() as f64)\n\n .trans(-SCALE / 2., -SCALE / 2.);\n\n 
ellipse(BLUE, rect, transform, gl);\n\n }\n", "file_path": "examples/rapier/rapier_game.rs", "rank": 73, "score": 29608.715645575798 }, { "content": " */\n\n let mut bodies = RigidBodySet::new();\n\n let mut colliders = ColliderSet::new();\n\n\n\n /*\n\n * Ground\n\n */\n\n let ground_size = 50.0;\n\n let nsubdivs = 2000;\n\n let step_size = ground_size / (nsubdivs as f32);\n\n let mut points = Vec::new();\n\n\n\n points.push(point![-ground_size / 2.0, 40.0]);\n\n for i in 1..nsubdivs - 1 {\n\n let x = -ground_size / 2.0 + i as f32 * step_size;\n\n let y = ComplexField::cos(i as f32 * step_size) * 2.0;\n\n points.push(point![x, y]);\n\n }\n\n points.push(point![ground_size / 2.0, 40.0]);\n\n\n", "file_path": "examples/rapier/rapier_game.rs", "rank": 74, "score": 29605.127900841577 }, { "content": " Self {\n\n frame: 0,\n\n num_players,\n\n\n\n bodies,\n\n colliders,\n\n joint_set: JointSet::new(),\n\n broad_phase: BroadPhase::new(),\n\n narrow_phase: NarrowPhase::new(),\n\n island_manager: IslandManager::new(),\n\n\n\n cube_handles,\n\n sphere_handles,\n\n }\n\n }\n\n}\n", "file_path": "examples/rapier/rapier_game.rs", "rank": 75, "score": 29604.336274282414 }, { "content": "const NORMAL_SPEED: u32 = 1;\n\n// The amount of frames the spectator advances in a single step if too far behing\n\nconst DEFAULT_CATCHUP_SPEED: u32 = 2;\n\n// The amount of events a spectator can buffer; should never be an issue if the user polls the events at every step\n\nconst MAX_EVENT_QUEUE_SIZE: usize = 100;\n\n\n\n/// A `P2PSpectatorSession` provides a UDP protocol to connect to a remote host in a peer-to-peer fashion. 
The host will broadcast all confirmed inputs to this session.\n\n/// This session can be used to spectate a session without contributing to the game input.\n\n#[derive(Debug)]\n\npub struct P2PSpectatorSession {\n\n state: SessionState,\n\n num_players: u32,\n\n input_size: usize,\n\n inputs: [GameInput; SPECTATOR_BUFFER_SIZE],\n\n host_connect_status: Vec<ConnectionStatus>,\n\n socket: NonBlockingSocket,\n\n host: UdpProtocol,\n\n event_queue: VecDeque<GGRSEvent>,\n\n current_frame: Frame,\n\n last_recv_frame: Frame,\n", "file_path": "src/sessions/p2p_spectator_session.rs", "rank": 76, "score": 28852.25470205952 }, { "content": " max_frames_behind: u32,\n\n catchup_speed: u32,\n\n}\n\n\n\nimpl P2PSpectatorSession {\n\n pub(crate) fn new(\n\n num_players: u32,\n\n input_size: usize,\n\n local_port: u16,\n\n host_addr: SocketAddr,\n\n ) -> Result<Self, std::io::Error> {\n\n // udp nonblocking socket creation\n\n let addr = SocketAddr::new(IpAddr::V4(Ipv4Addr::new(0, 0, 0, 0)), local_port); //TODO: IpV6?\n\n let socket = NonBlockingSocket::new(addr)?;\n\n\n\n // host connection status\n\n let mut host_connect_status = Vec::new();\n\n for _ in 0..num_players {\n\n host_connect_status.push(ConnectionStatus::default());\n\n }\n", "file_path": "src/sessions/p2p_spectator_session.rs", "rank": 77, "score": 28851.681284968152 }, { "content": "use std::{\n\n collections::{vec_deque::Drain, VecDeque},\n\n net::{IpAddr, Ipv4Addr, SocketAddr},\n\n};\n\n\n\nuse crate::{\n\n frame_info::BLANK_INPUT,\n\n network::{\n\n udp_msg::ConnectionStatus, udp_protocol::UdpProtocol, udp_socket::NonBlockingSocket,\n\n },\n\n Frame, GGRSError, GGRSEvent, GGRSRequest, GameInput, NetworkStats, SessionState, NULL_FRAME,\n\n};\n\n\n\nuse super::p2p_session::Event;\n\n\n\n// The amount of inputs a spectator can buffer (a second worth of inputs)\n\nconst SPECTATOR_BUFFER_SIZE: usize = 60;\n\n// If the spectator is more than this amount of frames behind, it will advance the game two steps at a time 
to catch up\n\nconst DEFAULT_MAX_FRAMES_BEHIND: u32 = 10;\n\n// The amount of frames the spectator advances in a single step if not too far behing\n", "file_path": "src/sessions/p2p_spectator_session.rs", "rank": 78, "score": 28851.20228514309 }, { "content": "\n\n // send out all pending UDP messages\n\n self.host.send_all_messages(&self.socket);\n\n }\n\n\n\n /// Returns the number of players this session was constructed with.\n\n pub const fn num_players(&self) -> u32 {\n\n self.num_players\n\n }\n\n\n\n /// Returns the input size this session was constructed with.\n\n pub const fn input_size(&self) -> usize {\n\n self.input_size\n\n }\n\n\n\n /// Sets the FPS this session is used with. This influences ping estimates.\n\n pub fn set_fps(&mut self, fps: u32) -> Result<(), GGRSError> {\n\n if fps == 0 {\n\n return Err(GGRSError::InvalidRequest {\n\n info: \"FPS should be higher than 0.\".to_owned(),\n", "file_path": "src/sessions/p2p_spectator_session.rs", "rank": 79, "score": 28849.384848900438 }, { "content": " // save the input\n\n self.inputs[input.frame as usize % SPECTATOR_BUFFER_SIZE] = input;\n\n assert!(input.frame > self.last_recv_frame);\n\n self.last_recv_frame = input.frame;\n\n\n\n // update the frame advantage\n\n self.host.update_local_frame_advantage(input.frame);\n\n\n\n // update the host connection status\n\n for i in 0..self.num_players as usize {\n\n self.host_connect_status[i] = self.host.peer_connect_status(i);\n\n }\n\n }\n\n }\n\n\n\n // check event queue size and discard oldest events if too big\n\n while self.event_queue.len() > MAX_EVENT_QUEUE_SIZE {\n\n self.event_queue.pop_front();\n\n }\n\n }\n\n}\n", "file_path": "src/sessions/p2p_spectator_session.rs", "rank": 80, "score": 28845.127065069428 }, { "content": " .to_owned(),\n\n });\n\n }\n\n\n\n self.max_frames_behind = desired_value;\n\n Ok(())\n\n }\n\n\n\n /// Used to fetch some statistics about the quality of the network connection.\n\n /// # Errors\n\n /// - Returns 
`NotSynchronized` if the session is not connected to other clients yet.\n\n pub fn network_stats(&self) -> Result<NetworkStats, GGRSError> {\n\n match self.host.network_stats() {\n\n Some(stats) => Ok(stats),\n\n None => Err(GGRSError::NotSynchronized),\n\n }\n\n }\n\n\n\n /// Returns all events that happened since last queried for events. If the number of stored events exceeds `MAX_EVENT_QUEUE_SIZE`, the oldest events will be discarded.\n\n pub fn events(&mut self) -> Drain<GGRSEvent> {\n", "file_path": "src/sessions/p2p_spectator_session.rs", "rank": 81, "score": 28844.46246211097 }, { "content": "\n\n Ok(synced_inputs)\n\n }\n\n\n\n fn handle_event(&mut self, event: Event) {\n\n let player_handle = 0;\n\n match event {\n\n // forward to user\n\n Event::Synchronizing { total, count } => {\n\n self.event_queue.push_back(GGRSEvent::Synchronizing {\n\n player_handle,\n\n total,\n\n count,\n\n });\n\n }\n\n // forward to user\n\n Event::NetworkInterrupted { disconnect_timeout } => {\n\n self.event_queue.push_back(GGRSEvent::NetworkInterrupted {\n\n player_handle,\n\n disconnect_timeout,\n", "file_path": "src/sessions/p2p_spectator_session.rs", "rank": 82, "score": 28844.4019327745 }, { "content": "use ggrs::{PlayerType, SessionState};\n\nuse std::net::{IpAddr, Ipv4Addr, SocketAddr};\n\n\n\nuse serial_test::serial;\n\n\n\nmod stubs;\n\n\n\n#[test]\n\n#[serial]\n", "file_path": "tests/test_p2p_spectator_session.rs", "rank": 83, "score": 28844.23781287692 }, { "content": " });\n\n }\n\n\n\n self.host.set_fps(fps);\n\n\n\n Ok(())\n\n }\n\n\n\n fn inputs_at_frame(&self, frame_to_grab: Frame) -> Result<Vec<GameInput>, GGRSError> {\n\n let merged_input = self.inputs[frame_to_grab as usize % SPECTATOR_BUFFER_SIZE];\n\n\n\n // We haven't received the input from the host yet. Wait.\n\n if merged_input.frame < frame_to_grab {\n\n return Err(GGRSError::PredictionThreshold);\n\n }\n\n\n\n // The host is more than `SPECTATOR_BUFFER_SIZE` frames ahead of the spectator. 
The input we need is gone forever.\n\n if merged_input.frame > frame_to_grab {\n\n return Err(GGRSError::SpectatorTooFarBehind);\n\n }\n", "file_path": "src/sessions/p2p_spectator_session.rs", "rank": 84, "score": 28844.13487651819 }, { "content": " }\n\n\n\n /// Returns the number of frames behind the host\n\n pub fn frames_behind_host(&self) -> u32 {\n\n let diff = self.last_recv_frame - self.current_frame;\n\n assert!(diff >= 0);\n\n diff as u32\n\n }\n\n\n\n /// Sets the amount of frames the spectator advances in a single `advance_frame()` call if it is too far behind the host.\n\n /// If set to 1, the spectator will never catch up.\n\n pub fn set_catchup_speed(&mut self, desired_catchup_speed: u32) -> Result<(), GGRSError> {\n\n if desired_catchup_speed < 1 {\n\n return Err(GGRSError::InvalidRequest {\n\n info: \"Catchup speed cannot be smaller than 1.\".to_owned(),\n\n });\n\n }\n\n\n\n if desired_catchup_speed >= self.max_frames_behind {\n\n return Err(GGRSError::InvalidRequest {\n", "file_path": "src/sessions/p2p_spectator_session.rs", "rank": 85, "score": 28844.108469415733 }, { "content": "\n\n // split the inputs back into an input for each player\n\n assert!(merged_input.size % self.input_size == 0);\n\n let mut synced_inputs = Vec::new();\n\n\n\n for i in 0..self.num_players as usize {\n\n let mut input = GameInput::new(frame_to_grab, self.input_size);\n\n let start = i * input.size;\n\n let end = (i + 1) * input.size;\n\n input.copy_input(&merged_input.buffer[start..end]);\n\n\n\n // disconnected players are identified by NULL_FRAME\n\n if self.host_connect_status[i].disconnected\n\n && self.host_connect_status[i].last_frame < frame_to_grab\n\n {\n\n input.frame = NULL_FRAME;\n\n }\n\n\n\n synced_inputs.push(input);\n\n }\n", "file_path": "src/sessions/p2p_spectator_session.rs", "rank": 86, "score": 28844.095745421022 }, { "content": " info: \"Catchup speed cannot be larger or equal than the allowed maximum frames behind host\"\n\n .to_owned(),\n\n 
});\n\n }\n\n\n\n self.catchup_speed = desired_catchup_speed;\n\n Ok(())\n\n }\n\n\n\n /// Sets the amount of frames behind the host before starting to catch up\n\n pub fn set_max_frames_behind(&mut self, desired_value: u32) -> Result<(), GGRSError> {\n\n if desired_value < 1 {\n\n return Err(GGRSError::InvalidRequest {\n\n info: \"Max frames behind cannot be smaller than 2.\".to_owned(),\n\n });\n\n }\n\n\n\n if desired_value >= SPECTATOR_BUFFER_SIZE as u32 {\n\n return Err(GGRSError::InvalidRequest {\n\n info: \"Max frames behind cannot be larger or equal than the Spectator buffer size (60)\"\n", "file_path": "src/sessions/p2p_spectator_session.rs", "rank": 87, "score": 28843.889759988553 }, { "content": "\n\n Ok(Self {\n\n state: SessionState::Initializing,\n\n num_players,\n\n input_size,\n\n inputs: [BLANK_INPUT; SPECTATOR_BUFFER_SIZE],\n\n host_connect_status,\n\n socket,\n\n host: UdpProtocol::new(0, host_addr, num_players, input_size * num_players as usize),\n\n event_queue: VecDeque::new(),\n\n current_frame: NULL_FRAME,\n\n last_recv_frame: NULL_FRAME,\n\n max_frames_behind: DEFAULT_MAX_FRAMES_BEHIND,\n\n catchup_speed: DEFAULT_CATCHUP_SPEED,\n\n })\n\n }\n\n\n\n /// Returns the current `SessionState` of a session.\n\n pub const fn current_state(&self) -> SessionState {\n\n self.state\n", "file_path": "src/sessions/p2p_spectator_session.rs", "rank": 88, "score": 28843.268767321904 }, { "content": " self.event_queue.drain(..)\n\n }\n\n\n\n /// A spectator can directly start the session. 
Then, the synchronization process will begin.\n\n /// # Errors\n\n /// - Returns `InvalidRequest` if the session has already been started.\n\n pub fn start_session(&mut self) -> Result<(), GGRSError> {\n\n // if we are not in the initialization state, we already started the session at some point\n\n if self.state != SessionState::Initializing {\n\n return Err(GGRSError::InvalidRequest {\n\n info: \"Session already started.\".to_owned(),\n\n });\n\n }\n\n\n\n // start the synchronisation\n\n self.state = SessionState::Synchronizing;\n\n self.host.synchronize();\n\n\n\n Ok(())\n\n }\n", "file_path": "src/sessions/p2p_spectator_session.rs", "rank": 89, "score": 28842.86718095781 }, { "content": " });\n\n }\n\n // forward to user\n\n Event::NetworkResumed => {\n\n self.event_queue\n\n .push_back(GGRSEvent::NetworkResumed { player_handle });\n\n }\n\n // synced with the host, then forward to user\n\n Event::Synchronized => {\n\n self.state = SessionState::Running;\n\n self.event_queue\n\n .push_back(GGRSEvent::Synchronized { player_handle });\n\n }\n\n // disconnect the player, then forward to user\n\n Event::Disconnected => {\n\n self.event_queue\n\n .push_back(GGRSEvent::Disconnected { player_handle });\n\n }\n\n // add the input and all associated information\n\n Event::Input(input) => {\n", "file_path": "src/sessions/p2p_spectator_session.rs", "rank": 90, "score": 28842.27956694529 }, { "content": " /// Should be called periodically by your application to give GGRS a chance to do internal work like packet transmissions.\n\n pub fn poll_remote_clients(&mut self) {\n\n // Get all udp packets and distribute them to associated endpoints.\n\n // The endpoints will handle their packets, which will trigger both events and UPD replies.\n\n for (from, msg) in &self.socket.receive_all_messages() {\n\n if self.host.is_handling_message(from) {\n\n self.host.handle_message(msg);\n\n }\n\n }\n\n\n\n // run host poll and get events. 
This will trigger additional UDP packets to be sent.\n\n let mut events = VecDeque::new();\n\n for event in self.host.poll(&self.host_connect_status) {\n\n events.push_back(event);\n\n }\n\n\n\n // handle all events locally\n\n for event in events.drain(..) {\n\n self.handle_event(event);\n\n }\n", "file_path": "src/sessions/p2p_spectator_session.rs", "rank": 91, "score": 28841.064126972287 }, { "content": "\n\n /// You should call this to notify GGRS that you are ready to advance your gamestate by a single frame.\n\n /// Returns an order-sensitive `Vec<GGRSRequest>`. You should fulfill all requests in the exact order they are provided.\n\n /// Failure to do so will cause panics later.\n\n /// # Errors\n\n /// - Returns `NotSynchronized` if the session is not yet ready to accept input.\n\n /// In this case, you either need to start the session or wait for synchronization between clients.\n\n pub fn advance_frame(&mut self) -> Result<Vec<GGRSRequest>, GGRSError> {\n\n // receive info from host, trigger events and send messages\n\n self.poll_remote_clients();\n\n\n\n if self.state != SessionState::Running {\n\n return Err(GGRSError::NotSynchronized);\n\n }\n\n\n\n let mut requests = Vec::new();\n\n\n\n let frames_to_advance = if self.frames_behind_host() > self.max_frames_behind {\n\n self.catchup_speed\n\n } else {\n", "file_path": "src/sessions/p2p_spectator_session.rs", "rank": 92, "score": 28840.215754489738 }, { "content": " NORMAL_SPEED\n\n };\n\n\n\n for _ in 0..frames_to_advance {\n\n // get inputs for the next frame\n\n let frame_to_grab = self.current_frame + 1;\n\n let synced_inputs = self.inputs_at_frame(frame_to_grab)?;\n\n\n\n requests.push(GGRSRequest::AdvanceFrame {\n\n inputs: synced_inputs,\n\n });\n\n\n\n // advance the frame, but only if grabbing the inputs succeeded\n\n self.current_frame += 1;\n\n }\n\n\n\n Ok(requests)\n\n }\n\n\n\n /// Receive UDP packages, distribute them to corresponding UDP endpoints, handle all occurring events and send 
all outgoing UDP packages.\n", "file_path": "src/sessions/p2p_spectator_session.rs", "rank": 93, "score": 28836.254972746807 }, { "content": "// Set darkmode\n\ndocument.getElementById('mode').addEventListener('click', () => {\n\n\n\n document.body.classList.toggle('dark');\n\n localStorage.setItem('theme', document.body.classList.contains('dark') ? 'dark' : 'light');\n\n \n\n});\n\n \n\nif (localStorage.getItem('theme') === 'dark') {\n\n \n\n document.body.classList.add('dark');\n\n \n\n}\n", "file_path": "docs/js/main.js", "rank": 94, "score": 20670.76654233985 }, { "content": "extern crate freetype as ft;\n\n\n\nuse std::time::Instant;\n\n\n\nuse glutin_window::GlutinWindow as Window;\n\nuse opengl_graphics::{GlGraphics, OpenGL};\n\nuse piston::event_loop::{EventSettings, Events};\n\nuse piston::input::{RenderEvent, UpdateEvent};\n\nuse piston::window::WindowSettings;\n\nuse piston::EventLoop;\n\nuse structopt::StructOpt;\n\n\n\nconst FPS: u64 = 60;\n\nconst INPUT_SIZE: usize = std::mem::size_of::<u8>();\n\n\n\nconst WINDOW_HEIGHT: u32 = 800;\n\nconst WINDOW_WIDTH: u32 = 600;\n\n\n\nmod rapier_game;\n\n\n\n#[derive(StructOpt)]\n", "file_path": "examples/rapier/rapier_synctest.rs", "rank": 95, "score": 44.7499321835247 }, { "content": " .exit_on_esc(true)\n\n .build()\n\n .unwrap();\n\n\n\n // Create a new box game\n\n let mut game = rapier_game::RapierGame::new(num_players, opt.num_bodies);\n\n let mut gl = GlGraphics::new(opengl);\n\n\n\n // event settings\n\n let mut event_settings = EventSettings::new();\n\n event_settings.set_ups(FPS);\n\n event_settings.set_max_fps(FPS);\n\n let mut events = Events::new(event_settings);\n\n\n\n // event loop\n\n while let Some(e) = events.next(&mut window) {\n\n // render update\n\n if let Some(args) = e.render_args() {\n\n game.render(&mut gl, &args);\n\n }\n", "file_path": "examples/rapier/rapier_synctest.rs", "rank": 96, "score": 28.60200516462731 }, { "content": " /// notes which inputs have already been sent to the 
spectators\n\n next_spectator_frame: Frame,\n\n /// The soonest frame on which the session can send a `GGRSEvent::WaitRecommendation` again.\n\n next_recommended_sleep: Frame,\n\n\n\n ///Contains all events to be forwarded to the user.\n\n event_queue: VecDeque<GGRSEvent>,\n\n}\n\n\n\nimpl P2PSession {\n\n pub(crate) fn new(\n\n num_players: u32,\n\n input_size: usize,\n\n port: u16,\n\n ) -> Result<Self, std::io::Error> {\n\n // local connection status\n\n let mut local_connect_status = Vec::new();\n\n for _ in 0..num_players {\n\n local_connect_status.push(ConnectionStatus::default());\n\n }\n", "file_path": "src/sessions/p2p_session.rs", "rank": 97, "score": 21.72857032465569 }, { "content": " Player::Spectator(endpoint) => Some(endpoint),\n\n Player::Remote(_) | Player::Local => None,\n\n }\n\n }\n\n\n\n fn spectator_as_endpoint_mut(&mut self) -> Option<&mut UdpProtocol> {\n\n match self {\n\n Player::Spectator(endpoint) => Some(endpoint),\n\n Player::Remote(_) | Player::Local => None,\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Copy, Clone, PartialEq, Eq)]\n\npub(crate) enum Event {\n\n /// The session is currently synchronizing with the remote client. It will continue until `count` reaches `total`.\n\n Synchronizing { total: u32, count: u32 },\n\n /// The session is now synchronized with the remote client.\n\n Synchronized,\n\n /// The session has received an input from the remote client. 
This event will not be forwarded to the user.\n", "file_path": "src/sessions/p2p_session.rs", "rank": 98, "score": 18.815332578183824 }, { "content": "use std::collections::HashMap;\n\n\n\nuse crate::error::GGRSError;\n\nuse crate::frame_info::GameInput;\n\nuse crate::network::udp_msg::ConnectionStatus;\n\nuse crate::sync_layer::SyncLayer;\n\nuse crate::{Frame, GGRSRequest, PlayerHandle};\n\n\n\n/// During a `SyncTestSession`, GGRS will simulate a rollback every frame and resimulate the last n states, where n is the given check distance.\n\n/// The resimulated checksums will be compared with the original checksums and report if there was a mismatch.\n\n#[derive(Debug)]\n\npub struct SyncTestSession {\n\n num_players: u32,\n\n input_size: usize,\n\n check_distance: u32,\n\n sync_layer: SyncLayer,\n\n dummy_connect_status: Vec<ConnectionStatus>,\n\n checksum_history: HashMap<Frame, u64>,\n\n}\n\n\n", "file_path": "src/sessions/sync_test_session.rs", "rank": 99, "score": 18.32024298272323 } ]
Rust
src/tcp.rs
tearust/natsclient
d41211cb35e0a4fcec5fa0e45a07fbce494655e4
use crate::protocol::{ProtocolHandler, ProtocolMessage, ServerInfo}; use crate::ClientOptions; use crate::Result; use crossbeam_channel::{Receiver, Sender}; use nats_types::DeliveredMessage; use std::io::Read; use std::sync::{Arc, RwLock}; use std::thread; use std::{ io::{BufRead, BufReader, Write}, net::TcpStream, str::FromStr, }; #[derive(Clone)] pub(crate) struct TcpClient { connect_urls: Arc<RwLock<Vec<ServerInfo>>>, delivery_sender: Sender<DeliveredMessage>, write_sender: Sender<Vec<u8>>, write_receiver: Receiver<Vec<u8>>, opts: ClientOptions, connlatch: Sender<bool>, } impl TcpClient { pub fn new( connect_urls: Vec<ServerInfo>, delivery_sender: Sender<DeliveredMessage>, write_sender: Sender<Vec<u8>>, write_receiver: Receiver<Vec<u8>>, opts: ClientOptions, connlatch: Sender<bool>, ) -> TcpClient { TcpClient { connect_urls: Arc::new(RwLock::new(connect_urls)), delivery_sender, write_sender, write_receiver, opts, connlatch, } } pub fn connect(&self) -> Result<()> { let stream_reader = { let urls = self.connect_urls.read().unwrap(); Self::connect_to_host(&urls)? 
}; let mut stream_writer = stream_reader.try_clone()?; let mut buf_reader = BufReader::new(stream_reader); let ph = ProtocolHandler::new(self.opts.clone(), self.delivery_sender.clone()); let write_sender = self.write_sender.clone(); let write_receiver = self.write_receiver.clone(); let connlatch = self.connlatch.clone(); thread::spawn(move || { let mut line = String::new(); loop { match buf_reader.read_line(&mut line) { Ok(line_len) if line_len > 0 => { let pm = if line.starts_with("MSG") { let msgheader = nats_types::parse_msg_header(&line).unwrap(); let mut buffer = vec![0; msgheader.message_len]; buf_reader.read_exact(&mut buffer).unwrap(); buf_reader.read_line(&mut line).unwrap(); ProtocolMessage::Message(DeliveredMessage { reply_to: msgheader.reply_to, payload_size: msgheader.message_len, payload: buffer, subject: msgheader.subject, subscription_id: msgheader.sid, }) } else { ProtocolMessage::from_str(&line).unwrap() }; line.clear(); ph.handle_protocol_message(&pm, &write_sender).unwrap(); } Ok(_) => {} Err(e) => { error!("Error receiving data: {}", e); } } } }); thread::spawn(move || { loop { let vec = write_receiver.recv().unwrap(); match stream_writer.write_all(&vec) { Ok(_) => { trace!("SEND {} bytes", vec.len()); if starts_with(&vec, b"CONNECT") { connlatch.send(true).unwrap(); } } Err(e) => error!("Failed to write buffer: {}", e), }; } }); Ok(()) } fn connect_to_host(servers: &[ServerInfo]) -> Result<TcpStream> { for si in servers { debug!("Attempting to connect to {}:{}", si.host, si.port); let stream = TcpStream::connect((si.host.as_ref(), si.port)); match stream { Ok(s) => return Ok(s), Err(e) => { error!("Failed to establish TCP connection: {}", e); continue; } } } Err(err!(IOError, "Failed to establish TCP connection")) } } fn starts_with(haystack: &[u8], needle: &[u8]) -> bool { let pos = haystack .windows(needle.len()) .position(|window| window == needle); if let Some(p) = pos { p == 0 } else { false } }
use crate::protocol::{ProtocolHandler, ProtocolMessage, ServerInfo}; use crate::ClientOptions; use crate::Result; use crossbeam_channel::{Receiver, Sender}; use nats_types::DeliveredMessage; use std::io::Read; use std::sync::{Arc, RwLock}; use std::thread; use std::{ io::{BufRead, BufReader, Write}, net::TcpStream, str::FromStr, }; #[derive(Clone)] pub(crate) struct TcpClient { connect_urls: Arc<RwLock<Vec<ServerInfo>>>, delivery_sender: Sender<DeliveredMessage>, write_sender: Sender<Vec<u8>>, write_receiver: Receiver<Vec<u8>>, opts: ClientOptions, connlatch: Sender<bool>, } impl TcpClient { pub fn new( connect_urls: Vec<ServerInfo>, delivery_sender: Sender<DeliveredMessage>, write_sender: Sender<Vec<u8>>, write_receiver: Receiver<Vec<u8>>, opts: ClientOptions, connlatch: Sender<bool>, ) -> TcpClient { TcpClient { connect_urls: Arc::new(RwLock::new(connect_urls)), delivery_sender, write_sender, write_receiver, opts, connlatch, } } pub fn connect(&self) -> Result<()> { let stream_reader = { let urls = self.connect_urls.read().unwrap(); Self::connect_to_host(&urls)? }; let mut stream_writer = stream_reader.try_clone()?; let mut buf_reader = BufReader::new(stream_reader); let ph = ProtocolHandler::new(self.opts.clone(), self.delivery_sender.clone()); let write_sender = self.write_sender.clone(); let write_receiver = self.write_receiver.clone();
fn connect_to_host(servers: &[ServerInfo]) -> Result<TcpStream> { for si in servers { debug!("Attempting to connect to {}:{}", si.host, si.port); let stream = TcpStream::connect((si.host.as_ref(), si.port)); match stream { Ok(s) => return Ok(s), Err(e) => { error!("Failed to establish TCP connection: {}", e); continue; } } } Err(err!(IOError, "Failed to establish TCP connection")) } } fn starts_with(haystack: &[u8], needle: &[u8]) -> bool { let pos = haystack .windows(needle.len()) .position(|window| window == needle); if let Some(p) = pos { p == 0 } else { false } }
let connlatch = self.connlatch.clone(); thread::spawn(move || { let mut line = String::new(); loop { match buf_reader.read_line(&mut line) { Ok(line_len) if line_len > 0 => { let pm = if line.starts_with("MSG") { let msgheader = nats_types::parse_msg_header(&line).unwrap(); let mut buffer = vec![0; msgheader.message_len]; buf_reader.read_exact(&mut buffer).unwrap(); buf_reader.read_line(&mut line).unwrap(); ProtocolMessage::Message(DeliveredMessage { reply_to: msgheader.reply_to, payload_size: msgheader.message_len, payload: buffer, subject: msgheader.subject, subscription_id: msgheader.sid, }) } else { ProtocolMessage::from_str(&line).unwrap() }; line.clear(); ph.handle_protocol_message(&pm, &write_sender).unwrap(); } Ok(_) => {} Err(e) => { error!("Error receiving data: {}", e); } } } }); thread::spawn(move || { loop { let vec = write_receiver.recv().unwrap(); match stream_writer.write_all(&vec) { Ok(_) => { trace!("SEND {} bytes", vec.len()); if starts_with(&vec, b"CONNECT") { connlatch.send(true).unwrap(); } } Err(e) => error!("Failed to write buffer: {}", e), }; } }); Ok(()) }
function_block-function_prefix_line
[ { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n pretty_env_logger::init();\n\n\n\n info!(\"Starting market service...\");\n\n let jwt = \"eyJ0eXAiOiJqd3QiLCJhbGciOiJlZDI1NTE5In0.eyJqdGkiOiJBNDNRN1NLT0tCT0tYUDc1WVhMWjcyVDZKNDVIVzJKR0ZRWUJFQ1I2VE1FWEZFN1RKSjVBIiwiaWF0IjoxNTU0ODk2OTQ1LCJpc3MiOiJBQU9KV0RRV1pPQkNFTUVWWUQ2VEhPTUVCSExYS0NBMzZGU0dJVUxINFBWRU1ORDVUMjNEUEM0VSIsIm5hbWUiOiJiZW1pc191c2VyIiwic3ViIjoiVUNMRkYzTFBLTTQ3WTZGNkQ3UExMVzU2MzZKU1JDUFhFUUFDTEdVWTZNT01BS1lXMkk2VUFFRUQiLCJ0eXBlIjoidXNlciIsIm5hdHMiOnsicHViIjp7fSwic3ViIjp7fX19.3aH-hCSTS8z8rg2km7Q_aat5VpwT-t9swSmh3bnVBY_9IV9wE9mjSOUgHE2sq-7pR4HTCpYa0RPrNcgNfaVuBg\";\n\n let seed = \"SUACGBWJZLVP4CHF7WTY65KT3I4QHAQ5DEZMFAJKTIUIRQPXE6DVMFQEUU\";\n\n let opts = ClientOptions::builder()\n\n .cluster_uris(vec![\"nats://localhost:4222\".into()])\n\n .authentication(AuthenticationStyle::UserCredentials(\n\n jwt.to_string(),\n\n seed.to_string(),\n\n ))\n\n .build()?;\n\n\n\n let client = Client::from_options(opts)?;\n\n client.connect()?;\n\n\n\n let c = client.clone();\n\n client.subscribe(\"symbolquery\", move |msg| {\n\n info!(\"Received stock symbol query: {}\", msg);\n", "file_path": "examples/marketservice.rs", "rank": 0, "score": 72098.94332190647 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n pretty_env_logger::init();\n\n\n\n let jwt = \"eyJ0eXAiOiJqd3QiLCJhbGciOiJlZDI1NTE5In0.eyJqdGkiOiJBNDNRN1NLT0tCT0tYUDc1WVhMWjcyVDZKNDVIVzJKR0ZRWUJFQ1I2VE1FWEZFN1RKSjVBIiwiaWF0IjoxNTU0ODk2OTQ1LCJpc3MiOiJBQU9KV0RRV1pPQkNFTUVWWUQ2VEhPTUVCSExYS0NBMzZGU0dJVUxINFBWRU1ORDVUMjNEUEM0VSIsIm5hbWUiOiJiZW1pc191c2VyIiwic3ViIjoiVUNMRkYzTFBLTTQ3WTZGNkQ3UExMVzU2MzZKU1JDUFhFUUFDTEdVWTZNT01BS1lXMkk2VUFFRUQiLCJ0eXBlIjoidXNlciIsIm5hdHMiOnsicHViIjp7fSwic3ViIjp7fX19.3aH-hCSTS8z8rg2km7Q_aat5VpwT-t9swSmh3bnVBY_9IV9wE9mjSOUgHE2sq-7pR4HTCpYa0RPrNcgNfaVuBg\";\n\n let seed = \"SUACGBWJZLVP4CHF7WTY65KT3I4QHAQ5DEZMFAJKTIUIRQPXE6DVMFQEUU\";\n\n let opts = ClientOptions::builder()\n\n 
.cluster_uris(vec![\"nats://localhost:4222\".into()])\n\n .authentication(AuthenticationStyle::UserCredentials(\n\n jwt.to_string(),\n\n seed.to_string(),\n\n ))\n\n .build()?;\n\n\n\n let client = Client::from_options(opts)?;\n\n client.connect()?;\n\n\n\n client.subscribe(\"ticker\", move |msg| {\n\n let symbol: SymbolReply = serde_json::from_slice(&msg.payload).unwrap();\n\n info!(\"Received stock ticker: {:?}\", symbol);\n\n Ok(())\n", "file_path": "examples/marketclient.rs", "rank": 1, "score": 72098.94332190647 }, { "content": "type MessageHandler = Arc<dyn Fn(&Message) -> Result<()> + Sync + Send>;\n\n\n\nconst NUID_LENGTH: usize = 22;\n\nconst INBOX_PREFIX: &'static str = \"_INBOX.\";\n\n\n\npub(crate) struct Subscription {\n\n id: usize,\n\n subject: String,\n\n handler: MessageHandler,\n\n}\n\n\n\n#[derive(Clone)]\n\npub(crate) struct SubscriptionManager {\n\n client_id: String,\n\n subs: Arc<RwLock<HashMap<usize, Subscription>>>,\n\n inboxes: Arc<RwLock<HashMap<String, Sender<Message>>>>,\n\n sender: channel::Sender<Vec<u8>>,\n\n current_sid: Arc<AtomicUsize>,\n\n}\n\n\n", "file_path": "src/subs.rs", "rank": 2, "score": 43314.217140154884 }, { "content": "type MessageHandler = Arc<dyn Fn(&Message) -> Result<()> + Sync + Send>;\n\n\n\n/// Options to configure the NATS client. 
A builder is available so a fluent\n\n/// API can be used to set options\n\n#[derive(Debug, Clone, Builder, PartialEq)]\n\n#[builder(setter(into), default)]\n\npub struct ClientOptions {\n\n cluster_uris: Vec<String>,\n\n authentication: AuthenticationStyle,\n\n connect_timeout: Duration,\n\n reconnect_attempts: u8,\n\n}\n\n\n\nimpl Default for ClientOptions {\n\n fn default() -> Self {\n\n ClientOptions {\n\n cluster_uris: Vec::new(),\n\n authentication: AuthenticationStyle::Anonymous,\n\n connect_timeout: Duration::from_millis(100),\n\n reconnect_attempts: 3,\n", "file_path": "src/lib.rs", "rank": 3, "score": 43314.217140154884 }, { "content": "fn new_inbox(client_id: &str) -> String {\n\n format!(\"{}{}.{}\", INBOX_PREFIX, client_id, nuid::next()) // _INBOX.(nuid).(nuid)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::SubscriptionManager;\n\n use crate::{protocol::ProtocolMessage, Message};\n\n use crossbeam_channel as channel;\n\n use nats_types::{SubscribeMessage, UnsubscribeMessage};\n\n use std::sync::Arc;\n\n\n\n #[test]\n\n fn add_subscription_sends_sub_message() {\n\n let (sender, r) = channel::unbounded();\n\n\n\n let sm = SubscriptionManager::new(\"test\".to_string(), sender);\n\n\n\n sm.add_sub(\"test\", None, Arc::new(msg_handler)).unwrap();\n\n let sub_message = r.recv().unwrap();\n", "file_path": "src/subs.rs", "rank": 4, "score": 41335.42270964245 }, { "content": "#[derive(Serialize, Deserialize, Debug)]\n\nstruct SymbolReply {\n\n symbol: String,\n\n price: u64,\n\n market_cap: u64,\n\n world_domination: bool,\n\n}\n", "file_path": "examples/marketservice.rs", "rank": 5, "score": 33966.39203726125 }, { "content": "#[derive(Serialize, Deserialize, Debug)]\n\nstruct SymbolQuery {\n\n symbol: String,\n\n}\n\n\n", "file_path": "examples/marketservice.rs", "rank": 6, "score": 33966.39203726125 }, { "content": "#[derive(Serialize, Deserialize, Debug)]\n\nstruct SymbolQuery {\n\n symbol: String,\n\n}\n\n\n", "file_path": 
"examples/marketclient.rs", "rank": 7, "score": 33966.39203726125 }, { "content": "#[derive(Serialize, Deserialize, Debug)]\n\nstruct SymbolReply {\n\n symbol: String,\n\n price: u64,\n\n market_cap: u64,\n\n world_domination: bool,\n\n}\n", "file_path": "examples/marketclient.rs", "rank": 8, "score": 33966.39203726125 }, { "content": "fn generate_symbol_reply(payload: &Vec<u8>) -> Vec<u8> {\n\n let query: SymbolQuery = serde_json::from_slice(payload.as_slice()).unwrap();\n\n\n\n let reply = SymbolReply {\n\n symbol: query.symbol,\n\n price: 10000,\n\n market_cap: 200000,\n\n world_domination: true,\n\n };\n\n\n\n serde_json::to_vec(&reply).unwrap()\n\n}\n\n\n", "file_path": "examples/marketservice.rs", "rank": 10, "score": 22292.685942369437 }, { "content": "impl ProtocolHandler {\n\n pub fn new(\n\n opts: ClientOptions,\n\n delivery_sender: channel::Sender<DeliveredMessage>,\n\n ) -> ProtocolHandler {\n\n ProtocolHandler {\n\n opts,\n\n delivery_sender,\n\n }\n\n }\n\n\n\n pub fn handle_protocol_message(\n\n &self,\n\n pm: &ProtocolMessage,\n\n sender: &channel::Sender<Vec<u8>>,\n\n ) -> Result<()> {\n\n match pm {\n\n ProtocolMessage::Info(server_info) => {\n\n if let Some(urls) = &server_info.connect_urls {\n\n let _server_urls = parse_server_uris(&urls)?;\n", "file_path": "src/protocol.rs", "rank": 13, "score": 14.52461314469602 }, { "content": "\n\n Ok(Client {\n\n inbox_wildcard: format!(\"_INBOX.{}.*\", &id),\n\n id: id.clone(),\n\n opts,\n\n servers: protocol::parse_server_uris(&uris)?,\n\n submgr: SubscriptionManager::new(id, ws.clone()),\n\n server_index: 0,\n\n delivery_sender: ds,\n\n delivery_receiver: dr,\n\n write_sender: ws,\n\n write_receiver: wr,\n\n })\n\n }\n\n\n\n /// Creates a new client using the default options and the given URL. 
A client created this way will\n\n /// attempt to establish an anonymous connection with the given NATS server\n\n pub fn new(url: &str) -> Result<Client> {\n\n let opts = ClientOptions::builder()\n\n .cluster_uris(vec![url.into()])\n", "file_path": "src/lib.rs", "rank": 14, "score": 14.460461094884604 }, { "content": " server_index: usize,\n\n submgr: SubscriptionManager,\n\n delivery_sender: channel::Sender<DeliveredMessage>,\n\n delivery_receiver: channel::Receiver<DeliveredMessage>,\n\n write_sender: channel::Sender<Vec<u8>>,\n\n write_receiver: channel::Receiver<Vec<u8>>,\n\n}\n\n\n\nimpl Client {\n\n /// Creates a new client from a set of options, which can be created directly\n\n /// or through a `ClientOptionsBuilder`\n\n pub fn from_options(opts: ClientOptions) -> Result<Client> {\n\n let uris = opts.cluster_uris.clone();\n\n let (ds, dr) = channel::unbounded();\n\n let (ws, wr) = channel::unbounded();\n\n\n\n let mut nuid = nuid::NUID::new();\n\n nuid.randomize_prefix();\n\n\n\n let id = nuid.next();\n", "file_path": "src/lib.rs", "rank": 16, "score": 13.411756280699628 }, { "content": "impl SubscriptionManager {\n\n pub fn new(client_id: String, sender: channel::Sender<Vec<u8>>) -> SubscriptionManager {\n\n SubscriptionManager {\n\n client_id,\n\n subs: Arc::new(RwLock::new(HashMap::new())),\n\n sender,\n\n current_sid: Arc::new(AtomicUsize::new(1)),\n\n inboxes: Arc::new(RwLock::new(HashMap::new())),\n\n }\n\n }\n\n\n\n pub fn add_new_inbox_sub(&self, sender: Sender<Message>) -> Result<String> {\n\n let subject = new_inbox(&self.client_id);\n\n let mut inboxes = self.inboxes.write().unwrap();\n\n inboxes.insert(subject.clone(), sender);\n\n\n\n Ok(subject)\n\n }\n\n\n\n pub fn sender_for_inbox(&self, inbox: &str) -> Option<Sender<Message>> {\n", "file_path": "src/subs.rs", "rank": 17, "score": 13.032745202457688 }, { "content": " fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", self.as_str())\n\n }\n\n}\n\n\n\nimpl Error 
{\n\n /// Creates a new natsclient error wrapper\n\n pub fn new(kind: ErrorKind, description: Option<&str>) -> Self {\n\n Error {\n\n kind,\n\n description: description.map(|desc| desc.to_string()),\n\n }\n\n }\n\n\n\n /// An accessor exposing the error kind enum. Crate consumers should have little to no\n\n /// need to access this directly and it's mostly used to assert that internal functions\n\n /// are creating appropriate error wrappers.\n\n pub fn kind(&self) -> ErrorKind {\n\n self.kind\n\n }\n", "file_path": "src/error.rs", "rank": 18, "score": 11.587454707742962 }, { "content": "pub(crate) fn parse_nats_uri(uri: &str) -> Result<Url> {\n\n let url_str = if uri.to_owned().contains(\"://\") {\n\n uri.to_owned()\n\n } else {\n\n let mut url_str = \"nats://\".to_owned();\n\n url_str.push_str(uri);\n\n url_str\n\n };\n\n let url = Url::parse(&url_str)?;\n\n if url.scheme() != URI_SCHEME {\n\n Err(err!(UriParseFailure, \"Failed to parse NATS URI\"))\n\n } else {\n\n Ok(url)\n\n }\n\n}\n\n\n\npub(crate) fn parse_server_uris(uris: &[String]) -> Result<Vec<ServerInfo>> {\n\n let mut servers = Vec::new();\n\n\n\n for uri in uris {\n", "file_path": "src/protocol.rs", "rank": 19, "score": 10.513354102775894 }, { "content": " let inboxes = self.inboxes.read().unwrap();\n\n if inboxes.contains_key(inbox) {\n\n let sender = &inboxes[inbox];\n\n Some(sender.clone())\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n pub fn remove_inbox(&self, inbox: &str) {\n\n let mut inboxes = self.inboxes.write().unwrap();\n\n inboxes.remove(inbox);\n\n }\n\n\n\n pub fn add_sub(\n\n &self,\n\n subject: &str,\n\n queue_group: Option<&str>,\n\n handler: MessageHandler,\n\n ) -> Result<usize> {\n", "file_path": "src/subs.rs", "rank": 20, "score": 9.600953275786182 }, { "content": " where\n\n F: Fn(&Message) -> Result<()> + Sync + Send,\n\n F: 'static,\n\n {\n\n self.raw_subscribe(subject, Some(queue_group), Arc::new(handler))\n\n }\n\n\n\n /// Perform a synchronous request by publishing a message 
on the given subject and waiting\n\n /// an expiration period indicated by the `timeout` parameter. If the timeout expires before\n\n /// a reply arrives on the inbox subject, an `Err` result will be returned.\n\n pub fn request<T>(\n\n &self,\n\n subject: T,\n\n payload: &[u8],\n\n timeout: std::time::Duration,\n\n ) -> Result<Message>\n\n where\n\n T: AsRef<str>,\n\n {\n\n let (sender, receiver) = channel::bounded(1);\n", "file_path": "src/lib.rs", "rank": 21, "score": 9.581005713913093 }, { "content": "use crate::{Message, Result};\n\nuse crossbeam_channel as channel;\n\nuse crossbeam_channel::Sender;\n\nuse nats_types::{SubscribeMessage, UnsubscribeMessage};\n\nuse std::collections::HashMap;\n\nuse std::sync::atomic::{AtomicUsize, Ordering};\n\nuse std::sync::{Arc, RwLock};\n\n\n", "file_path": "src/subs.rs", "rank": 22, "score": 9.498449337535913 }, { "content": " }\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n match self.description {\n\n Some(ref desc) => write!(f, \"{}: {}\", self.description(), desc),\n\n None => write!(f, \"{}\", self.description()),\n\n }\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 23, "score": 9.223755767309783 }, { "content": " let mut subs = self.subs.write().unwrap();\n\n let sid = self.next_sid();\n\n let vec = SubscribeMessage::as_vec(subject, queue_group, sid)?;\n\n self.sender.send(vec)?;\n\n subs.insert(\n\n sid,\n\n Subscription {\n\n id: sid,\n\n subject: subject.to_string(),\n\n handler: handler,\n\n },\n\n );\n\n Ok(sid)\n\n }\n\n\n\n pub fn unsubscribe(&self, sid: usize, max_msgs: Option<usize>) -> Result<()> {\n\n let mut subs = self.subs.write().unwrap();\n\n let vec = UnsubscribeMessage::as_vec(sid, max_msgs)?;\n\n self.sender.send(vec)?;\n\n subs.remove(&sid);\n", "file_path": "src/subs.rs", "rank": 24, "score": 9.08503184423998 }, { "content": "#![allow(dead_code)]\n\n#[macro_use]\n\nextern crate derive_builder;\n\n\n\n#[macro_use]\n\nextern crate 
log;\n\n\n\nuse crate::protocol::ServerInfo;\n\nuse crate::subs::SubscriptionManager;\n\nuse crate::tcp::TcpClient;\n\nuse crossbeam_channel::{self as channel, bounded};\n\nuse nats_types::{DeliveredMessage, PublishMessage};\n\nuse regex::Captures;\n\nuse regex::Regex;\n\nuse std::{sync::Arc, thread, time::Duration};\n\npub type Result<T> = std::result::Result<T, crate::error::Error>;\n\n\n\npub use nats_types::DeliveredMessage as Message;\n\n\n\n/// Indicates the type of client authentication used by the NATS client\n", "file_path": "src/lib.rs", "rank": 25, "score": 9.063186834848795 }, { "content": "use crate::ClientOptions;\n\nuse crate::{AuthenticationStyle, Result};\n\nuse crossbeam_channel as channel;\n\nuse nats_types::DeliveredMessage;\n\nuse nkeys::KeyPair;\n\nuse rand::{self, seq::SliceRandom, thread_rng};\n\nuse url::Url;\n\n\n\npub use nats_types::{ConnectionInformation, ProtocolMessage};\n\n\n\n#[derive(Debug, Clone)]\n\npub(crate) struct ServerInfo {\n\n pub host: String,\n\n pub port: u16,\n\n}\n\n\n\nconst URI_SCHEME: &str = \"nats\";\n\nconst DEFAULT_NAME: &str = \"#natsclientrust\";\n\nconst DEFAULT_PORT: u16 = 4222;\n\n\n", "file_path": "src/protocol.rs", "rank": 26, "score": 8.989723191591299 }, { "content": " payload_size: payload.len(),\n\n subject: subject.to_string(),\n\n reply_to,\n\n }); */\n\n let vec = PublishMessage::as_vec(subject, reply_to, payload)?;\n\n match self.write_sender.send(vec) {\n\n Ok(_) => Ok(()),\n\n Err(e) => Err(err!(ConcurrencyFailure, \"Concurrency failure: {}\", e)),\n\n }\n\n }\n\n\n\n /// Connect a client to the NATS server(s) indicated by previously supplied configuration\n\n pub fn connect(&self) -> Result<()> {\n\n let (s, r) = bounded(1); // Create a thread block until we send the CONNECT preamble\n\n\n\n let tcp_client = TcpClient::new(\n\n self.servers.clone(),\n\n self.delivery_sender.clone(),\n\n self.write_sender.clone(),\n\n self.write_receiver.clone(),\n", "file_path": "src/lib.rs", "rank": 27, 
"score": 8.832242522416028 }, { "content": " .build()\n\n .unwrap();\n\n Self::from_options(opts)\n\n }\n\n\n\n /// Creates a subscription to a new subject. The subject can be a specfic subject\n\n /// or a wildcard. The handler supplied will be given a reference to delivered messages\n\n /// as they arrive, and can return a Result to indicate processing failure\n\n pub fn subscribe<F>(&self, subject: &str, handler: F) -> Result<()>\n\n where\n\n F: Fn(&Message) -> Result<()> + Sync + Send,\n\n F: 'static,\n\n {\n\n self.raw_subscribe(subject, None, Arc::new(handler))\n\n }\n\n\n\n /// Creates a subscription for a queue group, allowing message delivery to be spread\n\n /// round-robin style across all clients expressing interest in that subject. For more information on how queue groups work,\n\n /// consult the NATS documentation.\n\n pub fn queue_subscribe<F>(&self, subject: &str, queue_group: &str, handler: F) -> Result<()>\n", "file_path": "src/lib.rs", "rank": 28, "score": 8.275012937065114 }, { "content": "#[derive(Clone, Debug, PartialEq)]\n\npub enum AuthenticationStyle {\n\n /// JSON Web Token (JWT)-based authentication using a JWT and a seed (private) key\n\n UserCredentials(String, String),\n\n /// Single token based authentication\n\n Token(String),\n\n /// Basic authentication with username and password\n\n Basic { username: String, password: String },\n\n /// Anonymous (unauthenticated)\n\n Anonymous,\n\n}\n\n\n\nimpl AuthenticationStyle {\n\n pub fn from_credsfile(file: &str) -> Result<AuthenticationStyle> {\n\n let raw = std::fs::read_to_string(file)?;\n\n\n\n Self::from_credsfile_str(&raw)\n\n }\n\n\n\n fn from_credsfile_str(raw: &str) -> Result<AuthenticationStyle> {\n", "file_path": "src/lib.rs", "rank": 30, "score": 7.9125430926631015 }, { "content": "//! # Error wrappers and boilerplate\n\n//!\n\n//! This module provides some basic boilerplate for errors. As a consumer of this\n\n//! 
library, you should expect that all public functions return a `Result` type\n\n//! using this local `Error`, which implements the standard Error trait.\n\n//! As a general rule, errors that come from dependent crates are wrapped by\n\n//! this crate's error type.\n\n#![allow(unused_macros)]\n\n\n\nuse core::fmt;\n\nuse nats_types::DeliveredMessage;\n\n\n\nuse std::{\n\n error::Error as StdError,\n\n string::{String, ToString},\n\n};\n\n\n\n/// Provides an error type specific to the natsclient library\n\n#[derive(Debug)]\n\npub struct Error {\n", "file_path": "src/error.rs", "rank": 31, "score": 7.685000625314395 }, { "content": " //TODO: dispatch these new URLs to the client for mutation\n\n }\n\n\n\n let conn = generate_connect_command(pm, &self.opts.authentication); // TODO: once accepting URL updates, only send connect once\n\n sender.send(conn.to_string().into_bytes())?;\n\n }\n\n ProtocolMessage::Ping => {\n\n sender.send(ProtocolMessage::Pong.to_string().into_bytes())?;\n\n }\n\n ProtocolMessage::Message(msg) => {\n\n self.delivery_sender.send(msg.clone())?;\n\n }\n\n _ => {}\n\n };\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/protocol.rs", "rank": 32, "score": 7.616830273321924 }, { "content": " let inbox = self.submgr.add_new_inbox_sub(sender)?;\n\n self.publish(subject.as_ref(), payload, Some(&inbox))?;\n\n match receiver.recv_timeout(timeout) {\n\n Ok(msg) => Ok(msg.clone()),\n\n Err(e) => Err(err!(Timeout, \"Request timeout expired: {}\", e)),\n\n }\n\n }\n\n\n\n /// Unsubscribe from a subject or wildcard\n\n pub fn unsubscribe(&self, subject: impl AsRef<str>) -> Result<()> {\n\n let s = subject.as_ref();\n\n self.submgr.unsubscribe_by_subject(s)\n\n }\n\n\n\n /// Asynchronously publish a message. 
This is a fire-and-forget style message and an `Ok`\n\n /// result here does not imply that interested parties have received the message, only that\n\n /// the message was successfully sent to NATS.\n\n pub fn publish(&self, subject: &str, payload: &[u8], reply_to: Option<&str>) -> Result<()> {\n\n /* let pm = ProtocolMessage::Publish(PublishMessage {\n\n payload: payload.to_vec(),\n", "file_path": "src/lib.rs", "rank": 33, "score": 7.426605636769103 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl ClientOptions {\n\n /// Create a new Client Options Builder\n\n pub fn builder() -> ClientOptionsBuilder {\n\n ClientOptionsBuilder::default()\n\n }\n\n}\n\n\n\n/// The main entry point for your application to consume NATS services. This client\n\n/// manages connections, connection retries, adjusts to new servers as they enter the\n\n/// cluster, and much more.\n\n#[derive(Clone)]\n\npub struct Client {\n\n id: String,\n\n inbox_wildcard: String,\n\n opts: ClientOptions,\n\n servers: Vec<ServerInfo>,\n", "file_path": "src/lib.rs", "rank": 34, "score": 7.378950245909177 }, { "content": "}\n\n\n\nimpl From<url::ParseError> for Error {\n\n fn from(source: url::ParseError) -> Error {\n\n err!(UriParseFailure, \"URI parse failure: {}\", source)\n\n }\n\n}\n\n\n\nimpl From<std::io::Error> for Error {\n\n fn from(source: std::io::Error) -> Error {\n\n err!(IOError, \"I/O error: {}\", source)\n\n }\n\n}\n\n\n\nimpl From<(ErrorKind, &'static str)> for Error {\n\n fn from((kind, description): (ErrorKind, &'static str)) -> Error {\n\n Error {\n\n kind,\n\n description: Some(description.to_string()),\n\n }\n", "file_path": "src/error.rs", "rank": 35, "score": 7.084277378549222 }, { "content": " ) -> Result<()> {\n\n match self.submgr.add_sub(subject, queue_group, handler) {\n\n Ok(_) => Ok(()),\n\n Err(e) => Err(err!(SubscriptionFailure, \"Subscription failure: {}\", e)),\n\n }\n\n }\n\n}\n\n\n\nimpl Default for Client {\n\n /// Creates a default client, using anonymous authentication 
and pointing to the localhost NATS server\n\n fn default() -> Client {\n\n Client::from_options(ClientOptions::default()).unwrap()\n\n }\n\n}\n\n\n\n#[macro_use]\n\npub mod error;\n\nmod protocol;\n\nmod subs;\n\nmod tcp;\n", "file_path": "src/lib.rs", "rank": 36, "score": 6.799056634145839 }, { "content": " false,\n\n auth_token,\n\n user,\n\n pass,\n\n \"en-us\".to_string(),\n\n \"natsclient-rust\".to_string(),\n\n \"0.0.1\".to_string(),\n\n Some(1),\n\n sig,\n\n jwt,\n\n );\n\n ProtocolMessage::Connect(ci)\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub(crate) struct ProtocolHandler {\n\n opts: ClientOptions,\n\n delivery_sender: channel::Sender<DeliveredMessage>,\n\n}\n\n\n", "file_path": "src/protocol.rs", "rank": 37, "score": 6.015520371059731 }, { "content": " self.opts.clone(),\n\n s,\n\n );\n\n tcp_client.connect()?;\n\n info!(\"TCP connection established.\");\n\n\n\n if r.recv_timeout(self.opts.connect_timeout).is_err() {\n\n error!(\"Failed to establish NATS connection within timeout\");\n\n return Err(err!(\n\n Timeout,\n\n \"Failed to establish connection without timeout\"\n\n ));\n\n };\n\n\n\n let mgr = self.submgr.clone();\n\n self.submgr.add_sub(\n\n &self.inbox_wildcard, // _INBOX.(nuid).*\n\n None,\n\n Arc::new(move |msg| {\n\n if let Some(sender) = mgr.sender_for_inbox(&msg.subject) {\n", "file_path": "src/lib.rs", "rank": 38, "score": 5.966050884905659 }, { "content": "![travis](https://travis-ci.org/encabulators/natsclient.svg?branch=master)&nbsp;\n\n![license](https://img.shields.io/github/license/encabulators/natsclient.svg)\n\n\n\n\n\n# NATS Client\n\nA simple, developer-friendly NATS client designed with an ergonomic API designed to allow you to use this client anywhere, whether you're using `tokio` or single-threaded apps or traditional multi-threaded.\n\n\n\n## Usage\n\nThe following sample illustrates basic publish and subscribe features:\n\n\n\n```rust\n\nlet jwt = \"...\";\n\nlet seed = \"...\";\n\n \n\nlet opts = 
ClientOptions::builder()\n\n .cluster_uris(vec![\"nats://localhost:4222\".into()])\n\n .authentication(AuthenticationStyle::UserCredentials(\n\n jwt.to_string(),\n\n seed.to_string(),\n\n ))\n\n .build()?;\n\n\n\nlet client = Client::from_options(opts)?;\n\nclient.connect()?;\n\n\n\nclient.subscribe(\"ticker\", move |msg| {\n\n let symbol: SymbolReply = serde_json::from_slice(&msg.payload).unwrap();\n\n info!(\"Received stock ticker: {:?}\", symbol);\n\n Ok(())\n\n})?;\n\n```\n\n\n\nTo publish a message:\n\n\n\n```rust\n\nc.publish(&r, payload_bytes, None)?;\n\n```\n\n\n\nAnd to utilize the request/response pattern:\n\n\n\n```rust\n\n let reply = client.request(\n\n \"symbolquery\",\n\n r#\"{\"symbol\": \"NATS\"}\"#.as_bytes(),\n\n Duration::from_millis(100),\n\n)?;\n\n\n\nlet symbol: SymbolReply = serde_json::from_slice(&reply.payload).unwrap();\n\ninfo!(\"Stock symbol response: {:?}\", symbol);\n\n```\n\n\n\n## Features\n\nThe following is a list of features currently supported and planned by this client:\n\n\n\n* [X] - Request/Reply\n\n* [X] - Subscribe\n\n* [X] - Publish\n\n* [X] - All authentication models, including NATS 2.0 JWT and seed keys\n\n* [X] - Adherance to protocol v1, accepts new server information whenever it's sent from NATS\n\n* [ ] - Automatic reconnect upon connection failure\n\n* [ ] - TLS support\n\n* [ ] - NATS Streaming (STAN)\n", "file_path": "README.md", "rank": 39, "score": 5.6515391769495675 }, { "content": "extern crate pretty_env_logger;\n\n#[macro_use]\n\nextern crate log;\n\n\n\n#[macro_use]\n\nextern crate serde_derive;\n\nuse natsclient::{self, AuthenticationStyle, Client, ClientOptions};\n\nuse serde_json;\n\nuse std::time::Duration;\n\n\n", "file_path": "examples/marketclient.rs", "rank": 40, "score": 5.158269076206549 }, { "content": "extern crate pretty_env_logger;\n\n#[macro_use]\n\nextern crate log;\n\n\n\n#[macro_use]\n\nextern crate serde_derive;\n\n\n\nuse natsclient::{self, AuthenticationStyle, Client, 
ClientOptions};\n\nuse serde_json;\n\nuse std::{thread, time::Duration};\n\n\n", "file_path": "examples/marketservice.rs", "rank": 41, "score": 5.1259909448256815 }, { "content": " }\n\n}\n\n\n\nimpl From<Box<dyn std::error::Error>> for Error {\n\n fn from(source: Box<dyn std::error::Error>) -> Error {\n\n err!(Miscellaneous, \"Misc error: {}\", source)\n\n }\n\n}\n\n\n\nimpl From<crossbeam_channel::SendError<Vec<u8>>> for Error {\n\n fn from(source: crossbeam_channel::SendError<Vec<u8>>) -> Error {\n\n err!(ConcurrencyFailure, \"Concurrency error: {}\", source)\n\n }\n\n}\n\n\n\nimpl From<crossbeam_channel::SendError<DeliveredMessage>> for Error {\n\n fn from(source: crossbeam_channel::SendError<DeliveredMessage>) -> Error {\n\n err!(ConcurrencyFailure, \"Concurrency error: {}\", source)\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 42, "score": 4.539211679033866 }, { "content": "\n\n assert_eq!(\n\n String::from_utf8(sub_message).unwrap(),\n\n ProtocolMessage::Subscribe(SubscribeMessage {\n\n queue_group: None,\n\n subject: \"test\".to_string(),\n\n subscription_id: 1,\n\n })\n\n .to_string()\n\n );\n\n }\n\n\n\n #[test]\n\n fn remove_subscription_sends_unsub_message() {\n\n let (sender, r) = channel::unbounded();\n\n\n\n let sm = SubscriptionManager::new(\"test\".to_string(), sender);\n\n\n\n let sid = sm.add_sub(\"test\", None, Arc::new(msg_handler)).unwrap();\n\n let _ = r.recv().unwrap();\n", "file_path": "src/subs.rs", "rank": 43, "score": 4.33614329071246 }, { "content": "\n\nimpl From<crossbeam_channel::RecvTimeoutError> for Error {\n\n fn from(source: crossbeam_channel::RecvTimeoutError) -> Error {\n\n err!(Timeout, \"Timeout expired: {}\", source)\n\n }\n\n}\n\n\n\nimpl From<regex::Error> for Error {\n\n fn from(source: regex::Error) -> Error {\n\n err!(Regex, \"Regular expression parse failure: {}\", source)\n\n }\n\n}\n\n\n\nimpl StdError for Error {\n\n fn description(&self) -> &str {\n\n if let Some(ref desc) = self.description {\n\n desc\n\n } 
else {\n\n self.kind.as_str()\n\n }\n", "file_path": "src/error.rs", "rank": 44, "score": 4.32354752962366 }, { "content": " Ok(())\n\n }\n\n\n\n pub fn unsubscribe_by_subject(&self, subject: &str) -> Result<()> {\n\n let sid = self.sid_for_subject(subject)?;\n\n self.unsubscribe(sid, None)\n\n }\n\n\n\n pub fn handler_for_sid(&self, sid: usize) -> Result<MessageHandler> {\n\n let subs = self.subs.read().unwrap();\n\n Ok(subs[&sid].handler.clone())\n\n }\n\n\n\n fn next_sid(&self) -> usize {\n\n self.current_sid.fetch_add(1, Ordering::Relaxed)\n\n }\n\n\n\n fn sid_for_subject(&self, subject: &str) -> Result<usize> {\n\n let subs = self.subs.read().unwrap();\n\n for (k, v) in subs.iter() {\n\n if v.subject == *subject {\n\n return Ok(*k);\n\n }\n\n }\n\n Err(err!(SubscriptionFailure, \"No such subject: {}\", subject))\n\n }\n\n}\n\n\n", "file_path": "src/subs.rs", "rank": 45, "score": 4.177593482117641 }, { "content": " let parsed = parse_nats_uri(uri)?;\n\n let host = parsed\n\n .host_str()\n\n .ok_or((\n\n crate::error::ErrorKind::InvalidClientConfig,\n\n \"Missing host name\",\n\n ))?\n\n .to_owned();\n\n let port = parsed.port().unwrap_or(DEFAULT_PORT);\n\n servers.push(ServerInfo { host, port });\n\n }\n\n\n\n let mut rng = thread_rng();\n\n servers.shuffle(&mut rng);\n\n\n\n Ok(servers)\n\n}\n\n\n\npub(crate) fn is_multiline_message(line: &str) -> bool {\n\n line.starts_with(\"MSG\") || line.starts_with(\"PUB\")\n", "file_path": "src/protocol.rs", "rank": 46, "score": 4.168933198839547 }, { "content": "}\n\n\n\npub(crate) fn generate_connect_command(\n\n info: &ProtocolMessage,\n\n auth: &AuthenticationStyle,\n\n) -> ProtocolMessage {\n\n let mut user: Option<String> = None;\n\n let mut pass: Option<String> = None;\n\n let mut auth_token: Option<String> = None;\n\n let mut jwt: Option<String> = None;\n\n let mut sig: Option<String> = None;\n\n\n\n if let AuthenticationStyle::UserCredentials(injwt, seed) = auth {\n\n if let ProtocolMessage::Info(ref 
server_info) = info {\n\n let kp = KeyPair::from_seed(seed.as_ref()).unwrap();\n\n let nonce = server_info.nonce.clone().unwrap();\n\n let sigbytes = kp.sign(nonce.as_bytes()).unwrap();\n\n sig = Some(data_encoding::BASE64URL_NOPAD.encode(&sigbytes));\n\n jwt = Some(injwt.to_string());\n\n } else {\n", "file_path": "src/protocol.rs", "rank": 47, "score": 3.8601440194961034 }, { "content": " };\n\n}\n\n\n\nimpl ErrorKind {\n\n pub fn as_str(self) -> &'static str {\n\n match self {\n\n ErrorKind::InvalidUriScheme => \"Invalid URI scheme\",\n\n ErrorKind::UriParseFailure => \"URI parse failure\",\n\n ErrorKind::InvalidClientConfig => \"Invalid client configuration\",\n\n ErrorKind::IOError => \"I/O failure\",\n\n ErrorKind::ConcurrencyFailure => \"Concurrency Failure\",\n\n ErrorKind::SubscriptionFailure => \"Subscription Failure\",\n\n ErrorKind::Timeout => \"Timeout expired\",\n\n ErrorKind::Miscellaneous => \"Miscellaneous error\",\n\n ErrorKind::Regex => \"Regular Expression pare failure\",\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for ErrorKind {\n", "file_path": "src/error.rs", "rank": 48, "score": 3.1918480775932103 }, { "content": " sender.send(msg.clone()).unwrap(); // TODO: kill the unwrap\n\n mgr.remove_inbox(&msg.subject)\n\n }\n\n Ok(())\n\n }),\n\n )?;\n\n self.start_subscription_dispatcher(self.delivery_receiver.clone())\n\n }\n\n\n\n fn start_subscription_dispatcher(\n\n &self,\n\n receiver: channel::Receiver<DeliveredMessage>,\n\n ) -> Result<()> {\n\n let c = self.clone();\n\n\n\n thread::spawn(move || {\n\n loop {\n\n match receiver.recv() {\n\n Ok(msg) => {\n\n let handler = c.get_handler(msg.subscription_id as usize);\n", "file_path": "src/lib.rs", "rank": 49, "score": 3.143079299050358 }, { "content": " kind: ErrorKind,\n\n\n\n description: Option<String>,\n\n}\n\n\n\n/// Provides context as to how a particular natsclient error might have occurred\n\n#[derive(Debug, Copy, Clone, PartialEq)]\n\npub enum ErrorKind {\n\n /// Indicates a bad URL 
scheme was supplied for server address(es)\n\n InvalidUriScheme,\n\n /// URI Parse failure\n\n UriParseFailure,\n\n /// Invalid client configuration\n\n InvalidClientConfig,\n\n /// I/O failure\n\n IOError,\n\n /// Concurrency Failure\n\n ConcurrencyFailure,\n\n /// Subscription Failure\n\n SubscriptionFailure,\n", "file_path": "src/error.rs", "rank": 51, "score": 2.9127689126885734 }, { "content": " })?;\n\n\n\n let reply = client.request(\n\n \"symbolquery\",\n\n r#\"{\"symbol\": \"DOOM\"}\"#.as_bytes(),\n\n Duration::from_millis(100),\n\n )?;\n\n\n\n let symbol: SymbolReply = serde_json::from_slice(&reply.payload).unwrap();\n\n info!(\"Stock symbol response: {:?}\", symbol);\n\n\n\n std::thread::park();\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/marketclient.rs", "rank": 53, "score": 2.030910181835318 }, { "content": " let re = Regex::new(CREDSFILE_FORMAT)?;\n\n\n\n let caps: Vec<Captures> = re.captures_iter(&raw).collect();\n\n let uc =\n\n AuthenticationStyle::UserCredentials(caps[1][0].to_string(), caps[1][1].to_string());\n\n Ok(uc)\n\n }\n\n}\n\n\n\nconst CREDSFILE_FORMAT: &str =\n\n r#\"\\s*(?:(?:[-]{3,}[^\\n]*[-]{3,}\\n)(.+)(?:\\n\\s*[-]{3,}[^\\n]*[-]{3,}\\n))\"#;\n\n\n", "file_path": "src/lib.rs", "rank": 54, "score": 1.9378409903268636 }, { "content": " panic!(\"No server information!\");\n\n }\n\n }\n\n\n\n if let AuthenticationStyle::Basic {\n\n username: uname,\n\n password: pwd,\n\n } = auth\n\n {\n\n user = Some(uname.to_string());\n\n pass = Some(pwd.to_string());\n\n }\n\n\n\n if let AuthenticationStyle::Token(tok) = auth {\n\n auth_token = Some(tok.to_string());\n\n }\n\n\n\n let ci = crate::protocol::ConnectionInformation::new(\n\n false,\n\n false,\n", "file_path": "src/protocol.rs", "rank": 55, "score": 1.7969303383240363 }, { "content": " sm.unsubscribe(sid, None).unwrap();\n\n let unsub_message = r.recv().unwrap();\n\n assert_eq!(\n\n String::from_utf8(unsub_message).unwrap(),\n\n ProtocolMessage::Unsubscribe(UnsubscribeMessage 
{\n\n max_messages: None,\n\n subscription_id: sid,\n\n })\n\n .to_string()\n\n );\n\n }\n\n\n\n fn msg_handler(_msg: &Message) -> super::Result<()> {\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/subs.rs", "rank": 56, "score": 1.7069407986279999 }, { "content": " match &msg.reply_to {\n\n Some(r) => {\n\n info!(\"About to respond with reply...\");\n\n c.publish(&r, &generate_symbol_reply(&msg.payload), None)?;\n\n }\n\n None => info!(\"Nowhere to send symbol query response...\"),\n\n };\n\n Ok(())\n\n })?;\n\n\n\n for i in 0..100 {\n\n let tick = SymbolReply {\n\n symbol: \"NATS\".to_string(),\n\n price: i * 2,\n\n market_cap: 1000000,\n\n world_domination: true,\n\n };\n\n trace!(\"Sending tick\");\n\n let slice = serde_json::to_vec(&tick).unwrap();\n\n client.publish(\"ticker\", &slice, None).unwrap();\n\n thread::sleep(Duration::from_millis(500));\n\n }\n\n\n\n std::thread::park();\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "examples/marketservice.rs", "rank": 57, "score": 1.53291026206709 }, { "content": " /// Timeout expired\n\n Timeout,\n\n /// Miscellaneous\n\n Miscellaneous,\n\n /// Regular Expression\n\n Regex,\n\n}\n\n\n\n/// A handy macro borrowed from the `signatory` crate that lets library-internal code generate\n\n/// more readable exception handling flows\n\n#[macro_export]\n\nmacro_rules! 
err {\n\n ($variant:ident, $msg:expr) => {\n\n $crate::error::Error::new(\n\n $crate::error::ErrorKind::$variant,\n\n Some($msg)\n\n )\n\n };\n\n ($variant:ident, $fmt:expr, $($arg:tt)+) => {\n\n err!($variant, &format!($fmt, $($arg)+))\n", "file_path": "src/error.rs", "rank": 58, "score": 1.5272302518096486 }, { "content": "\n\n#[cfg(test)]\n\nmod tests {\n\n use super::AuthenticationStyle;\n\n\n\n #[test]\n\n fn it_works() {\n\n assert_eq!(2 + 2, 4);\n\n }\n\n\n\n #[test]\n\n fn credsfile_parses() {\n\n let credsfile = r#\"-----BEGIN NATS USER JWT-----\n\neyJ0eXAiOiJqd3QiLCJhbGciOiJlZDI1NTE5In0.eyJqdGkiOiJBNDNRN1NLT0tCT0tYUDc1WVhMWjcyVDZKNDVIVzJKR0ZRWUJFQ1I2VE1FWEZFN1RKSjVBIiwiaWF0IjoxNTU0ODk2OTQ1LCJpc3MiOiJBQU9KV0RRV1pPQkNFTUVWWUQ2VEhPTUVCSExYS0NBMzZGU0dJVUxINFBWRU1ORDVUMjNEUEM0VSIsIm5hbWUiOiJiZW1pc191c2VyIiwic3ViIjoiVUNMRkYzTFBLTTQ3WTZGNkQ3UExMVzU2MzZKU1JDUFhFUUFDTEdVWTZNT01BS1lXMkk2VUFFRUQiLCJ0eXBlIjoidXNlciIsIm5hdHMiOnsicHViIjp7fSwic3ViIjp7fX19.3aH-hCSTS8z8rg2km7Q_aat5VpwT-t9swSmh3bnVBY_9IV9wE9mjSOUgHE2sq-7pR4HTCpYa0RPrNcgNfaVuBg\n\n------END NATS USER JWT------\n\n\n\n************************* IMPORTANT *************************\n\nNKEY Seed printed below can be used to sign and prove identity.\n\nNKEYs are sensitive and should be treated as secrets.\n\n\n", "file_path": "src/lib.rs", "rank": 59, "score": 1.3129287296246885 } ]
Rust
src/mesh/mod.rs
LukasKalbertodt/cantucci
dea982b39d849de4a34082c4864b46d806eadd72
use cgmath::{prelude::*, Point3, Vector3}; use num_cpus; use std::{array::IntoIter, sync::mpsc::{channel, Receiver, Sender}}; use std::sync::Arc; use threadpool::ThreadPool; use crate::{ prelude::*, camera::Camera, octree::{Octree, SpanExt}, shape::Shape, util::iter, wgpu::DrawContext, }; mod buffer; mod view; use self::buffer::{MeshBuffer, Timings}; use self::view::MeshView; pub struct ShapeMesh { tree: Octree<MeshStatus, ()>, pipeline: wgpu::RenderPipeline, shape: Arc<dyn Shape>, thread_pool: ThreadPool, new_meshes: Receiver<(Point3<f32>, (MeshView, Timings))>, mesh_tx: Sender<(Point3<f32>, (MeshView, Timings))>, active_jobs: u64, batch_timings: Timings, finished_jobs: u64, } impl ShapeMesh { pub fn new( device: &wgpu::Device, out_format: wgpu::TextureFormat, shape: Arc<dyn Shape>, ) -> Result<Self> { let mut tree = Octree::spanning(shape.bounding_box()); let _ = tree.root_mut().split(None); for mut child in IntoIter::new(tree.root_mut().into_children().unwrap()) { child.split(None); } let (tx, rx) = channel(); let num_threads = num_cpus::get(); let pool = ThreadPool::new(num_threads); info!("Using {} threads to generate mesh", num_threads); let pipeline = view::create_pipeline(device, out_format); Ok(ShapeMesh { tree, pipeline, shape, thread_pool: pool, new_meshes: rx, mesh_tx: tx, active_jobs: 0, batch_timings: Timings::default(), finished_jobs: 0, }) } pub fn update(&mut self, device: Arc<wgpu::Device>, camera: &Camera) { const FOCUS_POINTS: u8 = 5; let focii = self.get_focii(camera, FOCUS_POINTS); for focus in focii { if let Some(mut leaf) = self.tree.leaf_around_mut(focus) { if let Some(MeshStatus::Ready(_)) = leaf.leaf_data().unwrap() { let dist = camera.position.distance(focus); let span = leaf.span(); let threshold = 2.0 * (span.end.x - span.start.x).abs(); if dist < threshold { leaf.split(None); } } } } let jobs_before = self.active_jobs; let finished_jobs_before = self.finished_jobs; for (center, (view, timings)) in self.new_meshes.try_iter() { 
self.active_jobs -= 1; self.finished_jobs += 1; self.batch_timings = self.batch_timings + timings; *self.tree .leaf_around_mut(center) .unwrap() .leaf_data_mut() .unwrap() = Some(MeshStatus::Ready(view)); } let empty_leaves = self.tree.iter_mut() .filter_map(|n| n.into_leaf()) .filter(|&(_, ref leaf_data)| leaf_data.is_none()); for (span, leaf_data) in empty_leaves { const RESOLUTION: u32 = 64; let tx = self.mesh_tx.clone(); let shape = self.shape.clone(); let device = device.clone(); self.thread_pool.execute(move || { let (buf, timings) = MeshBuffer::generate_for_box(&span, &*shape, RESOLUTION); let view = MeshView::new(&device, &buf.vertices, &buf.indices); let _ = tx.send((span.center(), (view, timings))); }); self.active_jobs += 1; let old_view = match leaf_data.take() { Some(MeshStatus::Ready(view)) => Some(view), _ => None, }; *leaf_data = Some(MeshStatus::Requested { old_view }); } if jobs_before != self.active_jobs { trace!("Currently active sample jobs: {}", self.active_jobs); } const PRINT_EVERY_FINISHED_JOBS: u64 = 64; if self.finished_jobs % PRINT_EVERY_FINISHED_JOBS == 0 && self.finished_jobs > 0 && finished_jobs_before != self.finished_jobs { debug!( "Finished {} new jobs in: {}", PRINT_EVERY_FINISHED_JOBS, self.batch_timings, ); self.batch_timings = Timings::default(); } } pub(crate) fn draw( &self, draw_ctx: DrawContext<'_>, camera: &Camera, ) { let it = self.tree.iter() .filter_map(|n| n.leaf_data().map(|data| (data, n.span()))); for (leaf_data, _span) in it { match leaf_data { &MeshStatus::Ready(ref view) | &MeshStatus::Requested { old_view: Some(ref view) } => { view.draw(draw_ctx, camera, &self.pipeline); } _ => (), } } } pub fn get_focii(&self, camera: &Camera, focus_points: u8) -> Vec<Point3<f32>> { const EPSILON: f32 = 0.000_001; const MAX_ITERS: u64 = 100; let (top_left, bottom_right) = camera.near_plane_bb(); let (frustum_width, frustum_height) = camera.projection.near_plane_dimension(); let size_horizontal = frustum_width / focus_points as 
f32; let size_vertical = frustum_height / focus_points as f32; let center_diff = (bottom_right - top_left) / (2.0 * focus_points as f32); let inv_view_trans = camera.inv_view_transform(); iter::square(focus_points as u32) .map(|(x, y)| { let center = top_left + Vector3::new( x as f32 * size_horizontal, y as f32 * size_vertical, 0.0, ) + center_diff; Point3::from_homogeneous( inv_view_trans * center.to_homogeneous() ) }) .filter_map(|p| { let mut pos = camera.position; let dir = (p - camera.position).normalize(); for _ in 0..MAX_ITERS { let distance = self.shape.min_distance_from(pos); pos += dir * distance; if distance < EPSILON { return Some(pos); } } None }) .collect() } } enum MeshStatus { Requested { old_view: Option<MeshView>, }, Ready(MeshView), } #[derive(Copy, Clone)] #[repr(C)] pub struct Vertex { position: [f32; 3], normal: [f32; 3], distance_from_surface: f32, } unsafe impl bytemuck::Pod for Vertex {} unsafe impl bytemuck::Zeroable for Vertex {}
use cgmath::{prelude::*, Point3, Vector3}; use num_cpus; use std::{array::IntoIter, sync::mpsc::{channel, Receiver, Sender}}; use std::sync::Arc; use threadpool::ThreadPool; use crate::{ prelude::*, camera::Camera, octree::{Octree, SpanExt}, shape::Shape, util::iter, wgpu::DrawContext, }; mod buffer; mod view; use self::buffer::{MeshBuffer, Timings}; use self::view::MeshView; pub struct ShapeMesh { tree: Octree<MeshStatus, ()>, pipeline: wgpu::RenderPipeline, shape: Arc<dyn Shape>, thread_pool: ThreadPool, new_meshes: Receiver<(Point3<f32>, (MeshView, Timings))>, mesh_tx: Sender<(Point3<f32>, (MeshView, Timings))>, active_jobs: u64, batch_timings: Timings, finished_jobs: u64, } impl ShapeMesh { pub fn new( device: &wgpu::Device, out_format: wgpu::TextureFormat, shape: Arc<dyn Shape>, ) -> Result<Self> { let mut tree = Octree::spanning(shape.bounding_box()); let _ = tree.root_mut().split(None); for mut child in IntoIter::new(tree.root_mut().into_children().unwrap()) { child.split(None); } let (tx, rx) = channel(); let num_threads = num_cpus::get(); let pool = ThreadPool::new(num_threads); info!("Using {} threads to generate mesh", num_threads); let pipeline = view::create_pipeline(device, out_format); Ok(ShapeMesh { tree, pipeline, shape, thread_pool: pool, new_meshes: rx, mesh_tx: tx, active_jobs: 0, batch_timings: Timings::default(), finished_jobs: 0, }) }
pub(crate) fn draw( &self, draw_ctx: DrawContext<'_>, camera: &Camera, ) { let it = self.tree.iter() .filter_map(|n| n.leaf_data().map(|data| (data, n.span()))); for (leaf_data, _span) in it { match leaf_data { &MeshStatus::Ready(ref view) | &MeshStatus::Requested { old_view: Some(ref view) } => { view.draw(draw_ctx, camera, &self.pipeline); } _ => (), } } } pub fn get_focii(&self, camera: &Camera, focus_points: u8) -> Vec<Point3<f32>> { const EPSILON: f32 = 0.000_001; const MAX_ITERS: u64 = 100; let (top_left, bottom_right) = camera.near_plane_bb(); let (frustum_width, frustum_height) = camera.projection.near_plane_dimension(); let size_horizontal = frustum_width / focus_points as f32; let size_vertical = frustum_height / focus_points as f32; let center_diff = (bottom_right - top_left) / (2.0 * focus_points as f32); let inv_view_trans = camera.inv_view_transform(); iter::square(focus_points as u32) .map(|(x, y)| { let center = top_left + Vector3::new( x as f32 * size_horizontal, y as f32 * size_vertical, 0.0, ) + center_diff; Point3::from_homogeneous( inv_view_trans * center.to_homogeneous() ) }) .filter_map(|p| { let mut pos = camera.position; let dir = (p - camera.position).normalize(); for _ in 0..MAX_ITERS { let distance = self.shape.min_distance_from(pos); pos += dir * distance; if distance < EPSILON { return Some(pos); } } None }) .collect() } } enum MeshStatus { Requested { old_view: Option<MeshView>, }, Ready(MeshView), } #[derive(Copy, Clone)] #[repr(C)] pub struct Vertex { position: [f32; 3], normal: [f32; 3], distance_from_surface: f32, } unsafe impl bytemuck::Pod for Vertex {} unsafe impl bytemuck::Zeroable for Vertex {}
pub fn update(&mut self, device: Arc<wgpu::Device>, camera: &Camera) { const FOCUS_POINTS: u8 = 5; let focii = self.get_focii(camera, FOCUS_POINTS); for focus in focii { if let Some(mut leaf) = self.tree.leaf_around_mut(focus) { if let Some(MeshStatus::Ready(_)) = leaf.leaf_data().unwrap() { let dist = camera.position.distance(focus); let span = leaf.span(); let threshold = 2.0 * (span.end.x - span.start.x).abs(); if dist < threshold { leaf.split(None); } } } } let jobs_before = self.active_jobs; let finished_jobs_before = self.finished_jobs; for (center, (view, timings)) in self.new_meshes.try_iter() { self.active_jobs -= 1; self.finished_jobs += 1; self.batch_timings = self.batch_timings + timings; *self.tree .leaf_around_mut(center) .unwrap() .leaf_data_mut() .unwrap() = Some(MeshStatus::Ready(view)); } let empty_leaves = self.tree.iter_mut() .filter_map(|n| n.into_leaf()) .filter(|&(_, ref leaf_data)| leaf_data.is_none()); for (span, leaf_data) in empty_leaves { const RESOLUTION: u32 = 64; let tx = self.mesh_tx.clone(); let shape = self.shape.clone(); let device = device.clone(); self.thread_pool.execute(move || { let (buf, timings) = MeshBuffer::generate_for_box(&span, &*shape, RESOLUTION); let view = MeshView::new(&device, &buf.vertices, &buf.indices); let _ = tx.send((span.center(), (view, timings))); }); self.active_jobs += 1; let old_view = match leaf_data.take() { Some(MeshStatus::Ready(view)) => Some(view), _ => None, }; *leaf_data = Some(MeshStatus::Requested { old_view }); } if jobs_before != self.active_jobs { trace!("Currently active sample jobs: {}", self.active_jobs); } const PRINT_EVERY_FINISHED_JOBS: u64 = 64; if self.finished_jobs % PRINT_EVERY_FINISHED_JOBS == 0 && self.finished_jobs > 0 && finished_jobs_before != self.finished_jobs { debug!( "Finished {} new jobs in: {}", PRINT_EVERY_FINISHED_JOBS, self.batch_timings, ); self.batch_timings = Timings::default(); } }
function_block-full_function
[ { "content": "/// Creates 8 equally sized children spans of a passed parent span. The spans are defined\n\n/// in a way that will be no gaps between them due to floating point precision errors.\n\npub fn create_spans(parent_span: Range<Point3<f32>>) -> [Range<Point3<f32>>; 8] {\n\n let start = parent_span.start;\n\n let end = parent_span.end;\n\n let center = start + (parent_span.end - start) / 2.0;\n\n [\n\n start .. center,\n\n Point3 { z: center.z, .. start } .. Point3 { z: end.z, .. center },\n\n Point3 { y: center.y, .. start } .. Point3 { y: end.y, .. center },\n\n Point3 { x: start.x, .. center } .. Point3 { x: center.x, .. end },\n\n Point3 { x: center.x, .. start } .. Point3 { x: end.x, .. center },\n\n Point3 { y: start.y, .. center } .. Point3 { y: center.y, .. end },\n\n Point3 { z: start.z, .. center } .. Point3 { z: center.z, .. end },\n\n center .. end,\n\n ]\n\n}\n", "file_path": "src/octree/mod.rs", "rank": 0, "score": 125471.97347973654 }, { "content": "/// Describes a 3D object that can be rendered by this application.\n\n///\n\n/// Unlike in standard real time 3D graphics, the object is not represented\n\n/// with a triangle mesh, but via a small set of functions (actually, only one\n\n/// function is important). It's usually called distance field or distance\n\n/// function as the only way to get information about our object is by querying\n\n/// the so called distance estimator (DE). This function returns an\n\n/// approximation of the distance from a given point to the surface of the\n\n/// mesh. See `min_distance_from()` for more information.\n\npub trait Shape: Sync + Send + 'static {\n\n /// Returns a lower bound of the distance from `p` to the closest surface\n\n /// point of the shape.\n\n ///\n\n /// If `p` is inside the shape, the returned distance has to be negative.\n\n /// The value must converge towards the real distance when approaching the\n\n /// shape. 
That means that there has to be a constant c such that for every\n\n /// point p the real distance is <= c * min_distance_from(p):\n\n ///\n\n /// ∃ c ∈ ℝ ∀ p ∈ ℝ³ distance_from(p) <= c * min_distance_from(p)\n\n ///\n\n /// This also implies that min_distance_from(p) can only return 0 iff p\n\n /// lies on the shape's surface.\n\n fn min_distance_from(&self, p: Point3<f32>) -> f32;\n\n\n\n fn bounding_box(&self) -> Range<Point3<f32>>;\n\n\n\n // TODO: this method is hacky...\n\n /// Returns a string containing the GLSL definition of the distance\n\n /// estimator.\n", "file_path": "src/shape/mod.rs", "rank": 1, "score": 110722.21212084079 }, { "content": "#[bench]\n\nfn mandel_10points_i8_b5(b: &mut Bencher) {\n\n let m = Mandelbulb::classic(8, 5.0);\n\n b.iter(|| {\n\n BENCH_POINTS.iter()\n\n .map(|&p| m.min_distance_from(black_box(p.into())))\n\n .sum::<f32>()\n\n });\n\n}\n", "file_path": "src/shape/bench.rs", "rank": 2, "score": 101980.76349511955 }, { "content": "pub trait SpanExt {\n\n fn center(&self) -> Point3<f32>;\n\n fn contains(&self, p: Point3<f32>) -> bool;\n\n}\n\n\n\nimpl SpanExt for Span {\n\n fn center(&self) -> Point3<f32> {\n\n self.start + (self.end - self.start) / 2.0\n\n }\n\n\n\n fn contains(&self, p: Point3<f32>) -> bool {\n\n let s = self.start;\n\n let e = self.end;\n\n\n\n s.x <= p.x && s.y <= p.y && s.z <= p.z\n\n && p.x < e.x && p.y < e.y && p.z < e.z\n\n }\n\n}\n\n\n\n/// Recursively partitions three dimensional space into eight octants. 
In this\n", "file_path": "src/octree/mod.rs", "rank": 3, "score": 89838.95083702038 }, { "content": "pub fn load_program_with_shape<F: Facade, S: ShaderSource>(\n\n facade: &F,\n\n src: S,\n\n shape: &dyn Shape,\n\n) -> Result<Program> {\n\n let de_shader = shape.de_shader();\n\n let vert_buf = load_file(src.vert_path(), \"vert\")?\n\n .replace(\"// INCLUDE(DE)\", &de_shader);\n\n let frag_buf = load_file(src.frag_path(), \"frag\")?\n\n .replace(\"// INCLUDE(DE)\", &de_shader);\n\n\n\n link_program(facade, &vert_buf, &frag_buf)\n\n}\n\n\n\n\n", "file_path": "src/util/gl.rs", "rank": 4, "score": 88373.09189269223 }, { "content": "#[derive(Debug, Clone, Copy)]\n\nstruct Vec3(__m128);\n\n\n\n#[cfg(target_arch = \"x86_64\")]\n\nimpl Vec3 {\n\n #[inline(always)]\n\n fn new(x: f32, y: f32, z: f32) -> Self {\n\n let r = unsafe { _mm_set_ps(0.0, z, y, x) };\n\n Self(r)\n\n }\n\n\n\n #[inline(always)]\n\n fn x(self) -> f32 {\n\n unsafe { _mm_cvtss_f32(self.0) }\n\n }\n\n\n\n #[inline(always)]\n\n fn y(self) -> f32 {\n\n unsafe { f32::from_bits(_mm_extract_ps(self.0, 1) as u32) }\n\n }\n\n\n", "file_path": "src/shape/mandelbulb.rs", "rank": 5, "score": 75225.78095195466 }, { "content": "pub fn cube(size: u32) -> CubeIter {\n\n CubeIter {\n\n size,\n\n x: 0,\n\n y: 0,\n\n z: 0,\n\n }\n\n}\n\n\n\npub struct CubeIter {\n\n size: u32,\n\n x: u32,\n\n y: u32,\n\n z: u32,\n\n}\n\n\n\nimpl Iterator for CubeIter {\n\n type Item = (u32, u32, u32);\n\n\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n", "file_path": "src/util/iter.rs", "rank": 6, "score": 74001.0602537452 }, { "content": "pub fn square(size: u32) -> SquareIter {\n\n SquareIter {\n\n size,\n\n x: 0,\n\n y: 0,\n\n }\n\n}\n\n\n\npub struct SquareIter {\n\n size: u32,\n\n x: u32,\n\n y: u32,\n\n}\n\n\n\nimpl Iterator for SquareIter {\n\n type Item = (u32, u32);\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n if self.x == self.size {\n\n None\n", "file_path": "src/util/iter.rs", "rank": 7, "score": 
74001.0602537452 }, { "content": "pub trait ToArr {\n\n type Output;\n\n\n\n fn to_arr(&self) -> Self::Output;\n\n}\n\n\n\nmacro_rules! to_arr_impl_gen_into_type {\n\n ($ty:ident, $out:ty) => {\n\n impl<T: BaseNum> ToArr for $ty <T> {\n\n type Output = $out;\n\n\n\n fn to_arr(&self) -> Self::Output {\n\n (*self).into()\n\n }\n\n }\n\n }\n\n}\n\n\n\nto_arr_impl_gen_into_type!(Matrix4, [[T; 4]; 4]);\n\nto_arr_impl_gen_into_type!(Point3, [T; 3]);\n\nto_arr_impl_gen_into_type!(Vector3, [T; 3]);\n\n\n\nmacro_rules! include_shader {\n\n ($name:literal) => {\n\n wgpu::include_spirv!(concat!(env!(\"OUT_DIR\"), \"/shaders/\", $name, \".spirv\"))\n\n };\n\n}\n", "file_path": "src/util/mod.rs", "rank": 8, "score": 72679.71451111833 }, { "content": "pub trait DurationExt {\n\n fn display_ms(&self) -> DisplayMs;\n\n}\n\n\n\nimpl DurationExt for Duration {\n\n fn display_ms(&self) -> DisplayMs {\n\n DisplayMs(*self)\n\n }\n\n}\n\n\n\npub struct DisplayMs(Duration);\n\n\n\nimpl fmt::Display for DisplayMs {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let secs = self.0.as_secs();\n\n let nanos = self.0.subsec_nanos();\n\n\n\n let ms = (secs as f64) * 1000.0 + ((nanos / 1000) as f64 / 1000.0);\n\n format!(\"{:.3}ms\", ms).fmt(f)\n\n }\n\n}\n", "file_path": "src/util/time.rs", "rank": 9, "score": 71510.15045860832 }, { "content": "pub fn load_program<F: Facade, S: ShaderSource>(\n\n facade: &F,\n\n src: S\n\n) -> Result<Program> {\n\n let vert_buf = load_file(src.vert_path(), \"vert\")?;\n\n let frag_buf = load_file(src.frag_path(), \"frag\")?;\n\n\n\n link_program(facade, &vert_buf, &frag_buf)\n\n}\n\n\n", "file_path": "src/util/gl.rs", "rank": 10, "score": 68827.68864988095 }, { "content": "/// Types that manage an internal camera and change its properties as reaction\n\n/// to input events.\n\npub trait CamControl: EventHandler {\n\n /// Returns the internal camera\n\n fn camera(&self) -> Camera;\n\n\n\n /// Returns a mutable reference to the camera's projection 
properties. This\n\n /// should only be called to change the projection. To use the projection\n\n /// for rendering, call `camera()` instead.\n\n fn projection_mut(&mut self) -> &mut Projection;\n\n\n\n /// Is called regularly to update the internal camera. `delta` is the time\n\n /// in seconds since the last time this method was called.\n\n fn update(&mut self, _delta: f32, _shape: &dyn Shape) {}\n\n\n\n /// Returns `self` as `EventHandler` trait object.\n\n fn as_event_handler(&mut self) -> &mut dyn EventHandler;\n\n\n\n /// Adjusts the internal camera to match the given one as close as\n\n /// possible (it might not be completely possible). This is used for\n\n /// a smooth transition between two controls.\n\n ///\n", "file_path": "src/control/mod.rs", "rank": 11, "score": 64633.89793977789 }, { "content": "use std::mem;\n\n\n\nuse cgmath::Matrix4;\n\nuse wgpu::util::DeviceExt;\n\n\n\nuse crate::{\n\n camera::Camera,\n\n util::ToArr,\n\n wgpu::{DrawContext, DEPTH_BUFFER_FORMAT},\n\n};\n\nuse super::Vertex;\n\n\n\n\n\npub struct MeshView {\n\n vbuf: wgpu::Buffer,\n\n ibuf: wgpu::Buffer,\n\n num_indices: u32,\n\n}\n\n\n\nimpl MeshView {\n", "file_path": "src/mesh/view.rs", "rank": 13, "score": 61424.05978728866 }, { "content": " /// Creates all required non-global resources to draw the mesh stored in\n\n /// the `MeshBuffer`.\n\n pub fn new(device: &wgpu::Device, vertices: &[Vertex], indices: &[u32]) -> Self {\n\n let vbuf = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {\n\n label: Some(\"Shape mesh vertex buffer\"),\n\n contents: bytemuck::cast_slice(&vertices),\n\n usage: wgpu::BufferUsage::VERTEX,\n\n });\n\n\n\n let ibuf = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {\n\n label: Some(\"Shape mesh index Buffer\"),\n\n contents: bytemuck::cast_slice(&indices),\n\n usage: wgpu::BufferUsage::INDEX,\n\n });\n\n\n\n Self {\n\n vbuf,\n\n ibuf,\n\n num_indices: indices.len() as u32,\n\n }\n", "file_path": "src/mesh/view.rs", "rank": 
14, "score": 61417.41370738789 }, { "content": " }\n\n\n\n pub(crate) fn draw(\n\n &self,\n\n draw_ctx: DrawContext<'_>,\n\n camera: &Camera,\n\n pipeline: &wgpu::RenderPipeline,\n\n ) {\n\n let transform_mat = camera.proj_transform() * camera.view_transform();\n\n\n\n let mut encoder = draw_ctx.device.create_command_encoder(\n\n &wgpu::CommandEncoderDescriptor { label: None }\n\n );\n\n\n\n {\n\n let mut rpass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {\n\n color_attachments: &[wgpu::RenderPassColorAttachmentDescriptor {\n\n attachment: &draw_ctx.frame.view,\n\n resolve_target: None,\n\n ops: wgpu::Operations {\n", "file_path": "src/mesh/view.rs", "rank": 15, "score": 61414.45505689584 }, { "content": " 0,\n\n bytemuck::cast_slice(&transform_mat.to_arr()),\n\n );\n\n rpass.pop_debug_group();\n\n\n\n rpass.insert_debug_marker(\"Draw!\");\n\n rpass.draw_indexed(0..self.num_indices, 0, 0..1);\n\n }\n\n\n\n draw_ctx.queue.submit(Some(encoder.finish()));\n\n }\n\n}\n\n\n\npub(crate) fn create_pipeline(\n\n device: &wgpu::Device,\n\n out_format: wgpu::TextureFormat,\n\n) -> wgpu::RenderPipeline {\n\n let vs_module = device.create_shader_module(include_shader!(\"surface.vert\"));\n\n let fs_module = device.create_shader_module(include_shader!(\"surface.frag\"));\n\n\n", "file_path": "src/mesh/view.rs", "rank": 16, "score": 61410.843705881955 }, { "content": " let pipeline_layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {\n\n label: None,\n\n bind_group_layouts: &[],\n\n push_constant_ranges: &[wgpu::PushConstantRange {\n\n stages: wgpu::ShaderStage::VERTEX,\n\n range: 0..mem::size_of::<Matrix4<f32>>() as u32,\n\n }],\n\n });\n\n\n\n device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {\n\n label: Some(\"Shape mesh render pipeline\"),\n\n layout: Some(&pipeline_layout),\n\n vertex_stage: wgpu::ProgrammableStageDescriptor {\n\n module: &vs_module,\n\n entry_point: \"main\",\n\n },\n\n fragment_stage: 
Some(wgpu::ProgrammableStageDescriptor {\n\n module: &fs_module,\n\n entry_point: \"main\",\n\n }),\n", "file_path": "src/mesh/view.rs", "rank": 17, "score": 61410.81159134588 }, { "content": " load: wgpu::LoadOp::Load,\n\n store: true,\n\n },\n\n }],\n\n depth_stencil_attachment: Some(wgpu::RenderPassDepthStencilAttachmentDescriptor {\n\n attachment: draw_ctx.depth_buffer,\n\n depth_ops: Some(wgpu::Operations {\n\n load: wgpu::LoadOp::Load,\n\n store: true,\n\n }),\n\n stencil_ops: None,\n\n }),\n\n });\n\n\n\n rpass.push_debug_group(\"Prepare data for draw.\");\n\n rpass.set_pipeline(pipeline);\n\n rpass.set_index_buffer(self.ibuf.slice(..));\n\n rpass.set_vertex_buffer(0, self.vbuf.slice(..));\n\n rpass.set_push_constants(\n\n wgpu::ShaderStage::VERTEX,\n", "file_path": "src/mesh/view.rs", "rank": 18, "score": 61406.34692276374 }, { "content": " vertex_buffers: &[wgpu::VertexBufferDescriptor {\n\n stride: mem::size_of::<Vertex>() as wgpu::BufferAddress,\n\n step_mode: wgpu::InputStepMode::Vertex,\n\n attributes: &[\n\n wgpu::VertexAttributeDescriptor {\n\n format: wgpu::VertexFormat::Float3,\n\n offset: 0,\n\n shader_location: 0,\n\n },\n\n wgpu::VertexAttributeDescriptor {\n\n format: wgpu::VertexFormat::Float3,\n\n offset: mem::size_of::<f32>() as u64 * 3,\n\n shader_location: 1,\n\n },\n\n wgpu::VertexAttributeDescriptor {\n\n format: wgpu::VertexFormat::Float,\n\n offset: mem::size_of::<f32>() as u64 * 6,\n\n shader_location: 1,\n\n },\n\n ],\n\n }],\n\n },\n\n sample_count: 1,\n\n sample_mask: !0,\n\n alpha_to_coverage_enabled: false,\n\n })\n\n}\n", "file_path": "src/mesh/view.rs", "rank": 19, "score": 61406.178992766894 }, { "content": " rasterization_state: Some(wgpu::RasterizationStateDescriptor {\n\n front_face: wgpu::FrontFace::Ccw,\n\n cull_mode: wgpu::CullMode::Back,\n\n ..Default::default()\n\n }),\n\n primitive_topology: wgpu::PrimitiveTopology::TriangleList,\n\n color_states: &[wgpu::ColorStateDescriptor {\n\n format: out_format,\n\n color_blend: 
wgpu::BlendDescriptor::REPLACE,\n\n alpha_blend: wgpu::BlendDescriptor::REPLACE,\n\n write_mask: wgpu::ColorWrite::ALL,\n\n }],\n\n depth_stencil_state: Some(wgpu::DepthStencilStateDescriptor {\n\n format: DEPTH_BUFFER_FORMAT,\n\n depth_write_enabled: true,\n\n depth_compare: wgpu::CompareFunction::Less,\n\n stencil: wgpu::StencilStateDescriptor::default(),\n\n }),\n\n vertex_state: wgpu::VertexStateDescriptor {\n\n index_format: wgpu::IndexFormat::Uint32,\n", "file_path": "src/mesh/view.rs", "rank": 20, "score": 61401.795760387075 }, { "content": "fn rotate_inner_p8_scalar(p: Vec3) -> Vec3 {\n\n let x = p.x();\n\n let y = p.y();\n\n let z = p.z();\n\n\n\n let x2 = x * x;\n\n let x4 = x2 * x2;\n\n let x6 = x4 * x2;\n\n let x8 = x4 * x4;\n\n\n\n let y2 = y * y;\n\n let y4 = y2 * y2;\n\n let y6 = y4 * y2;\n\n let y8 = y4 * y4;\n\n\n\n let z2 = z * z;\n\n let z4 = z2 * z2;\n\n let z6 = z4 * z2;\n\n let z8 = z4 * z4;\n\n\n", "file_path": "src/shape/mandelbulb.rs", "rank": 21, "score": 61251.12821141927 }, { "content": "use super::Vertex;\n\n\n\n\n\npub struct MeshBuffer {\n\n pub(crate) vertices: Vec<Vertex>,\n\n pub(crate) indices: Vec<u32>,\n\n}\n\n\n\nimpl MeshBuffer {\n\n pub fn generate_for_box(\n\n span: &Span,\n\n shape: &dyn Shape,\n\n resolution: u32,\n\n ) -> (Self, Timings) {\n\n assert!(span.start.x < span.end.x);\n\n assert!(span.start.y < span.end.y);\n\n assert!(span.start.z < span.end.z);\n\n assert!(resolution != 0);\n\n assert!(resolution.is_power_of_two());\n\n\n", "file_path": "src/mesh/buffer.rs", "rank": 22, "score": 61170.31807403496 }, { "content": "use std::{\n\n time::{Duration, Instant},\n\n fmt,\n\n ops,\n\n};\n\n\n\nuse cgmath::{prelude::*, Point3, Vector3};\n\n\n\nuse crate::{\n\n prelude::*,\n\n math::lerp,\n\n shape::Shape,\n\n octree::Span,\n\n util::{\n\n ToArr,\n\n iter::cube,\n\n grid::GridTable,\n\n time::DurationExt,\n\n },\n\n};\n", "file_path": "src/mesh/buffer.rs", "rank": 23, "score": 61169.96164213952 }, { "content": "\n\n 
lerp(p0 + corner_offsets[from], p0 + corner_offsets[to], weight_from)\n\n });\n\n\n\n // As described in the article above, we simply use the centroid\n\n // of all edge crossings.\n\n let (count, total_displacement) = edge_crossings.fold(\n\n (0, Vector3::zero()),\n\n |(count, sum), p| (count + 1, sum + p.to_vec()));\n\n let p = Point3::origin() + (total_displacement / count as f32);\n\n\n\n // Now we only calculate some meta data which might be used to\n\n // color the vertex.\n\n let dist_p = shape.min_distance_from(p);\n\n\n\n let normal = {\n\n let delta = 0.7 * (span.end - span.start) / resolution as f32;\n\n Vector3::new(\n\n shape.min_distance_from(p + Vector3::unit_x() * delta.x)\n\n - shape.min_distance_from(p + Vector3::unit_x() * -delta.x),\n", "file_path": "src/mesh/buffer.rs", "rank": 24, "score": 61160.98189917717 }, { "content": " };\n\n\n\n trace!(\n\n \"Generated {:6} points, {:6} faces in {}\",\n\n vertices.len(),\n\n indices.len() / 6,\n\n timings,\n\n );\n\n\n\n (MeshBuffer { vertices, indices }, timings)\n\n }\n\n}\n\n\n\n\n\n/// Stores some information about how long various passes of the mesh\n\n/// generation algorithm were running as well as how many vertices and faces\n\n/// were created.\n\n#[derive(Default, Clone, Copy)]\n\npub struct Timings {\n\n first: Duration,\n", "file_path": "src/mesh/buffer.rs", "rank": 25, "score": 61159.74982425374 }, { "content": " let step = (span.end - span.start) / resolution as f32;\n\n let corner_offsets = [\n\n Vector3::new( 0.0, 0.0, 0.0),\n\n Vector3::new( 0.0, 0.0, step.z),\n\n Vector3::new( 0.0, step.y, 0.0),\n\n Vector3::new( 0.0, step.y, step.z),\n\n Vector3::new(step.x, 0.0, 0.0),\n\n Vector3::new(step.x, 0.0, step.z),\n\n Vector3::new(step.x, step.y, 0.0),\n\n Vector3::new(step.x, step.y, step.z),\n\n ];\n\n\n\n let points = GridTable::fill_with(resolution, |x, y, z| {\n\n // The estimated minimal distances of all eight corners calculated\n\n // in the prior step.\n\n let distances = [\n\n 
dists[(x , y , z )],\n\n dists[(x , y , z + 1)],\n\n dists[(x , y + 1, z )],\n\n dists[(x , y + 1, z + 1)],\n", "file_path": "src/mesh/buffer.rs", "rank": 26, "score": 61156.209602258816 }, { "content": " // ===========\n\n //\n\n // We already have all vertices, now we need to generate the faces\n\n // of our resulting mesh. For each edge crossing the surface of our\n\n // shape, we will generate one face. This face's vertices are the\n\n // vertices inside the four cells the edge is adjacent to.\n\n //\n\n let mut indices = Vec::new();\n\n for (x, y, z) in cube(resolution) {\n\n // We iterate over all edges by iterating over all lower corners of\n\n // all cells.\n\n //\n\n // About all those `unwrap()` calls: if the edge is crossing the\n\n // surface (which is checked in the if conditions below), then we\n\n // generated a vertex for all of the adjacent cells (as they,\n\n // by definition, also cross the surface). So the Options we access\n\n // are always `Some()`.\n\n\n\n let base_sign = dists[(x, y, z)].is_sign_positive();\n\n\n", "file_path": "src/mesh/buffer.rs", "rank": 27, "score": 61155.56000216501 }, { "content": " shape.min_distance_from(p + Vector3::unit_y() * delta.y)\n\n - shape.min_distance_from(p + Vector3::unit_y() * -delta.y),\n\n shape.min_distance_from(p + Vector3::unit_z() * delta.z)\n\n - shape.min_distance_from(p + Vector3::unit_z() * -delta.z),\n\n ).normalize()\n\n };\n\n\n\n vertices.push(Vertex {\n\n position: p.to_vec().to_arr(),\n\n normal: normal.to_arr(),\n\n distance_from_surface: dist_p,\n\n });\n\n\n\n vertices.len() as u32 - 1\n\n });\n\n\n\n let before_third = Instant::now();\n\n\n\n\n\n // Third step:\n", "file_path": "src/mesh/buffer.rs", "rank": 28, "score": 61155.44465837938 }, { "content": " resolution: u32,\n\n ) -> (Self, Timings) {\n\n // Adjust span to avoid holes in between two boxes\n\n let span = {\n\n let overflow = (span.end - span.start) / resolution as f32;\n\n span.start + -overflow .. 
span.end + overflow\n\n };\n\n\n\n let before_first = Instant::now();\n\n\n\n // First Step:\n\n // ===========\n\n //\n\n // We partition our box into regular cells. For each corner in between\n\n // the cells we calculate and save the estimated minimal distance from\n\n // the shape.\n\n let across_span = span.end - span.start;\n\n let dists = GridTable::fill_with(resolution + 1, |x, y, z| {\n\n let v = Vector3::new(x as f32, y as f32, z as f32) / (resolution as f32);\n\n let p = span.start + across_span.mul_element_wise(v);\n", "file_path": "src/mesh/buffer.rs", "rank": 29, "score": 61155.408023986405 }, { "content": " second: Duration,\n\n third: Duration,\n\n vertices: u32,\n\n faces: u32,\n\n}\n\n\n\nimpl fmt::Display for Timings {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n let all = self.first + self.second + self.third;\n\n write!(\n\n f,\n\n \"{:>11} ({:>11}, {:>11}, {:>11}) => [{:6} verts, {:6} faces]\",\n\n all.display_ms(),\n\n self.first.display_ms(),\n\n self.second.display_ms(),\n\n self.third.display_ms(),\n\n self.vertices,\n\n self.faces,\n\n )\n\n }\n", "file_path": "src/mesh/buffer.rs", "rank": 30, "score": 61154.924452994084 }, { "content": "\n\n shape.min_distance_from(p)\n\n });\n\n\n\n let before_second = Instant::now();\n\n\n\n\n\n // Second Step:\n\n // ============\n\n //\n\n // Next, we will iterate over all cells of the box (unlike before\n\n // where we iterated over corners). For each cell crossing the shape's\n\n // surface, we will generate one vertex. 
The `points` grid table holds\n\n // the index of the vertex corresponding to the cell, or `None` if the\n\n // cell does not cross the surface.\n\n //\n\n let mut vertices = Vec::new();\n\n\n\n // The world space distance between two corners/between the\n\n // center points of two cells.\n", "file_path": "src/mesh/buffer.rs", "rank": 31, "score": 61154.8488509475 }, { "content": " };\n\n\n\n if no_shape_crossing {\n\n // FIXME\n\n // This is a bit hacky, but we will never access this number\n\n return u32::MAX;\n\n }\n\n\n\n // World position of this cell's lower corner\n\n let p0 = span.start + Vector3::new(x as f32, y as f32, z as f32)\n\n .mul_element_wise(step);\n\n\n\n // We want to iterate over all 12 edges of the cell. Here, we list\n\n // all edges by specifying their corner indices.\n\n const EDGES: [(u8, u8); 12] = [\n\n // Edges whose endpoints differ in the x coordinate (first\n\n // corner id is -x, second is +x).\n\n (0, 4), // -y -z\n\n (1, 5), // -y +z\n\n (2, 6), // +y -z\n", "file_path": "src/mesh/buffer.rs", "rank": 32, "score": 61154.158908613106 }, { "content": "}\n\n\n\nimpl ops::Add for Timings {\n\n type Output = Self;\n\n fn add(self, other: Self) -> Self {\n\n Timings {\n\n first: self.first + other.first,\n\n second: self.second + other.second,\n\n third: self.third + other.third,\n\n vertices: self.vertices + other.vertices,\n\n faces: self.faces + other.faces,\n\n }\n\n }\n\n}\n", "file_path": "src/mesh/buffer.rs", "rank": 33, "score": 61154.04422262379 }, { "content": " dists[(x + 1, y , z )],\n\n dists[(x + 1, y , z + 1)],\n\n dists[(x + 1, y + 1, z )],\n\n dists[(x + 1, y + 1, z + 1)],\n\n ];\n\n\n\n // First, check if the current cell is only partially inside the\n\n // shape (if the cell intersects the shape's surface). If that's\n\n // not the case, we won't generate a vertex for this cell.\n\n let no_shape_crossing = {\n\n let first = distances[0].is_sign_positive();\n\n let mut all_same = true;\n\n for d in &distances[1..] 
{\n\n if d.is_sign_positive() != first {\n\n all_same = false;\n\n break;\n\n }\n\n }\n\n\n\n all_same\n", "file_path": "src/mesh/buffer.rs", "rank": 34, "score": 61153.84246436828 }, { "content": " // from the shape results in a mesh more closely representing\n\n // the shape.\n\n .map(|(from, to)| {\n\n // Here we want to make sure that `d_from` is negative and\n\n // `d_to` is positive.\n\n //\n\n // Remember: we already know that both distances have\n\n // different signs!\n\n let (d_from, d_to) = if distances[from] < 0.0 {\n\n (distances[from], distances[to])\n\n } else {\n\n (-distances[from], -distances[to])\n\n };\n\n\n\n // This condition is only true if `d_from == -0.0`. In\n\n // theory this might happen, so we better deal with it.\n\n let weight_from = if d_to == d_from {\n\n 0.5\n\n } else {\n\n // Here we calculate the weight (a number between 0 and\n", "file_path": "src/mesh/buffer.rs", "rank": 35, "score": 61153.59651070164 }, { "content": " // require more queries to the shape (our current bottleneck for\n\n // mandelbulb).\n\n //\n\n // Instead, we simply weight both endpoints of the edge by the\n\n // already calculated distances. Improving this might be worth\n\n // experimenting (see #1).\n\n let edge_crossings = EDGES.iter().cloned()\n\n .map(|(from, to)| (from as usize, to as usize))\n\n\n\n // We are only interested in the edges with shape crossing. The\n\n // edge crosses the shape iff the endpoints' estimated minimal\n\n // distances have different signs (\"minus\" means: inside the\n\n // shape).\n\n .filter(|&(from, to)| {\n\n distances[from].is_sign_positive() != distances[to].is_sign_positive()\n\n })\n\n\n\n // Next, we convert the edge into a vertex on said edge. We\n\n // could just use the center point of the two endpoints. 
But\n\n // weighting each endpoint with the estimated minimal distance\n", "file_path": "src/mesh/buffer.rs", "rank": 36, "score": 61152.999801662125 }, { "content": " v1, v2, v3,\n\n ]\n\n } else {\n\n // ccw\n\n [\n\n v0, v1, v2,\n\n v1, v3, v2,\n\n ]\n\n }\n\n );\n\n }\n\n }\n\n\n\n let after_third = Instant::now();\n\n let timings = Timings {\n\n first: before_second - before_first,\n\n second: before_third - before_second,\n\n third: after_third - before_third,\n\n vertices: vertices.len() as u32,\n\n faces: indices.len() as u32 / 6,\n", "file_path": "src/mesh/buffer.rs", "rank": 37, "score": 61151.074032229335 }, { "content": " Self::naive_surface_nets(span, shape, resolution)\n\n }\n\n\n\n /// Implementation of the \"Surface Nets\" algorithm.\n\n ///\n\n /// In particular, in this implementation the position of the vertex inside\n\n /// the 3D-cell is simply the centroid of all edge crossings. This rather\n\n /// easy version is described [in this article][1] (\"naive surface nets\").\n\n ///\n\n /// The article will also help understand this algorithm. 
Compared to\n\n /// other algorithms for rendering iso surfaces, this one is relatively\n\n /// easy to implement while still working fairly nice.\n\n ///\n\n /// In the future we might want to switch to the \"Dual Contouring\" scheme\n\n /// as it preserves sharp features of the shape (see #2).\n\n ///\n\n /// [1]: https://0fps.net/2012/07/12/smooth-voxel-terrain-part-2/\n\n fn naive_surface_nets(\n\n span: &Span,\n\n shape: &dyn Shape,\n", "file_path": "src/mesh/buffer.rs", "rank": 38, "score": 61151.03901693944 }, { "content": " // Edge from the current corner pointing in +x direction\n\n if y > 0 && z > 0 && base_sign != dists[(x + 1, y, z)].is_sign_positive() {\n\n let v0 = points[(x, y - 1, z - 1)];\n\n let v1 = points[(x, y - 1, z )];\n\n let v2 = points[(x, y , z - 1)];\n\n let v3 = points[(x, y , z )];\n\n\n\n indices.extend_from_slice(&\n\n // distance negative, triangle cw\n\n if dists[(x, y, z)] < 0.0 {\n\n [\n\n v0, v2, v1,\n\n v1, v2, v3,\n\n ]\n\n } else {\n\n // ccw\n\n [\n\n v0, v1, v2,\n\n v1, v3, v2,\n\n ]\n", "file_path": "src/mesh/buffer.rs", "rank": 39, "score": 61147.58885141643 }, { "content": " [\n\n v0, v2, v1,\n\n v1, v2, v3,\n\n ]\n\n }\n\n );\n\n }\n\n\n\n // Edge from the current corner pointing in +z direction\n\n if x > 0 && y > 0 && base_sign != dists[(x, y, z + 1)].is_sign_positive() {\n\n let v0 = points[(x - 1, y - 1, z)];\n\n let v1 = points[(x - 1, y , z)];\n\n let v2 = points[(x, y - 1, z)];\n\n let v3 = points[(x, y , z)];\n\n\n\n indices.extend_from_slice(&\n\n // distance negative, triangle cw\n\n if dists[(x, y, z)] < 0.0 {\n\n [\n\n v0, v2, v1,\n", "file_path": "src/mesh/buffer.rs", "rank": 40, "score": 61147.58885141643 }, { "content": " (3, 7), // +y +z\n\n\n\n // Edges whose endpoints differ in the y coordinate (first\n\n // corner id is -y, second is +y).\n\n (0, 2), // -x -z\n\n (1, 3), // -x +z\n\n (4, 6), // +x -z\n\n (5, 7), // +x +z\n\n\n\n // Edges whose endpoints differ in the z coordinate (first\n\n // corner 
id is -z, second is +z).\n\n (0, 1), // -x -y\n\n (2, 3), // -x +y\n\n (4, 5), // +x -y\n\n (6, 7), // +x +y\n\n ];\n\n\n\n // Get all edge crossings. These are points where the edges of the\n\n // current cell intersect the surface... more or less. We do NOT\n\n // find the correct crossing point by ray marching, as this would\n", "file_path": "src/mesh/buffer.rs", "rank": 41, "score": 61147.58885141643 }, { "content": " }\n\n );\n\n }\n\n\n\n // Edge from the current corner pointing in +y direction\n\n if x > 0 && z > 0 && base_sign != dists[(x, y + 1, z)].is_sign_positive() {\n\n let v0 = points[(x - 1, y, z - 1)];\n\n let v1 = points[(x - 1, y, z )];\n\n let v2 = points[(x, y, z - 1)];\n\n let v3 = points[(x, y, z )];\n\n\n\n indices.extend_from_slice(&\n\n // distance negative, triangle cw\n\n if dists[(x, y, z)] < 0.0 {\n\n [\n\n v0, v1, v2,\n\n v1, v3, v2,\n\n ]\n\n } else {\n\n // ccw\n", "file_path": "src/mesh/buffer.rs", "rank": 42, "score": 61147.58885141643 }, { "content": " // 1 inclusive) for the `from` endpoint. `delta` is\n\n // the difference between the two distances.\n\n //\n\n // First we will shift the distance to \"the right\",\n\n // making it positive. 
Then, we scale it by delta.\n\n //\n\n // - d_from + delta is always >= 0.0\n\n // - d_from + delta is always <= delta\n\n // ==> `(d_from + delta) / delta` is always in 0...1\n\n //\n\n // For d_from == 0 and d_to > 0:\n\n // - d_from + delta == delta\n\n // ==> result is: delta / delta == 1\n\n //\n\n // For d_from < 0 and d_to == 0:\n\n // - d_from + delta == 0\n\n // ==> result is: 0 / delta == 0\n\n let delta = d_to - d_from;\n\n (d_from + delta) / delta\n\n };\n", "file_path": "src/mesh/buffer.rs", "rank": 43, "score": 61147.58885141643 }, { "content": "use std::ops::Range;\n\nuse cgmath::Point3;\n\n\n\n#[macro_use]\n\nmod util;\n\nmod mandelbulb;\n\nmod sphere;\n\n\n\n#[cfg(test)]\n\nmod bench;\n\n\n\npub use self::mandelbulb::Mandelbulb;\n\npub use self::sphere::Sphere;\n\n\n\n/// Describes a 3D object that can be rendered by this application.\n\n///\n\n/// Unlike in standard real time 3D graphics, the object is not represented\n\n/// with a triangle mesh, but via a small set of functions (actually, only one\n\n/// function is important). It's usually called distance field or distance\n\n/// function as the only way to get information about our object is by querying\n\n/// the so called distance estimator (DE). This function returns an\n\n/// approximation of the distance from a given point to the surface of the\n\n/// mesh. See `min_distance_from()` for more information.\n", "file_path": "src/shape/mod.rs", "rank": 44, "score": 60397.27077574902 }, { "content": " }\n\n\n\n /// Combines `min_distance_from()` and `max_distance_from()`: returns a\n\n /// tuple of the lower and upper bound (in that order).\n\n ///\n\n /// This method is here for optimization purposes only. 
If you are\n\n /// interested in the lower *and* upper bound, you should call this method\n\n /// as shapes may implement it more efficiently than calling\n\n /// `min_distance_from()` and `max_distance_from()` independently.\n\n fn bounded_distance_from(&self, p: Point3<f32>) -> (f32, Option<f32>) {\n\n (self.min_distance_from(p), self.max_distance_from(p))\n\n }\n\n\n\n /// Returns true iff the given point lies in the shape.\n\n fn contains(&self, p: Point3<f32>) -> bool {\n\n self.min_distance_from(p) < 0.0\n\n }\n\n\n\n /// Calls `min_distance_from()` for each given point and returns the\n\n /// results as vector. This is for use through a trait object to reduce\n", "file_path": "src/shape/mod.rs", "rank": 45, "score": 60388.715757970655 }, { "content": " /// the virtual call overhead.\n\n ///\n\n /// This method and the other two `batch_` methods should be implemented\n\n /// by using the `impl_batch_methods` macro, if it's not possible to\n\n /// improve performance by writing a custom implementation.\n\n fn batch_min_distance_from(&self, points: &[Point3<f32>]) -> Vec<f32>;\n\n\n\n /// Calls `max_distance_from()` for each given point and returns the\n\n /// results as vector. See `batch_min_distance_from()` for more\n\n /// information.\n\n ///\n\n /// This will panic when `max_distance_from()` returns `None`.\n\n fn batch_max_distance_from(&self, points: &[Point3<f32>]) -> Vec<f32>;\n\n\n\n /// Calls `bounded_distance_from()` for each given point and returns the\n\n /// results as vector. 
See `batch_min_distance_from()` for more\n\n /// information.\n\n ///\n\n /// This will panic when `max_distance_from()` returns `None`.\n\n fn batch_bounded_distance_from(&self, points: &[Point3<f32>]) -> Vec<(f32, f32)>;\n", "file_path": "src/shape/mod.rs", "rank": 46, "score": 60387.67602568775 }, { "content": " ///\n\n /// The GLSL function needs to have this signature:\n\n ///\n\n /// ```\n\n /// float shape_de(vec3 point)\n\n /// ```\n\n fn de_shader(&self) -> String;\n\n\n\n /// Returns an upper bound of the distance from `p` to the closest surface\n\n /// point of the shape, or `None` if no such estimate can be made.\n\n ///\n\n /// Whether or not this function returns `None` might only depend on the\n\n /// implementer (the `self` parameter) and *not* on `p`! So if this\n\n /// function returns `None` once, the calling code can assume that this\n\n /// shape can never return an upper bound.\n\n ///\n\n /// Similar to `min_distance_from()` this upper bound must converge to the\n\n /// real value as we approach the surface.\n\n fn max_distance_from(&self, _p: Point3<f32>) -> Option<f32> {\n\n None\n", "file_path": "src/shape/mod.rs", "rank": 47, "score": 60386.43502931542 }, { "content": "\n\n}\n\n\n\n\n\n// Some points close to the surface of the mandelbulb which are used for\n\n// benchmarking.\n\n#[cfg(test)]\n\nconst BENCH_POINTS: [[f32; 3]; 20] = [\n\n [-0.73772496, -0.002343091, -0.7382717],\n\n [-0.7484558, -0.8255949, -0.0026540023],\n\n [-1.0951594, -0.0014639703, -0.0027306266],\n\n [-0.60622436, -0.16786861, 0.7227598],\n\n [-0.6000897, -0.5997089, 0.028461732],\n\n [-0.6077231, -0.8336551, -0.004541016],\n\n [-0.05153041, -0.5906257, -0.7647207],\n\n [-0.73772484, -0.0030531297, -0.7382715],\n\n [-1.09658, -0.032518614, 0.026089936],\n\n [-0.74845594, -0.8255949, -0.0033077204],\n\n [-0.0031473506, 0.59545904, 0.7711717],\n\n [0.59178185, -0.009300065, 0.70574695],\n", "file_path": "src/shape/mod.rs", "rank": 48, "score": 60384.36198541924 }, { 
"content": " [0.5934337, -0.0065053166, -0.8548532],\n\n [0.5906368, 0.5906708, 0.0002929632],\n\n [0.5909915, 0.6001409, -0.4285654],\n\n [-0.004541016, 0.5956404, 0.36293367],\n\n [-0.00073693885, 0.5916996, -0.8447121],\n\n [0.59545904, -0.004541016, 0.35817686],\n\n [0.59545904, -0.004541016, -0.3581769],\n\n [0.60028464, -0.36826742, 0.6579103],\n\n\n\n];\n", "file_path": "src/shape/mod.rs", "rank": 49, "score": 60381.06174574407 }, { "content": "#[inline(always)]\n\nfn rotate<const P: u8>(p: Vec3) -> Vec3 {\n\n // Handle special case (general formula is not able to handle points on\n\n // the z axis).\n\n if p.is_on_z_axis() {\n\n return rotate_on_z_axis::<P>(p);\n\n }\n\n\n\n\n\n // For some integer powers there are formulas without trigonometric\n\n // functions. This improves performance a lot (see #17).\n\n match P {\n\n 8 => rotate_inner_p8_scalar(p),\n\n // 8 => unsafe { rotate_inner_p8_simd(p) },\n\n _ => rotate_inner_px_generic::<P>(p),\n\n }\n\n}\n\n\n", "file_path": "src/shape/mandelbulb.rs", "rank": 62, "score": 58571.59769434438 }, { "content": "#[inline(never)]\n\n#[cold]\n\nfn rotate_on_z_axis<const P: u8>(p: Vec3) -> Vec3 {\n\n let old_radius = p.magnitude();\n\n let theta = (p.z() / old_radius).acos();\n\n\n\n // Scale and rotate the point\n\n let new_radius = old_radius.powi(P.into());\n\n let theta = theta * P as f32;\n\n\n\n // Convert back to cartesian coordinates\n\n Vec3::new(0.0, 0.0, new_radius * theta.cos())\n\n}\n\n\n", "file_path": "src/shape/mandelbulb.rs", "rank": 63, "score": 56843.89510059098 }, { "content": "fn rotate_inner_px_generic<const P: u8>(p: Vec3) -> Vec3 {\n\n let old_radius = p.magnitude();\n\n\n\n // Convert to spherical coordinates\n\n let theta = (p.z() / old_radius).acos();\n\n let phi = f32::atan2(p.y(), p.x());\n\n\n\n // Scale and rotate the point\n\n let new_radius = old_radius.powi(P.into());\n\n let theta = theta * P as f32;\n\n let phi = phi * P as f32;\n\n\n\n // Convert back to cartesian coordinates\n\n 
new_radius * Vec3::new(\n\n theta.sin() * phi.cos(),\n\n phi.sin() * theta.sin(),\n\n theta.cos(),\n\n )\n\n}\n\n\n", "file_path": "src/shape/mandelbulb.rs", "rank": 64, "score": 53740.170893395836 }, { "content": "struct App {\n\n window: Rc<Window>,\n\n wgpu: Wgpu,\n\n\n\n control: KeySwitcher<OrbitControl, FlyControl>,\n\n fps_timer: FpsTimer,\n\n last_update: Instant,\n\n\n\n sky: Sky,\n\n shape: Arc<dyn Shape>,\n\n mesh: ShapeMesh,\n\n}\n\n\n\nimpl App {\n\n async fn new(window: Rc<Window>) -> Result<Self> {\n\n let wgpu = Wgpu::new(&window).await.context(\"failed to initialize wgpu\")?;\n\n\n\n // Initialize our projection parameters.\n\n let proj = Projection::new(Rad(1.0), 0.000_04..10.0, window.inner_size().into());\n\n\n", "file_path": "src/app.rs", "rank": 65, "score": 52413.056526306944 }, { "content": "#[derive(Copy, Clone)]\n\nstruct Vertex {\n\n pos: [f32; 3],\n\n unit_pos: [f32; 2],\n\n}\n\n\n\nimplement_vertex!(Vertex, pos, unit_pos);\n", "file_path": "src/sky/sun.rs", "rank": 66, "score": 50951.55015612146 }, { "content": "#[derive(Copy, Clone)]\n\n#[repr(C)]\n\nstruct Vertex {\n\n pos: [f32; 3],\n\n}\n\n\n\n// `Vertex` is inhabited, allows any bitpattern, has no padding, all fields are\n\n// `Pod`, and is `repr(C)`.\n\nunsafe impl bytemuck::Pod for Vertex {}\n\nunsafe impl bytemuck::Zeroable for Vertex {}\n\n\n\n// We represent the sky by a diamond shaped mesh. 
The shaders will interpret the\n\n// data they get accordingly to make it look like a perfect sky sphere.\n\nconst VERTICES: &[Vertex] = &[\n\n Vertex { pos: [-SKY_DISTANCE, 0.0, 0.0] }, // -x\n\n Vertex { pos: [ 0.0, -SKY_DISTANCE, 0.0] }, // -y\n\n Vertex { pos: [ SKY_DISTANCE, 0.0, 0.0] }, // +x\n\n Vertex { pos: [ 0.0, SKY_DISTANCE, 0.0] }, // +y\n\n Vertex { pos: [ 0.0, 0.0, -SKY_DISTANCE] }, // -z\n\n Vertex { pos: [ 0.0, 0.0, SKY_DISTANCE] }, // +z\n\n];\n\n\n", "file_path": "src/sky/dome.rs", "rank": 67, "score": 50951.55015612146 }, { "content": "fn main() {\n\n // Init logger implementation\n\n env_logger::init();\n\n\n\n // Create whole app and run it, if it succeeds\n\n let res = futures::executor::block_on(app::run());\n\n\n\n // Pretty print error chain\n\n if let Err(e) = res {\n\n eprintln!(\"Cantucci error: {:?}\", e);\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 68, "score": 49226.05472752839 }, { "content": "fn main() -> Result<()> {\n\n compile_shaders()?;\n\n\n\n Ok(())\n\n}\n\n\n\n\n\nconst SHADERS: &[&str] = &[\n\n \"dome.vert\",\n\n \"dome.frag\",\n\n \"surface.vert\",\n\n \"surface.frag\",\n\n];\n\n\n", "file_path": "build.rs", "rank": 69, "score": 47484.1930464411 }, { "content": "fn compile_shaders() -> Result<()> {\n\n let out_dir = Path::new(&std::env::var(\"OUT_DIR\").unwrap()).join(\"shaders\");\n\n if !out_dir.exists() {\n\n fs::create_dir(&out_dir)?;\n\n }\n\n\n\n let mut compiler = Compiler::new().unwrap();\n\n\n\n for filename in SHADERS {\n\n let full_path = Path::new(&std::env::var(\"CARGO_MANIFEST_DIR\").unwrap())\n\n .join(\"src\")\n\n .join(\"shaders\")\n\n .join(filename);\n\n let out_path = out_dir.join(format!(\"{}.spirv\", filename));\n\n if !full_path.exists() {\n\n bail!(\"shader file '{}' does not exist\", full_path.display());\n\n }\n\n\n\n // If the spirv file is newer than the source file, we skip this shader.\n\n // Cargo makes sure the build script is only rerun if any of the shader\n", "file_path": 
"build.rs", "rank": 70, "score": 46037.8971681263 }, { "content": "/// Ability to handle and react to to certain input events.\n\npub trait EventHandler {\n\n fn handle_event(&mut self, e: &Event<()>) -> EventResponse;\n\n}\n\n\n\nimpl<F: FnMut(&Event<()>) -> EventResponse> EventHandler for F {\n\n fn handle_event(&mut self, e: &Event<()>) -> EventResponse {\n\n self(e)\n\n }\n\n}\n\n\n\n/// Handler that handles events intended to quit the program.\n\npub struct QuitHandler;\n\n\n\nimpl EventHandler for QuitHandler {\n\n fn handle_event(&mut self, e: &Event<()>) -> EventResponse {\n\n match e {\n\n Event::WindowEvent {\n\n event: WindowEvent::CloseRequested\n\n | WindowEvent::Destroyed\n\n | WindowEvent::KeyboardInput {\n", "file_path": "src/event.rs", "rank": 71, "score": 45293.37242693329 }, { "content": "pub trait ShaderSource {\n\n fn vert_path(&self) -> &Path;\n\n fn frag_path(&self) -> &Path;\n\n}\n\n\n\nimpl<'a> ShaderSource for &'a str {\n\n fn vert_path(&self) -> &Path { Path::new(self) }\n\n fn frag_path(&self) -> &Path { Path::new(self) }\n\n}\n\nimpl<V: AsRef<Path>, F: AsRef<Path>> ShaderSource for (V, F) {\n\n fn vert_path(&self) -> &Path { self.0.as_ref() }\n\n fn frag_path(&self) -> &Path { self.1.as_ref() }\n\n}\n", "file_path": "src/util/gl.rs", "rank": 72, "score": 44085.86937084383 }, { "content": "fn link_program<F: Facade>(\n\n facade: &F,\n\n vert_buf: &str,\n\n frag_buf: &str,\n\n) -> Result<Program> {\n\n debug!(\"Linking program ...\");\n\n\n\n trace!(\"Vertex shader:\\n{}\", vert_buf);\n\n trace!(\"Fragment shader:\\n{}\", frag_buf);\n\n\n\n Program::from_source(\n\n facade,\n\n &vert_buf,\n\n &frag_buf,\n\n None\n\n ).map_err(|e| {\n\n warn!(\"Linking program failed. 
Additional information:\");\n\n warn!(\"{}\", e);\n\n\n\n e.into()\n\n })\n\n}\n\n\n", "file_path": "src/util/gl.rs", "rank": 73, "score": 40971.052559388605 }, { "content": "pub trait Lerp<F: LerpFactor> {\n\n fn lerp(self, other: Self, t: F) -> Self;\n\n}\n\n\n\nmacro_rules! impl_lerp {\n\n ($self_type:ty, $factor:ty) => {\n\n impl Lerp<$factor> for $self_type {\n\n fn lerp(self, other: Self, t: $factor) -> Self {\n\n self * (1.0 - t) + other * t\n\n }\n\n }\n\n }\n\n}\n\n\n\nimpl_lerp!(Vector3<f32>, f32);\n\nimpl_lerp!(f32, f32);\n\nimpl_lerp!(Rad<f32>, f32);\n\n\n\nimpl Lerp<f32> for Point3<f32> {\n\n fn lerp(self, other: Self, t: f32) -> Self {\n\n self * (1.0 - t) + other.to_vec() * t\n\n }\n\n}\n", "file_path": "src/math.rs", "rank": 74, "score": 40250.12342513931 }, { "content": "/// Clamps theta into the allowed range\n\nfn clamp_theta(theta: Rad<f32>) -> Rad<f32> {\n\n Rad(theta.0.clamp(THETA_SAFE_EPSILON.0, std::f32::consts::PI - THETA_SAFE_EPSILON.0))\n\n}\n\n\n\n/// Represents a specific projection that can be transformed by the selected\n\n/// rendering method.\n\n#[derive(Clone, Copy, PartialEq, Debug)]\n\npub struct Projection {\n\n /// Field of view in the y direction (in range [0, π/2]).\n\n pub fov: Rad<f32>,\n\n\n\n /// Ratio between the width and the height. 
The field of view in the x\n\n /// direction is `self.fov * aspect_ratio`.\n\n aspect_ratio: f32,\n\n\n\n /// Everything closer to the camera than this won't be rendered.\n\n pub near_plane: f32,\n\n\n\n /// Everything farther away from the camera than this won't be rendered.\n\n pub far_plane: f32,\n", "file_path": "src/camera.rs", "rank": 75, "score": 35892.2212835765 }, { "content": "#![allow(unused_imports)] // TODO\n\n\n\npub(crate) use log::{trace, debug, info, warn, error};\n\npub(crate) use anyhow::{anyhow, bail, Context as _, Error, Result};\n", "file_path": "src/prelude.rs", "rank": 76, "score": 33098.59683382086 }, { "content": "fn load_file(file_name: &Path, ext: &str) -> Result<String> {\n\n let shader_folder = Path::new(SHADER_FOLDER);\n\n let path = shader_folder.join(file_name).with_extension(ext);\n\n debug!(\"Loading shader '{}' ...\", path.display());\n\n\n\n let mut buf = String::new();\n\n File::open(path).and_then(|mut f| f.read_to_string(&mut buf))?;\n\n\n\n Ok(buf)\n\n}\n\n\n", "file_path": "src/util/gl.rs", "rank": 77, "score": 32677.968814341526 }, { "content": "pub trait LerpFactor: Zero + One + PartialOrd + Sub<Output=Self> {}\n\nimpl LerpFactor for f64 {}\n\nimpl LerpFactor for f32 {}\n\n\n", "file_path": "src/math.rs", "rank": 78, "score": 32372.495465845288 }, { "content": "use std::time::Duration;\n\nuse std::fmt;\n\n\n", "file_path": "src/util/time.rs", "rank": 79, "score": 31449.95606283989 }, { "content": "use std::ops::Range;\n\nuse cgmath::{prelude::*, Point3, Vector3};\n\n\n\nuse super::Shape;\n\n\n\n#[derive(Clone)]\n\npub struct Sphere {\n\n center: Point3<f32>,\n\n radius: f32,\n\n}\n\n\n\nimpl Sphere {\n\n #[allow(dead_code)] // TODO\n\n pub fn new(center: Point3<f32>, radius: f32) -> Self {\n\n Sphere {\n\n center,\n\n radius,\n\n }\n\n }\n\n}\n", "file_path": "src/shape/sphere.rs", "rank": 80, "score": 30342.673606669374 }, { "content": "\n\n Mandelbulb {\n\n max_iters,\n\n bailout,\n\n }\n\n }\n\n}\n\n\n\nimpl 
Mandelbulb<8> {\n\n pub fn classic(max_iters: u64, bailout: f32) -> Self {\n\n Self::new(max_iters, bailout)\n\n }\n\n}\n\n\n\nimpl<const P: u8> Shape for Mandelbulb<P> {\n\n // fn contains(&self, p: Point3<f32>) -> bool {\n\n // let mut z = p;\n\n\n\n // for _ in 0..self.max_iters {\n\n // // If the radius is bigger than BAILOUT, this point will diverge\n", "file_path": "src/shape/mandelbulb.rs", "rank": 81, "score": 30337.560871469756 }, { "content": "use std::{\n\n arch::x86_64::*,\n\n ops::{Add, Mul, Range, Sub},\n\n};\n\nuse cgmath::Point3;\n\n\n\nuse super::Shape;\n\n\n\n/// Represents the 3D version of the classical mandelbulb described [here][1].\n\n///\n\n/// [1]: http://www.skytopia.com/project/fractal/mandelbulb.html\n\n#[derive(Clone)]\n\npub struct Mandelbulb<const P: u8> {\n\n max_iters: u64,\n\n bailout: f32,\n\n}\n\n\n\nimpl<const P: u8> Mandelbulb<P> {\n\n pub fn new(max_iters: u64, bailout: f32) -> Self {\n\n assert!(max_iters >= 1);\n", "file_path": "src/shape/mandelbulb.rs", "rank": 82, "score": 30337.272657579917 }, { "content": "\n\nimpl Shape for Sphere {\n\n // Overwrite default method for performance\n\n fn contains(&self, p: Point3<f32>) -> bool {\n\n (self.center - p).magnitude2() <= (self.radius * self.radius)\n\n }\n\n\n\n fn bounding_box(&self) -> Range<Point3<f32>> {\n\n let off = Vector3::new(self.radius, self.radius, self.radius);\n\n self.center + -off .. 
self.center + off\n\n }\n\n\n\n fn min_distance_from(&self, p: Point3<f32>) -> f32 {\n\n (self.center - p).magnitude() - self.radius\n\n }\n\n\n\n fn max_distance_from(&self, p: Point3<f32>) -> Option<f32> {\n\n Some(self.min_distance_from(p))\n\n }\n\n\n", "file_path": "src/shape/sphere.rs", "rank": 83, "score": 30333.00757073769 }, { "content": "impl Mul<f32> for Vec3 {\n\n type Output = Self;\n\n fn mul(self, other: f32) -> Self {\n\n Self(unsafe { _mm_mul_ps(self.0, Self::new(other, other, other).0) })\n\n }\n\n}\n\n\n\nimpl Mul<Vec3> for f32 {\n\n type Output = Vec3;\n\n fn mul(self, other: Vec3) -> Vec3 {\n\n other * self\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod bench {\n\n use test::{Bencher, black_box};\n\n use super::Vec3;\n\n use super::super::BENCH_POINTS;\n\n\n", "file_path": "src/shape/mandelbulb.rs", "rank": 84, "score": 30331.309207362272 }, { "content": "use test::{Bencher, black_box};\n\n\n\nuse super::{BENCH_POINTS, Mandelbulb, Shape};\n\n\n\n\n\n// #[bench]\n\n// fn mandel_single_i8_b5(b: &mut Bencher) {\n\n// let m = Mandelbulb::classic(8, 5.0);\n\n// let p = black_box(POINTS[0]);\n\n// b.iter(|| m.min_distance_from(p));\n\n// }\n\n\n\n#[bench]\n", "file_path": "src/shape/bench.rs", "rank": 85, "score": 30328.675161035593 }, { "content": " // if (z - CENTER).magnitude() > self.bailout {\n\n // return false;\n\n // }\n\n\n\n // z = rotate::<P>(z) + (p - CENTER);\n\n // }\n\n\n\n // // The point didn't diverge within `max_iters`, so we assume it's in\n\n // // the set\n\n // true\n\n // }\n\n\n\n fn bounding_box(&self) -> Range<Point3<f32>> {\n\n // TODO: This value was found by experimenting... we should prove this\n\n // value\n\n Point3::new(-1.2, -1.2, -1.2) .. 
Point3::new(1.2, 1.2, 1.2)\n\n }\n\n\n\n fn min_distance_from(&self, p: Point3<f32>) -> f32 {\n\n let p = Vec3::new(p.x, p.y, p.z);\n", "file_path": "src/shape/mandelbulb.rs", "rank": 86, "score": 30327.456382471824 }, { "content": " #[bench]\n\n fn rotate_inner_generic(b: &mut Bencher) {\n\n b.iter(|| {\n\n for &[x, y, z] in &BENCH_POINTS {\n\n black_box(super::rotate_inner_px_generic::<8>(Vec3::new(x, y, z)));\n\n }\n\n });\n\n }\n\n\n\n #[bench]\n\n fn rotate_inner_p8_scalar(b: &mut Bencher) {\n\n b.iter(|| {\n\n for &[x, y, z] in &BENCH_POINTS {\n\n black_box(super::rotate_inner_p8_scalar(Vec3::new(x, y, z)));\n\n }\n\n });\n\n }\n\n\n\n #[cfg(all(\n\n target_arch = \"x86_64\",\n", "file_path": "src/shape/mandelbulb.rs", "rank": 87, "score": 30326.039391560258 }, { "content": "macro_rules! impl_batch_methods {\n\n () => {\n\n fn batch_min_distance_from(&self, points: &[Point3<f32>]) -> Vec<f32> {\n\n points.iter().map(|&p| self.min_distance_from(p)).collect()\n\n }\n\n\n\n fn batch_max_distance_from(&self, points: &[Point3<f32>]) -> Vec<f32> {\n\n points.iter().map(|&p| self.max_distance_from(p).unwrap()).collect()\n\n }\n\n\n\n fn batch_bounded_distance_from(&self, points: &[Point3<f32>]) -> Vec<(f32, f32)> {\n\n points.iter().map(|&p| {\n\n let (min, max) = self.bounded_distance_from(p);\n\n (min, max.unwrap())\n\n }).collect()\n\n }\n\n }\n\n}\n", "file_path": "src/shape/util.rs", "rank": 88, "score": 30325.372934109197 }, { "content": " target_feature = \"sse4.1\"\n\n ))]\n\n #[bench]\n\n fn rotate_inner_p8_simd(b: &mut Bencher) {\n\n b.iter(|| {\n\n for &[x, y, z] in &BENCH_POINTS {\n\n black_box(unsafe { super::rotate_inner_p8_simd(Vec3::new(x, y, z)) });\n\n }\n\n });\n\n }\n\n}\n", "file_path": "src/shape/mandelbulb.rs", "rank": 89, "score": 30325.327785110327 }, { "content": " fn de_shader(&self) -> String {\n\n let s = include_str!(\"shape.frag\")\n\n .replace(\"{X}\", &self.center.x.to_string())\n\n .replace(\"{Y}\", &self.center.y.to_string())\n\n 
.replace(\"{Z}\", &self.center.z.to_string())\n\n .replace(\"{RADIUS}\", &self.radius.to_string());\n\n\n\n s\n\n }\n\n\n\n impl_batch_methods!();\n\n}\n", "file_path": "src/shape/sphere.rs", "rank": 90, "score": 30325.28949779937 }, { "content": " let mut z = p;\n\n let mut dr = 1.0;\n\n let mut r = 0.0;\n\n\n\n for _ in 0..self.max_iters {\n\n // TODO: this here should return the magnitude² as we need it ...\n\n r = z.magnitude();\n\n if r > self.bailout {\n\n break;\n\n }\n\n\n\n // ... here in the ^7 thingy.\n\n dr = r.powi(P as i32 - 1) * (P as f32) * dr + 1.0;\n\n z = rotate::<P>(z) + p;\n\n }\n\n\n\n let ln_r = r.ln() * r;\n\n 0.5 * ln_r / dr\n\n }\n\n\n", "file_path": "src/shape/mandelbulb.rs", "rank": 91, "score": 30323.62465411667 }, { "content": " #[inline(always)]\n\n fn is_on_z_axis(self) -> bool {\n\n self.x() == 0.0 && self.y() == 0.0\n\n }\n\n}\n\n\n\nimpl Add for Vec3 {\n\n type Output = Self;\n\n fn add(self, other: Self) -> Self {\n\n Self(unsafe { _mm_add_ps(self.0, other.0) })\n\n }\n\n}\n\n\n\nimpl Sub for Vec3 {\n\n type Output = Self;\n\n fn sub(self, other: Self) -> Self {\n\n Self(unsafe { _mm_sub_ps(self.0, other.0) })\n\n }\n\n}\n\n\n", "file_path": "src/shape/mandelbulb.rs", "rank": 92, "score": 30323.24794087346 }, { "content": " let rxy2 = x2 + y2;\n\n let rxy4 = rxy2 * rxy2;\n\n let rxy6 = rxy2 * rxy4;\n\n let rxy8 = rxy4 * rxy4;\n\n\n\n let a = 1.0 + (\n\n z8\n\n - 28.0 * z6 * rxy2\n\n + 70.0 * z4 * rxy4\n\n - 28.0 * z2 * rxy6\n\n ) / rxy8;\n\n\n\n\n\n Vec3::new(\n\n a * (\n\n x8\n\n - 28.0 * x6 * y2\n\n + 70.0 * x4 * y4\n\n - 28.0 * x2 * y6\n\n - y8\n", "file_path": "src/shape/mandelbulb.rs", "rank": 93, "score": 30322.34518636799 }, { "content": "\n\n #[inline(always)]\n\n fn is_on_z_axis(self) -> bool {\n\n unsafe {\n\n let mask = _mm_set_epi32(0, 0, 0x7FFF_FFFF, 0x7FFF_FFFF);\n\n _mm_test_all_zeros(_mm_castps_si128(self.0), mask) == 1\n\n }\n\n }\n\n}\n\n\n\n#[cfg(all(\n\n target_arch = \"x86_64\",\n\n not(target_feature = 
\"sse4.1\")\n\n))]\n\nimpl Vec3 {\n\n #[inline(always)]\n\n fn magnitude(self) -> f32 {\n\n (self.x().powi(2) + self.y().powi(2) + self.z().powi(2)).sqrt()\n\n }\n\n\n", "file_path": "src/shape/mandelbulb.rs", "rank": 94, "score": 30322.180928771675 }, { "content": " #[inline(always)]\n\n fn z(self) -> f32 {\n\n unsafe { f32::from_bits(_mm_extract_ps(self.0, 2) as u32) }\n\n }\n\n\n\n}\n\n\n\n#[cfg(all(\n\n target_arch = \"x86_64\",\n\n target_feature = \"sse4.1\"\n\n))]\n\nimpl Vec3 {\n\n #[inline(always)]\n\n fn magnitude(self) -> f32 {\n\n unsafe {\n\n let len_squared = _mm_dp_ps(self.0, self.0, 0b0111_0001);\n\n let len = _mm_sqrt_ss(len_squared);\n\n _mm_cvtss_f32(len)\n\n }\n\n }\n", "file_path": "src/shape/mandelbulb.rs", "rank": 95, "score": 30322.157488957306 }, { "content": " fn de_shader(&self) -> String {\n\n let s = include_str!(\"mandelbulb.frag\")\n\n .replace(\"{BAILOUT}\", &self.bailout.to_string())\n\n .replace(\"{MAX_ITERS}\", &self.max_iters.to_string())\n\n .replace(\"{POWER}\", &P.to_string());\n\n\n\n s\n\n }\n\n\n\n impl_batch_methods!();\n\n}\n\n\n\n/// This operation rotates the point as triplex number. This is equivalent to\n\n/// the squaring in the original 2D mandelbrot. First we convert the point\n\n/// to spherical coordinates, then we rotate and convert them back.\n\n#[inline(always)]\n", "file_path": "src/shape/mandelbulb.rs", "rank": 96, "score": 30322.06812498076 }, { "content": " use core::arch::x86_64::*;\n\n\n\n let p = p.0;\n\n\n\n // We first calculate a bunch of powers of x, y, z and (x² + y²). The last\n\n // value we define as \"w²\". To be precise, we need:\n\n //\n\n // x x² x⁴ x⁶ x⁸\n\n // y y² y⁴ y⁶ y⁸\n\n // z z² z⁴ z⁶ z⁸\n\n // - w² w⁴ w⁶ w⁸\n\n // var = p p2 p4 p6 p8\n\n //\n\n //\n\n // We will calculate higher powers by multiplying. (x, y, z) is stored in\n\n // `p`. The subsequent powers will be stored in p2, p4, p6 and p8. 
The\n\n // highest component of these SIMD vectors will be wⁿ (except in p).\n\n\n\n // w² = x² + y² (In every position)\n\n let w2_everywhere = _mm_dp_ps(p, p, 0b0011_1111);\n", "file_path": "src/shape/mandelbulb.rs", "rank": 97, "score": 30321.635130653114 }, { "content": "\n\n // First simply multiply p with itself to get x², y² and z². Then we set the\n\n // highest component of `p2` to w² by bitwise or. In the line above we made\n\n // sure that the dest mask of `_mm_dp_ps` writes to the highest component.\n\n // All other are 0.\n\n let p2 = _mm_mul_ps(p, p);\n\n let p2 = _mm_blend_ps(p2, w2_everywhere, 0b1000);\n\n\n\n // Time to create the other powers.\n\n let p4 = _mm_mul_ps(p2, p2);\n\n let p6 = _mm_mul_ps(p4, p2);\n\n let p8 = _mm_mul_ps(p4, p4);\n\n\n\n // For later caculations it is beneficial to have all xs, ys, zs and ws in\n\n // one vector. So what we do here is basically a 4x4 matrix transpose. We do\n\n // this with the powers 2, 4, 6, 8. The original `p` is not involved in\n\n // this.\n\n let (xs, ys, zs, ws) = {\n\n let x2_x4_y2_y4 = _mm_unpacklo_ps(p2, p4);\n\n let x6_x8_y6_y8 = _mm_unpacklo_ps(p6, p8);\n", "file_path": "src/shape/mandelbulb.rs", "rank": 98, "score": 30321.353619682035 }, { "content": " ),\n\n 8.0 * a * x * y * (\n\n x6\n\n - 7.0 * x4 * y2\n\n + 7.0 * x2 * y4\n\n - y6\n\n ),\n\n 8.0 * z\n\n * rxy2.sqrt()\n\n * (z2 - rxy2)\n\n * (z4 - 6.0 * z2 * rxy2 + rxy4),\n\n )\n\n}\n\n\n\n#[cfg(all(\n\n target_arch = \"x86_64\",\n\n target_feature = \"sse4.1\"\n\n))]\n\n#[allow(dead_code)]\n\nunsafe fn rotate_inner_p8_simd(p: Vec3) -> Vec3 {\n", "file_path": "src/shape/mandelbulb.rs", "rank": 99, "score": 30319.774620778175 } ]
Rust
crates/dkg-core/src/node.rs
kafeikui/BLS-DKG-Demo
7e3e46b10715d76e6dfcdf48b8628ccb2c1eb305
use super::{ board::BoardPublisher, primitives::{ phases::{Phase0, Phase1, Phase2, Phase3}, types::{BundledJustification, BundledResponses, BundledShares, DKGOutput}, DKGError, }, }; use async_trait::async_trait; use rand::RngCore; use thiserror::Error; use threshold_bls::group::Curve; #[derive(Debug, Error)] pub enum NodeError { #[error("Could not publish to board")] PublisherError, #[error("DKG Error: {0}")] DKGError(#[from] DKGError), } #[derive(Clone, Debug)] pub enum Phase2Result<C: Curve, P: Phase3<C>> { Output(DKGOutput<C>), GoToPhase3(P), } type NodeResult<T> = std::result::Result<T, NodeError>; #[async_trait(?Send)] pub trait DKGPhase<C: Curve, B: BoardPublisher<C>, T> { type Next; async fn run(self, board: &mut B, arg: T) -> NodeResult<Self::Next> where C: 'async_trait, T: 'async_trait; } #[async_trait(?Send)] impl<C, B, R, P> DKGPhase<C, B, &mut R> for P where C: Curve, B: BoardPublisher<C>, R: RngCore, P: Phase0<C>, { type Next = P::Next; async fn run(self, board: &mut B, rng: &'async_trait mut R) -> NodeResult<Self::Next> where C: 'async_trait, { let (next, shares) = self.encrypt_shares(rng)?; if let Some(sh) = shares { board .publish_shares(sh) .await .map_err(|_| NodeError::PublisherError)?; } Ok(next) } } #[async_trait(?Send)] impl<C, B, P> DKGPhase<C, B, &[BundledShares<C>]> for P where C: Curve, B: BoardPublisher<C>, P: Phase1<C>, { type Next = P::Next; async fn run( self, board: &mut B, shares: &'async_trait [BundledShares<C>], ) -> NodeResult<Self::Next> where C: 'async_trait, { let (next, bundle) = self.process_shares(shares, false)?; if let Some(bundle) = bundle { board .publish_responses(bundle) .await .map_err(|_| NodeError::PublisherError)?; } Ok(next) } } #[async_trait(?Send)] impl<C, B, P> DKGPhase<C, B, &[BundledResponses]> for P where C: Curve, B: BoardPublisher<C>, P: Phase2<C>, { type Next = Phase2Result<C, P::Next>; async fn run( self, board: &mut B, responses: &'async_trait [BundledResponses], ) -> NodeResult<Self::Next> where C: 
'async_trait, { match self.process_responses(responses) { Ok(output) => Ok(Phase2Result::Output(output)), Err(next) => { match next { Ok((next, justifications)) => { if let Some(justifications) = justifications { board .publish_justifications(justifications) .await .map_err(|_| NodeError::PublisherError)?; } Ok(Phase2Result::GoToPhase3(next)) } Err(e) => Err(NodeError::DKGError(e)), } } } } } #[async_trait(?Send)] impl<C, B, P> DKGPhase<C, B, &[BundledJustification<C>]> for P where C: Curve, B: BoardPublisher<C>, P: Phase3<C>, { type Next = DKGOutput<C>; async fn run( self, _: &mut B, responses: &'async_trait [BundledJustification<C>], ) -> NodeResult<Self::Next> where C: 'async_trait, { Ok(self.process_justifications(responses)?) } } #[cfg(test)] mod tests { use super::*; use crate::{ primitives::{ group::{Group, Node}, joint_feldman, }, test_helpers::InMemoryBoard, }; use threshold_bls::{ curve::bls12381::{self, PairingCurve as BLS12_381}, curve::zexe::{self as bls12_377, PairingCurve as BLS12_377}, poly::Idx, sig::{BlindThresholdScheme, G1Scheme, G2Scheme, Scheme, SignatureScheme, ThresholdScheme}, }; fn bad_phase0<C: Curve, R: RngCore, P: Phase0<C>>(phase0: P, rng: &mut R) -> P::Next { let (next, _) = phase0.encrypt_shares(rng).unwrap(); next } #[tokio::test] async fn dkg_sign_e2e() { let (t, n) = (3, 5); dkg_sign_e2e_curve::<bls12381::Curve, G1Scheme<BLS12_381>>(n, t).await; dkg_sign_e2e_curve::<bls12381::G2Curve, G2Scheme<BLS12_381>>(n, t).await; dkg_sign_e2e_curve::<bls12_377::G1Curve, G1Scheme<BLS12_377>>(n, t).await; dkg_sign_e2e_curve::<bls12_377::G2Curve, G2Scheme<BLS12_377>>(n, t).await; } async fn dkg_sign_e2e_curve<C, S>(n: usize, t: usize) where C: Curve, S: Scheme<Public = <C as Curve>::Point, Private = <C as Curve>::Scalar> + BlindThresholdScheme + ThresholdScheme + SignatureScheme, { let msg = rand::random::<[u8; 32]>().to_vec(); let outputs = run_dkg::<C, S>(n, t).await; let (token, blinded_msg) = S::blind_msg(&msg[..], &mut rand::thread_rng()); 
let partial_sigs = outputs .iter() .map(|output| S::sign_blind_partial(&output.share, &blinded_msg[..]).unwrap()) .collect::<Vec<_>>(); let blinded_sig = S::aggregate(t, &partial_sigs).unwrap(); let unblinded_sig = S::unblind_sig(&token, &blinded_sig).unwrap(); let pubkey = outputs[0].public.public_key(); S::verify(&pubkey, &msg, &unblinded_sig).unwrap(); } async fn run_dkg<C, S>(n: usize, t: usize) -> Vec<DKGOutput<C>> where C: Curve, S: Scheme<Public = <C as Curve>::Point, Private = <C as Curve>::Scalar>, { let rng = &mut rand::thread_rng(); let (mut board, phase0s) = setup::<C, S, _>(n, t, rng); let mut phase1s = Vec::new(); for phase0 in phase0s { phase1s.push(phase0.run(&mut board, rng).await.unwrap()); } let shares = board.shares.clone(); let mut phase2s = Vec::new(); for phase1 in phase1s { phase2s.push(phase1.run(&mut board, &shares).await.unwrap()); } let responses = board.responses.clone(); let mut results = Vec::new(); for phase2 in phase2s { results.push(phase2.run(&mut board, &responses).await.unwrap()); } let outputs = results .into_iter() .map(|res| match res { Phase2Result::Output(out) => out, Phase2Result::GoToPhase3(_) => unreachable!("should not get here"), }) .collect::<Vec<_>>(); assert!(is_all_same(outputs.iter().map(|output| &output.public))); outputs } #[tokio::test] async fn not_enough_validator_shares() { let (t, n) = (6, 10); let bad = t + 1; let honest = n - bad; let rng = &mut rand::thread_rng(); let (mut board, phase0s) = setup::<bls12_377::G1Curve, G1Scheme<BLS12_377>, _>(n, t, rng); let mut phase1s = Vec::new(); for (i, phase0) in phase0s.into_iter().enumerate() { let phase1 = if i < bad { bad_phase0(phase0, rng) } else { phase0.run(&mut board, rng).await.unwrap() }; phase1s.push(phase1); } let shares = board.shares.clone(); let mut errs = Vec::new(); for phase1 in phase1s { let err = match phase1.run(&mut board, &shares).await.unwrap_err() { NodeError::DKGError(err) => err, _ => panic!("should get dkg error"), }; errs.push(err); } 
for err in &errs[..bad] { match err { DKGError::NotEnoughValidShares(got, required) => { assert_eq!(*got, honest); assert_eq!(*required, t); } _ => panic!("should not get here"), }; } for err in &errs[bad..] { match err { DKGError::NotEnoughValidShares(got, required) => { assert_eq!(*got, honest - 1); assert_eq!(*required, t); } _ => panic!("should not get here"), }; } } #[tokio::test] async fn dkg_phase3() { let (t, n) = (5, 8); let bad = 2; let rng = &mut rand::thread_rng(); let (mut board, phase0s) = setup::<bls12_377::G1Curve, G1Scheme<BLS12_377>, _>(n, t, rng); let mut phase1s = Vec::new(); for (i, phase0) in phase0s.into_iter().enumerate() { let phase1 = if i < bad { bad_phase0(phase0, rng) } else { phase0.run(&mut board, rng).await.unwrap() }; phase1s.push(phase1); } let shares = board.shares.clone(); let mut phase2s = Vec::new(); for phase1 in phase1s { phase2s.push(phase1.run(&mut board, &shares).await.unwrap()); } let responses = board.responses.clone(); let mut results = Vec::new(); for phase2 in phase2s { results.push(phase2.run(&mut board, &responses).await.unwrap()); } let phase3s = results .into_iter() .map(|res| match res { Phase2Result::GoToPhase3(p3) => p3, _ => unreachable!("should not get here"), }) .collect::<Vec<_>>(); let justifications = board.justifs.clone(); let mut outputs = Vec::new(); for phase3 in phase3s { outputs.push(phase3.run(&mut board, &justifications).await.unwrap()); } assert!(is_all_same(outputs.iter().map(|output| &output.qual))); assert!(is_all_same( outputs[bad..].iter().map(|output| &output.public) )); let pubkey = &outputs[bad].public; for output in &outputs[..bad] { assert_ne!(&output.public, pubkey); } } fn setup<C, S, R: rand::RngCore>( n: usize, t: usize, rng: &mut R, ) -> (InMemoryBoard<C>, Vec<joint_feldman::DKG<C>>) where C: Curve, S: Scheme<Public = <C as Curve>::Point, Private = <C as Curve>::Scalar>, { let keypairs = (0..n).map(|_| S::keypair(rng)).collect::<Vec<_>>(); let nodes = keypairs .iter() .enumerate() 
.map(|(i, (_, public))| Node::<C>::new(i as Idx, public.clone())) .collect::<Vec<_>>(); let group = Group::new(nodes, t).unwrap(); let phase0s = keypairs .iter() .map(|(private, _)| joint_feldman::DKG::new(private.clone(), group.clone()).unwrap()) .collect::<Vec<_>>(); let board = InMemoryBoard::<C>::new(); (board, phase0s) } fn is_all_same<T: PartialEq>(mut arr: impl Iterator<Item = T>) -> bool { let first = arr.next().unwrap(); arr.all(|item| item == first) } }
use super::{ board::BoardPublisher, primitives::{ phases::{Phase0, Phase1, Phase2, Phase3}, types::{BundledJustification, BundledResponses, BundledShares, DKGOutput}, DKGError, }, }; use async_trait::async_trait; use rand::RngCore; use thiserror::Error; use threshold_bls::group::Curve; #[derive(Debug, Error)] pub enum NodeError { #[error("Could not publish to board")] PublisherError, #[error("DKG Error: {0}")] DKGError(#[from] DKGError), } #[derive(Clone, Debug)] pub enum Phase2Result<C: Curve, P: Phase3<C>> { Output(DKGOutput<C>), GoToPhase3(P), } type NodeResult<T> = std::result::Result<T, NodeError>; #[async_trait(?Send)] pub trait DKGPhase<C: Curve, B: BoardPublisher<C>, T> { type Next; async fn run(self, board: &mut B, arg: T) -> NodeResult<Self::Next> where C: 'async_trait, T: 'async_trait; } #[async_trait(?Send)] impl<C, B, R, P> DKGPhase<C, B, &mut R> for P where C: Curve, B: BoardPublisher<C>, R: RngCore, P: Phase0<C>, { type Next = P::Next; async fn run(self, board: &mut B, rng: &'async_trait mut R) -> NodeResult<Self::Next> where C: 'async_trait, { let (next, shares) = self.encrypt_shares(rng)?; if let Some(sh) = shares { board .publish_shares(sh) .await .map_err(|_| NodeError::PublisherError)?; } Ok(next) } } #[async_trait(?Send)] impl<C, B, P> DKGPhase<C, B, &[BundledShares<C>]> for P where C: Curve, B: BoardPublisher<C>, P: Phase1<C>, { type Next = P::Next; async fn run( self, board: &mut B, shares: &'async_trait [BundledShares<C>], ) -> NodeResult<Self::Next> where C: 'async_trait, { let (next, bundle) = self.process_shares(shares, false)?; if let Some(bundle) = bundle { board .publish_responses(bundle) .await .map_err(|_| NodeError::PublisherError)?; } Ok(next) } } #[async_trait(?Send)] impl<C, B, P> DKGPhase<C, B, &[BundledResponses]> for P where C: Curve, B: BoardPublisher<C>, P: Phase2<C>, { type Next = Phase2Result<C, P::Next>; async fn run( self, board: &mut B, responses: &'async_trait [BundledResponses], ) -> NodeResult<Self::Next> where C: 
'async_trait, { match self.process_responses(responses) { Ok(output) => Ok(Phase2Result::Output(output)), Err(next) => { match next { Ok((next, justifications)) => { if let Some(justifications) = justifications { board .publish_justifications(justifications) .await .map_err(|_| NodeError::PublisherError)?; } Ok(Phase2Result::GoToPhase3(next)) } Err(e) => Err(NodeError::DKGError(e)), } } } } } #[async_trait(?Send)] impl<C, B, P> DKGPhase<C, B, &[BundledJustification<C>]> for P where C: Curve, B: BoardPublisher<C>, P: Phase3<C>, { type Next = DKGOutput<C>; async fn run( self, _: &mut B, responses: &'async_trait [BundledJustification<C>], ) -> NodeResult<Self::Next> where C: 'async_trait, { Ok(self.process_justifications(responses)?) } } #[cfg(test)] mod tests { use super::*; use crate::{ primitives::{ group::{Group, Node}, joint_feldman, }, test_helpers::InMemoryBoard, }; use threshold_bls::{ curve::bls12381::{self, PairingCurve as BLS12_381}, curve::zexe::{self as bls12_377, PairingCurve as BLS12_377}, poly::Idx, sig::{BlindThresholdScheme, G1Scheme, G2Scheme, Scheme, SignatureScheme, ThresholdScheme}, }; fn bad_phase0<C: Curve, R: RngCore, P: Phase0<C>>(phase0: P, rng: &mut R) -> P::Next { let (next, _) = phase0.encrypt_shares(rng).unwrap(); next } #[tokio::test] async fn dkg_sign_e2e() { let (t, n) = (3, 5); dkg_sign_e2e_curve::<bls12381::Curve, G1Scheme<BLS12_381>>(n, t).await; dkg_sign_e2e_curve::<bls12381::G2Curve, G2Scheme<BLS12_381>>(n, t).await; dkg_sign_e2e_curve::<bls12_377::G1Curve, G1Scheme<BLS12_377>>(n, t).await; dkg_sign_e2e_curve::<bls12_377::G2Curve, G2Scheme<BLS12_377>>(n, t).await; } async fn dkg_sign_e2e_curve<C, S>(n: usize, t: usize) where C: Curve, S: Scheme<Public = <C as Curve>::Point, Private = <C as Curve>::Scalar> + BlindThresholdScheme + ThresholdScheme + SignatureScheme, { let msg = rand::random::<[u8; 32]>().to_vec(); let outputs = run_dkg::<C, S>(n, t).await; let (token, blinded_msg) = S::blind_msg(&msg[..], &mut rand::thread_rng()); 
let partial_sigs = outputs .iter() .map(|output| S::sign_blind_partial(&output.share, &blinded_msg[..]).unwrap()) .collect::<Vec<_>>(); let blinded_sig = S::aggregate(t, &partial_sigs).unwrap(); let unblinded_sig = S::unblind_sig(&token, &blinded_sig).unwrap(); let pubkey = outputs[0].public.public_key(); S::verify(&pubkey, &msg, &unblinded_sig).unwrap(); } async fn run_dkg<C, S>(n: usize, t: usize) -> Vec<DKGOutput<C>> where C: Curve, S: Scheme<Public = <C as Curve>::Point, Private = <C as Curve>::Scalar>, { let rng = &mut rand::thread_rng();
#[tokio::test] async fn not_enough_validator_shares() { let (t, n) = (6, 10); let bad = t + 1; let honest = n - bad; let rng = &mut rand::thread_rng(); let (mut board, phase0s) = setup::<bls12_377::G1Curve, G1Scheme<BLS12_377>, _>(n, t, rng); let mut phase1s = Vec::new(); for (i, phase0) in phase0s.into_iter().enumerate() { let phase1 = if i < bad { bad_phase0(phase0, rng) } else { phase0.run(&mut board, rng).await.unwrap() }; phase1s.push(phase1); } let shares = board.shares.clone(); let mut errs = Vec::new(); for phase1 in phase1s { let err = match phase1.run(&mut board, &shares).await.unwrap_err() { NodeError::DKGError(err) => err, _ => panic!("should get dkg error"), }; errs.push(err); } for err in &errs[..bad] { match err { DKGError::NotEnoughValidShares(got, required) => { assert_eq!(*got, honest); assert_eq!(*required, t); } _ => panic!("should not get here"), }; } for err in &errs[bad..] { match err { DKGError::NotEnoughValidShares(got, required) => { assert_eq!(*got, honest - 1); assert_eq!(*required, t); } _ => panic!("should not get here"), }; } } #[tokio::test] async fn dkg_phase3() { let (t, n) = (5, 8); let bad = 2; let rng = &mut rand::thread_rng(); let (mut board, phase0s) = setup::<bls12_377::G1Curve, G1Scheme<BLS12_377>, _>(n, t, rng); let mut phase1s = Vec::new(); for (i, phase0) in phase0s.into_iter().enumerate() { let phase1 = if i < bad { bad_phase0(phase0, rng) } else { phase0.run(&mut board, rng).await.unwrap() }; phase1s.push(phase1); } let shares = board.shares.clone(); let mut phase2s = Vec::new(); for phase1 in phase1s { phase2s.push(phase1.run(&mut board, &shares).await.unwrap()); } let responses = board.responses.clone(); let mut results = Vec::new(); for phase2 in phase2s { results.push(phase2.run(&mut board, &responses).await.unwrap()); } let phase3s = results .into_iter() .map(|res| match res { Phase2Result::GoToPhase3(p3) => p3, _ => unreachable!("should not get here"), }) .collect::<Vec<_>>(); let justifications = 
board.justifs.clone(); let mut outputs = Vec::new(); for phase3 in phase3s { outputs.push(phase3.run(&mut board, &justifications).await.unwrap()); } assert!(is_all_same(outputs.iter().map(|output| &output.qual))); assert!(is_all_same( outputs[bad..].iter().map(|output| &output.public) )); let pubkey = &outputs[bad].public; for output in &outputs[..bad] { assert_ne!(&output.public, pubkey); } } fn setup<C, S, R: rand::RngCore>( n: usize, t: usize, rng: &mut R, ) -> (InMemoryBoard<C>, Vec<joint_feldman::DKG<C>>) where C: Curve, S: Scheme<Public = <C as Curve>::Point, Private = <C as Curve>::Scalar>, { let keypairs = (0..n).map(|_| S::keypair(rng)).collect::<Vec<_>>(); let nodes = keypairs .iter() .enumerate() .map(|(i, (_, public))| Node::<C>::new(i as Idx, public.clone())) .collect::<Vec<_>>(); let group = Group::new(nodes, t).unwrap(); let phase0s = keypairs .iter() .map(|(private, _)| joint_feldman::DKG::new(private.clone(), group.clone()).unwrap()) .collect::<Vec<_>>(); let board = InMemoryBoard::<C>::new(); (board, phase0s) } fn is_all_same<T: PartialEq>(mut arr: impl Iterator<Item = T>) -> bool { let first = arr.next().unwrap(); arr.all(|item| item == first) } }
let (mut board, phase0s) = setup::<C, S, _>(n, t, rng); let mut phase1s = Vec::new(); for phase0 in phase0s { phase1s.push(phase0.run(&mut board, rng).await.unwrap()); } let shares = board.shares.clone(); let mut phase2s = Vec::new(); for phase1 in phase1s { phase2s.push(phase1.run(&mut board, &shares).await.unwrap()); } let responses = board.responses.clone(); let mut results = Vec::new(); for phase2 in phase2s { results.push(phase2.run(&mut board, &responses).await.unwrap()); } let outputs = results .into_iter() .map(|res| match res { Phase2Result::Output(out) => out, Phase2Result::GoToPhase3(_) => unreachable!("should not get here"), }) .collect::<Vec<_>>(); assert!(is_all_same(outputs.iter().map(|output| &output.public))); outputs }
function_block-function_prefix_line
[ { "content": "/// Creates the encrypted shares with the given secret polynomial to the given\n\n/// group.\n\npub fn create_share_bundle<C: Curve, R: RngCore>(\n\n dealer_idx: Idx,\n\n secret: &PrivatePoly<C>,\n\n public: &PublicPoly<C>,\n\n group: &Group<C>,\n\n rng: &mut R,\n\n) -> DKGResult<BundledShares<C>> {\n\n let shares = group\n\n .nodes\n\n .iter()\n\n .map(|n| {\n\n // println!(\"{}\", n.id());\n\n // evaluate the secret polynomial at the node's id\n\n let sec = secret.eval(n.id() as Idx);\n\n\n\n // serialize the evaluation\n\n let buff = bincode::serialize(&sec.value)?;\n\n\n\n // encrypt it\n\n let cipher = ecies::encrypt::<C, _>(n.key(), &buff, rng);\n", "file_path": "crates/dkg-core/src/primitives/common.rs", "rank": 0, "score": 335564.32011739013 }, { "content": "/// Encrypts the message with a public key (curve point) and returns a ciphertext\n\npub fn encrypt<C: Curve, R: RngCore>(to: &C::Point, msg: &[u8], rng: &mut R) -> EciesCipher<C> {\n\n let eph_secret = C::Scalar::rand(rng);\n\n\n\n let mut ephemeral = C::Point::one();\n\n ephemeral.mul(&eph_secret);\n\n\n\n // dh = eph(yG) = eph * public\n\n let mut dh = to.clone();\n\n dh.mul(&eph_secret);\n\n\n\n // derive an ephemeral key from the public key\n\n let ephemeral_key = derive::<C>(&dh);\n\n\n\n // instantiate the AEAD scheme\n\n let aead = ChaCha20Poly1305::new(ephemeral_key.into());\n\n\n\n // generate a random nonce\n\n let mut nonce: [u8; NONCE_LEN] = [0u8; NONCE_LEN];\n\n rng.fill_bytes(&mut nonce);\n\n\n", "file_path": "crates/threshold-bls/src/ecies.rs", "rank": 1, "score": 331172.2965999844 }, { "content": "/// Phase3 is the trait abstracting the final stage of a distributed key\n\n/// generation protocol. 
At this stage, the share holders process the potential\n\n/// justifications, and look if they can finish the protocol.\n\npub trait Phase3<C: Curve>: Debug {\n\n fn process_justifications(\n\n self,\n\n justifs: &[BundledJustification<C>],\n\n ) -> Result<DKGOutput<C>, DKGError>;\n\n}\n", "file_path": "crates/dkg-core/src/primitives/phases.rs", "rank": 2, "score": 306814.71198249655 }, { "content": "/// Phase2 is the trait abstracting the third stage of a distributed key\n\n/// generation computation. At this stage, every participant process the\n\n/// responses, look if they can finish the protocol. If not, dealers look if\n\n/// they have to produce some justifications.\n\n///\n\n/// The return method of this trait is first the `DKGOutput` if the protocol can\n\n/// be finished already. If not, the call returns an error which either contains\n\n/// the next phase and potential justifications or a fatal error that makes this\n\n/// node unable to continue participating in the protocol.\n\npub trait Phase2<C: Curve>: Clone + Debug + Serialize + for<'a> Deserialize<'a> {\n\n type Next: Phase3<C>;\n\n\n\n #[allow(clippy::type_complexity)]\n\n fn process_responses(\n\n self,\n\n responses: &[BundledResponses],\n\n ) -> Result<DKGOutput<C>, DKGResult<(Self::Next, Option<BundledJustification<C>>)>>;\n\n}\n\n\n", "file_path": "crates/dkg-core/src/primitives/phases.rs", "rank": 4, "score": 265946.578435933 }, { "content": "/// Phase1 is the trait abstracting the second step of a distributed key\n\n/// generation computation. 
At this stage, the \"share holders\" nodes decrypt the\n\n/// shares and create responses to broadcast to both dealers and share holders.\n\npub trait Phase1<C: Curve>: Clone + Debug + Serialize + for<'a> Deserialize<'a> {\n\n type Next: Phase2<C>;\n\n\n\n fn process_shares(\n\n self,\n\n bundles: &[BundledShares<C>],\n\n publish_all: bool,\n\n ) -> DKGResult<(Self::Next, Option<BundledResponses>)>;\n\n}\n\n\n", "file_path": "crates/dkg-core/src/primitives/phases.rs", "rank": 5, "score": 265940.201175888 }, { "content": "pub fn get_justification<C: Curve>(\n\n dealer_idx: Idx,\n\n secret: &PrivatePoly<C>,\n\n public: &PublicPoly<C>,\n\n statuses: &StatusMatrix,\n\n) -> Option<BundledJustification<C>> {\n\n // If there were any complaints against our deal, then we should re-evaluate our\n\n // secret polynomial at the indexes where the complaints were, and publish these\n\n // as justifications (i.e. indicating that we are still behaving correctly).\n\n if !statuses.all_true(dealer_idx) {\n\n let justifications = statuses\n\n .get_for_dealer(dealer_idx)\n\n .iter()\n\n .enumerate()\n\n .filter_map(|(i, success)| {\n\n if !success {\n\n // reveal the share\n\n let id = i as Idx;\n\n Some(Justification {\n\n share_idx: id,\n", "file_path": "crates/dkg-core/src/primitives/common.rs", "rank": 6, "score": 241599.74931240655 }, { "content": "/// returns the correct shares destined to the given holder index\n\npub fn internal_process_justifications<C: Curve>(\n\n holder_idx: Idx,\n\n dealers: &Group<C>,\n\n statuses: &mut StatusMatrix,\n\n publics: &PublicInfo<C>,\n\n justifs: &[BundledJustification<C>],\n\n) -> ShareInfo<C> {\n\n let mut valid_shares = ShareInfo::<C>::new();\n\n justifs\n\n .iter()\n\n .filter(|b| dealers.contains_index(b.dealer_idx))\n\n // get only the bundles for which we have a public polynomial for\n\n // i.e. 
only justif for polynomials that have been broadcasted in the\n\n // first phase\n\n .filter_map(|b| publics.get(&b.dealer_idx).map(|public| (b, public)))\n\n .for_each(|(bundle, public)| {\n\n bundle\n\n .justifications\n\n .iter()\n\n // ignore incorrect shares\n", "file_path": "crates/dkg-core/src/primitives/common.rs", "rank": 7, "score": 236864.41860055295 }, { "content": "/// Processes the shares and returns the private share of the user and a public\n\n/// polynomial, as well as the status matrix of the protocol.\n\n///\n\n/// Depending on which variant of the DKG protocol is used, the status\n\n/// matrix responses which correspond to our index may be used in the\n\n/// following way:\n\n///\n\n/// - All responses get broadcast: You assume that shares of other nodes are\n\n/// not good unless you hear otherwise. - Broadcast only responses which\n\n/// are complaints: You assume that shares of other nodes are good unless\n\n/// you hear otherwise.\n\npub fn process_shares_get_all<C: Curve>(\n\n dealers: &Group<C>,\n\n share_holders: &Group<C>,\n\n my_idx: Idx,\n\n my_private: &C::Scalar,\n\n bundles: &[BundledShares<C>],\n\n) -> DKGResult<(ShareInfo<C>, PublicInfo<C>, StatusMatrix)> {\n\n // there are \"old_n\" dealers and for each dealer, \"new_n\" share holders\n\n let mut statuses = StatusMatrix::new(dealers.len(), share_holders.len(), Status::Success);\n\n\n\n // set by default all the shares we could receive as complaint - that puts\n\n // us on the conservative side of only explicitely allowing correct shares.\n\n (0..dealers.len())\n\n .filter(|&dealer_idx| dealer_idx != my_idx as usize)\n\n .for_each(|dealer_idx| {\n\n statuses.set(dealer_idx as Idx, my_idx, Status::Complaint);\n\n });\n\n\n\n let mut publics = PublicInfo::<C>::new();\n\n let valid_shares = bundles\n", "file_path": "crates/dkg-core/src/primitives/common.rs", "rank": 8, "score": 236847.4400983063 }, { "content": "pub fn decrypt_and_check_share<C: Curve>(\n\n private_key: 
&C::Scalar,\n\n own_idx: Idx,\n\n dealer_idx: Idx,\n\n public: &PublicPoly<C>,\n\n share: &EncryptedShare<C>,\n\n) -> Result<C::Scalar, DKGError> {\n\n let buff = ecies::decrypt::<C>(private_key, &share.secret).map_err(|err| {\n\n println!(\"ERROR {:?}\", err);\n\n ShareError::InvalidCiphertext(dealer_idx, err)\n\n })?;\n\n\n\n let clear_share: C::Scalar = bincode::deserialize(&buff)?;\n\n\n\n if !share_correct::<C>(own_idx, &clear_share, public) {\n\n println!(\"INCORRECT\");\n\n return Err(ShareError::InvalidShare(dealer_idx).into());\n\n }\n\n\n\n Ok(clear_share)\n\n}\n\n\n", "file_path": "crates/dkg-core/src/primitives/common.rs", "rank": 9, "score": 236833.56321530064 }, { "content": "#[async_trait(?Send)]\n\npub trait BoardPublisher<C>\n\nwhere\n\n C: Curve,\n\n{\n\n /// Error raised when trying to publish data to the board\n\n type Error;\n\n\n\n /// Publishes the shares to the board\n\n async fn publish_shares(&mut self, shares: BundledShares<C>) -> Result<(), Self::Error>\n\n where\n\n C: 'async_trait;\n\n\n\n /// Publishes the responses to the board\n\n async fn publish_responses(&mut self, responses: BundledResponses) -> Result<(), Self::Error>\n\n where\n\n C: 'async_trait;\n\n\n\n /// Publishes the justifications to the board\n\n async fn publish_justifications(\n\n &mut self,\n", "file_path": "crates/dkg-core/src/board.rs", "rank": 10, "score": 236601.5260564802 }, { "content": "pub fn keygen<R>(opts: KeygenOpts, rng: &mut R) -> Result<()>\n\nwhere\n\n R: RngCore,\n\n{\n\n let wallet = Wallet::new(rng);\n\n let output = CeloKeypairJson {\n\n private_key: hex::encode(bincode::serialize(wallet.private_key())?),\n\n address: wallet.address(),\n\n };\n\n\n\n if let Some(path) = opts.path {\n\n let f = File::create(path)?;\n\n serde_json::to_writer(&f, &output)?;\n\n } else {\n\n serde_json::to_writer(std::io::stdout(), &output)?;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "crates/dkg-cli/src/actions.rs", "rank": 11, "score": 233888.63970095385 }, { 
"content": "/// Checks if the commitment to the share corresponds to the public polynomial's\n\n/// evaluated at the given point.\n\npub fn share_correct<C: Curve>(idx: Idx, share: &C::Scalar, public: &PublicPoly<C>) -> bool {\n\n let mut commit = C::Point::one();\n\n commit.mul(&share);\n\n let pub_eval = public.eval(idx);\n\n pub_eval.value == commit\n\n}\n\n\n", "file_path": "crates/dkg-core/src/primitives/common.rs", "rank": 12, "score": 230169.1015627339 }, { "content": "/// Phase0 is the trait abstracting the first step of a distributed key\n\n/// generation computation. At this stage, the \"dealer\" nodes create their\n\n/// shares and encrypt them to the \"share holders\".\n\npub trait Phase0<C: Curve>: Clone + Debug + Serialize + for<'a> Deserialize<'a> {\n\n type Next: Phase1<C>;\n\n\n\n fn encrypt_shares<R: RngCore>(\n\n self,\n\n rng: &mut R,\n\n ) -> DKGResult<(Self::Next, Option<BundledShares<C>>)>;\n\n}\n\n\n", "file_path": "crates/dkg-core/src/primitives/phases.rs", "rank": 13, "score": 223937.3687216847 }, { "content": "/// The minimum allowed threshold is 51%\n\npub fn minimum_threshold(n: usize) -> usize {\n\n (((n as f64) / 2.0) + 1.0) as usize\n\n}\n\n\n\n/// The default threshold is 66%\n\n#[allow(dead_code)]\n\npub(crate) fn default_threshold(n: usize) -> usize {\n\n (((n as f64) * 2.0 / 3.0) + 1.0) as usize\n\n}\n", "file_path": "crates/dkg-core/src/primitives/mod.rs", "rank": 14, "score": 220827.93682765952 }, { "content": "/// The `Scheme` trait contains the basic information of the groups over\n\n/// which the signing operations takes places and a way to create a valid key\n\n/// pair.\n\n///\n\n/// The Scheme trait is necessary to implement for \"simple\" signature scheme as\n\n/// well for threshold based signature scheme.\n\npub trait Scheme: Debug {\n\n /// `Private` represents the field over which private keys are represented.\n\n type Private: Scalar<RHS = Self::Private>;\n\n /// `Public` represents the group over which the public 
keys are\n\n /// represented.\n\n type Public: Point<RHS = Self::Private> + Serialize + DeserializeOwned;\n\n /// `Signature` represents the group over which the signatures are reresented.\n\n type Signature: Point<RHS = Self::Private> + Serialize + DeserializeOwned;\n\n\n\n /// Returns a new fresh keypair usable by the scheme.\n\n fn keypair<R: RngCore>(rng: &mut R) -> (Self::Private, Self::Public) {\n\n let private = Self::Private::rand(rng);\n\n\n\n let mut public = Self::Public::one();\n\n public.mul(&private);\n\n\n\n (private, public)\n\n }\n\n}\n\n\n", "file_path": "crates/threshold-bls/src/sig/sig.rs", "rank": 15, "score": 219084.55457430336 }, { "content": "/// set_statuses set the status of the given responses on the status matrix.\n\npub fn set_statuses<C: Curve>(\n\n holder_idx: Idx,\n\n dealers: &Group<C>,\n\n holders: &Group<C>,\n\n statuses: &mut StatusMatrix,\n\n responses: &[BundledResponses],\n\n) {\n\n // makes sure the API doesn't take into account our own responses!\n\n let not_from_me = responses.iter().filter(|r| r.share_idx != holder_idx);\n\n let valid_idx = not_from_me.filter(|r| {\n\n let good_holder = holders.contains_index(r.share_idx);\n\n let good_dealers = !r\n\n .responses\n\n .iter()\n\n .any(|resp| !dealers.contains_index(resp.dealer_idx));\n\n good_dealers && good_holder\n\n });\n\n\n\n for bundle in valid_idx {\n\n let holder_index = bundle.share_idx;\n\n for response in bundle.responses.iter() {\n\n let dealer_index = response.dealer_idx;\n\n statuses.set(dealer_index, holder_index, response.status);\n\n }\n\n }\n\n}\n\n\n", "file_path": "crates/dkg-core/src/primitives/common.rs", "rank": 16, "score": 211229.18481150296 }, { "content": "/// Decrypts the message with a secret key (curve scalar) and returns the cleartext\n\npub fn decrypt<C: Curve>(private: &C::Scalar, cipher: &EciesCipher<C>) -> Result<Vec<u8>, AError> {\n\n // dh = private * (eph * G) = private * ephPublic\n\n let mut dh = cipher.ephemeral.clone();\n\n 
dh.mul(&private);\n\n\n\n let ephemeral_key = derive::<C>(&dh);\n\n\n\n let aead = ChaCha20Poly1305::new((ephemeral_key).into());\n\n\n\n aead.decrypt(&cipher.nonce.into(), &cipher.aead[..])\n\n}\n\n\n", "file_path": "crates/threshold-bls/src/ecies.rs", "rank": 17, "score": 210406.26479601226 }, { "content": "fn compute_resharing_output<C: Curve>(\n\n info: ReshareInfo<C>,\n\n shares: ShareInfo<C>,\n\n publics: PublicInfo<C>,\n\n statuses: RefCell<StatusMatrix>,\n\n) -> DKGResult<DKGOutput<C>> {\n\n // to compute the final share, we interpolate all the valid shares received\n\n let mut shares_eval: Vec<Eval<C::Scalar>> = shares\n\n .into_iter()\n\n .map(|(idx, sh)| Eval {\n\n value: sh,\n\n index: idx,\n\n })\n\n .collect();\n\n\n\n // only take the first t shares sorted\n\n shares_eval.sort_by(|a, b| a.index.cmp(&b.index));\n\n let shares_indexes = shares_eval.iter().map(|e| e.index).collect::<Vec<Idx>>();\n\n let shortened_evals = shares_eval\n\n .into_iter()\n", "file_path": "crates/dkg-core/src/primitives/resharing.rs", "rank": 18, "score": 197174.08907512878 }, { "content": "/// A curve equipped with a bilinear pairing operation.\n\npub trait PairingCurve: Debug {\n\n type Scalar: Scalar<RHS = Self::Scalar>;\n\n\n\n type G1: Point<RHS = Self::Scalar>;\n\n\n\n type G2: Point<RHS = Self::Scalar>;\n\n\n\n type GT: Element;\n\n\n\n /// Perfors a pairing operation between the 2 group elements\n\n fn pair(a: &Self::G1, b: &Self::G2) -> Self::GT;\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\n/// Helper which binds together a scalar with a group type to form a curve\n\npub struct CurveFrom<S: Scalar, P: Point> {\n\n s: PhantomData<S>,\n\n p: PhantomData<P>,\n\n}\n\n\n", "file_path": "crates/threshold-bls/src/group.rs", "rank": 19, "score": 196520.33692328658 }, { "content": "fn setup<C, S, R: rand::RngCore>(\n\n n: usize,\n\n t: usize,\n\n rng: &mut R,\n\n) -> (InMemoryBoard<C>, Vec<joint_feldman::DKG<C>>)\n\nwhere\n\n C: Curve,\n\n // We need to bind the Curve's 
Point and Scalars to the Scheme\n\n S: Scheme<Public = <C as Curve>::Point, Private = <C as Curve>::Scalar>,\n\n{\n\n // generate a keypair per participant\n\n let keypairs = (0..n).map(|_| S::keypair(rng)).collect::<Vec<_>>();\n\n // keypairs\n\n // .iter()\n\n // .for_each(|(private, public)| println!(\"{} {}\", private, public));\n\n\n\n let nodes = keypairs\n\n .iter()\n\n .enumerate()\n\n .map(|(i, (_, public))| {\n", "file_path": "crates/randcast-mock-demo/src/main.rs", "rank": 20, "score": 191372.79037451127 }, { "content": "/// A group holds functionalities to create scalar and points related; it is\n\n/// similar to the Engine definition, just much more simpler.\n\npub trait Curve: Clone + Debug + Send + Sync {\n\n /// The curve's scalar\n\n type Scalar: Scalar<RHS = Self::Scalar>;\n\n\n\n /// The curve's point\n\n type Point: Point<RHS = Self::Scalar>;\n\n\n\n /// scalar returns the identity element of the field.\n\n fn scalar() -> Self::Scalar {\n\n Self::Scalar::new()\n\n }\n\n\n\n /// point returns the default additive generator of the group.\n\n fn point() -> Self::Point {\n\n Self::Point::one()\n\n }\n\n}\n\n\n", "file_path": "crates/threshold-bls/src/group.rs", "rank": 21, "score": 178256.53727331362 }, { "content": "fn write_output<C: Curve, W: Write>(writer: W, out: &DKGOutput<C>) -> Result<()> {\n\n let output = OutputJson {\n\n public_key: hex::encode(&bincode::serialize(&out.public.public_key())?),\n\n public_polynomial: hex::encode(&bincode::serialize(&out.public)?),\n\n share: hex::encode(&bincode::serialize(&out.share)?),\n\n };\n\n serde_json::to_writer(writer, &output)?;\n\n Ok(())\n\n}\n", "file_path": "crates/dkg-cli/src/actions.rs", "rank": 22, "score": 176493.31418702693 }, { "content": "pub fn compute_bundle_response(\n\n my_idx: Idx,\n\n statuses: &StatusMatrix,\n\n publish_all: bool,\n\n) -> Option<BundledResponses> {\n\n let responses = statuses\n\n .get_for_share(my_idx)\n\n .into_iter()\n\n .enumerate()\n\n .map(|(i, b)| Response 
{\n\n dealer_idx: i as Idx,\n\n status: Status::from(b),\n\n });\n\n\n\n let responses = if !publish_all {\n\n // only get the complaints\n\n responses\n\n .filter(|r| !r.status.is_success())\n\n .collect::<Vec<_>>()\n\n } else {\n", "file_path": "crates/dkg-core/src/primitives/common.rs", "rank": 23, "score": 172946.72787874733 }, { "content": "/// SignatureScheme is the trait that defines the operations of a sinature\n\n/// scheme, namely `sign` and `verify`. Below is an example of using the\n\n/// signature scheme based on BLS, using the BLS12-381 curves.\n\n///\n\n/// ```\n\n/// # #[cfg(feature = \"bls12_381\")]\n\n/// # {\n\n/// use rand::prelude::*;\n\n/// use threshold_bls::{sig::{SignatureScheme, Scheme, G2Scheme}, group::{Element, Point}};\n\n/// use threshold_bls::curve::bls12381::PairingCurve as PC;\n\n///\n\n/// let msg = vec![1,9,6,9];\n\n/// let (private,public) = G2Scheme::<PC>::keypair(&mut thread_rng());\n\n/// let signature = G2Scheme::<PC>::sign(&private,&msg).unwrap();\n\n/// match G2Scheme::<PC>::verify(&public, &msg, &signature) {\n\n/// Ok(_) => println!(\"signature is correct!\"),\n\n/// Err(e) => println!(\"signature is invalid: {}\",e),\n\n/// };\n\n/// # }\n\n/// ```\n\n/// Note signature scheme handles the format of the signature itself.\n\npub trait SignatureScheme: Scheme {\n\n /// Error produced when signing a message\n\n type Error: Error;\n\n\n\n /// Signs the message with the provided private key and returns a serialized signature\n\n fn sign(private: &Self::Private, msg: &[u8]) -> Result<Vec<u8>, Self::Error>;\n\n\n\n /// Verifies that the signature on the provided message was produced by the public key\n\n fn verify(public: &Self::Public, msg: &[u8], sig: &[u8]) -> Result<(), Self::Error>;\n\n}\n\n\n\n/// BlindScheme is a signature scheme where the message can be blinded before\n\n/// signing so the signer does not know the real message. 
The signature can\n\n/// later be \"unblinded\" as to reveal a valid signature over the initial\n\n/// message.\n\n///\n\n/// ```\n\n/// # #[cfg(feature = \"bls12_381\")]\n\n/// # {\n\n/// use rand::prelude::*;\n", "file_path": "crates/threshold-bls/src/sig/sig.rs", "rank": 24, "score": 172438.90034355884 }, { "content": "/// // the owner of the message can then unblind the signature to reveal a\n\n/// // regular signature that can be verified using the regular method of the\n\n/// // SignatureScheme.\n\n/// let clear_sig = G2Scheme::<PC>::unblind_sig(&token,&blinded_sig).unwrap();\n\n/// match G2Scheme::<PC>::verify(&public, &msg, &clear_sig) {\n\n/// Ok(_) => println!(\"signature is correct!\"),\n\n/// Err(e) => println!(\"signature is invalid: {}\",e),\n\n/// };\n\n/// # }\n\n/// ```\n\npub trait BlindScheme: Scheme {\n\n /// The blinding factor which will be used to unblind the message\n\n type Token: Serialize + DeserializeOwned;\n\n\n\n /// Error during blinding or unblinding\n\n type Error: Error;\n\n\n\n /// Blinds the provided message using randomness from the provided RNG and returns\n\n /// the blinding factor and the blinded message.\n\n fn blind_msg<R: RngCore>(msg: &[u8], rng: &mut R) -> (Self::Token, Vec<u8>);\n\n\n\n /// Given the blinding factor that was used to blind the provided message, it will\n\n /// unblind it and return the cleartext message\n\n fn unblind_sig(t: &Self::Token, blinded_message: &[u8]) -> Result<Vec<u8>, Self::Error>;\n\n\n\n /// blind_sign is the method that signs the given blinded message and\n\n /// returns a blinded signature.\n\n fn blind_sign(private: &Self::Private, blinded_msg: &[u8]) -> Result<Vec<u8>, Self::Error>;\n\n\n\n /// blind_verify takes the blinded message and the blinded signature and\n", "file_path": "crates/threshold-bls/src/sig/sig.rs", "rank": 25, "score": 172427.860851672 }, { "content": "/// ThresholdScheme is a threshold-based `t-n` signature scheme. 
The security of\n\n/// such a scheme means at least `t` participants are required produce a \"partial\n\n/// signature\" to then produce a regular signature.\n\n/// The `dkg-core` module allows participants to create a distributed private/public key\n\n/// that can be used with implementations `ThresholdScheme`.\n\npub trait ThresholdScheme: Scheme {\n\n /// Error produced when partially signing, aggregating or verifying\n\n type Error: Error;\n\n\n\n /// Partially signs a message with a share of the private key\n\n fn partial_sign(private: &Share<Self::Private>, msg: &[u8]) -> Result<Partial, Self::Error>;\n\n\n\n /// Verifies a partial signature on a message against the public polynomial\n\n fn partial_verify(\n\n public: &Poly<Self::Public>,\n\n msg: &[u8],\n\n partial: &[u8],\n\n ) -> Result<(), Self::Error>;\n\n\n\n /// Aggregates all partials signature together. Note that this method does\n\n /// not verify if the partial signatures are correct or not; it only\n\n /// aggregates them.\n\n fn aggregate(threshold: usize, partials: &[Partial]) -> Result<Vec<u8>, Self::Error>;\n\n}\n\n\n", "file_path": "crates/threshold-bls/src/sig/sig.rs", "rank": 26, "score": 172418.0341067642 }, { "content": " /// BLSScheme is an internal trait that encompasses the common work between a\n\n /// BLS signature over G1 or G2.\n\n pub trait BLSScheme: Scheme {\n\n /// Returns sig = msg^{private}. 
The message MUST be hashed before this call.\n\n fn internal_sign(\n\n private: &Self::Private,\n\n msg: &[u8],\n\n should_hash: bool,\n\n ) -> Result<Vec<u8>, BLSError> {\n\n let mut h = if should_hash {\n\n let mut h = Self::Signature::new();\n\n h.map(msg).map_err(|_| BLSError::HashingError)?;\n\n h\n\n } else {\n\n bincode::deserialize_from(msg)?\n\n };\n\n\n\n h.mul(private);\n\n\n\n let serialized = bincode::serialize(&h)?;\n\n Ok(serialized)\n\n }\n", "file_path": "crates/threshold-bls/src/sig/bls.rs", "rank": 27, "score": 172415.1163472794 }, { "content": "fn deserialize_field<'de, D, C>(deserializer: D) -> Result<C, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n C: CanonicalDeserialize + ConstantSerializedSize,\n\n{\n\n struct FieldVisitor<C>(PhantomData<C>);\n\n\n\n impl<'de, C> Visitor<'de> for FieldVisitor<C>\n\n where\n\n C: CanonicalDeserialize + ConstantSerializedSize,\n\n {\n\n type Value = C;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"a valid group element\")\n\n }\n\n\n\n fn visit_seq<S>(self, mut seq: S) -> Result<C, S::Error>\n\n where\n\n S: SeqAccess<'de>,\n", "file_path": "crates/threshold-bls/src/curve/zexe.rs", "rank": 28, "score": 168021.1679241759 }, { "content": "fn deserialize_group<'de, D, C>(deserializer: D) -> Result<C, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n C: ProjectiveCurve,\n\n C::Affine: CanonicalDeserialize + ConstantSerializedSize,\n\n{\n\n struct GroupVisitor<C>(PhantomData<C>);\n\n\n\n impl<'de, C> Visitor<'de> for GroupVisitor<C>\n\n where\n\n C: ProjectiveCurve,\n\n C::Affine: CanonicalDeserialize + ConstantSerializedSize,\n\n {\n\n type Value = C;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"a valid group element\")\n\n }\n\n\n\n fn visit_seq<S>(self, mut seq: S) -> Result<C, S::Error>\n", "file_path": "crates/threshold-bls/src/curve/zexe.rs", "rank": 29, "score": 168021.16792417588 }, { 
"content": "fn serialize_field<S, C>(c: &C, s: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n C: CanonicalSerialize,\n\n{\n\n let len = c.serialized_size();\n\n let mut bytes = Vec::with_capacity(len);\n\n c.serialize(&mut bytes)\n\n .map_err(SerializationError::custom)?;\n\n\n\n let mut tup = s.serialize_tuple(len)?;\n\n for byte in &bytes {\n\n tup.serialize_element(byte)?;\n\n }\n\n tup.end()\n\n}\n\n\n", "file_path": "crates/threshold-bls/src/curve/zexe.rs", "rank": 30, "score": 167951.3818979518 }, { "content": "fn serialize_group<S, C>(c: &C, s: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n C: ProjectiveCurve,\n\n C::Affine: CanonicalSerialize,\n\n{\n\n let affine = c.into_affine();\n\n let len = affine.serialized_size();\n\n let mut bytes = Vec::with_capacity(len);\n\n affine\n\n .serialize(&mut bytes)\n\n .map_err(SerializationError::custom)?;\n\n\n\n let mut tup = s.serialize_tuple(len)?;\n\n for byte in &bytes {\n\n tup.serialize_element(byte)?;\n\n }\n\n tup.end()\n\n}\n\n\n", "file_path": "crates/threshold-bls/src/curve/zexe.rs", "rank": 31, "score": 167951.3818979518 }, { "content": "// we verify that the public polynomial is created with the public\n\n// share of the dealer,i.e. 
it's actually a resharing\n\n// if it returns false, we must set the dealer's shares as being complaint, all\n\n// of them since he is not respecting the protocol\n\nfn check_public_resharing<C: Curve>(\n\n dealer_idx: Idx,\n\n deal_poly: &PublicPoly<C>,\n\n group_poly: &PublicPoly<C>,\n\n) -> bool {\n\n // evaluation of the public key the dealer gives us which should be\n\n // the commitment of its current share\n\n let given = deal_poly.public_key();\n\n // computing the current share commitment of the dealer\n\n let expected = &group_poly.eval(dealer_idx).value;\n\n expected == given\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::primitives::{\n\n common::tests::{check2, full_dkg, id_out, id_resp, invalid2, invalid_shares, setup_group},\n\n default_threshold,\n\n };\n", "file_path": "crates/dkg-core/src/primitives/resharing.rs", "rank": 32, "score": 166821.60754006685 }, { "content": "/// BlindThreshold is ThresholdScheme that allows to verify a partially blinded\n\n/// signature as well blinded message, to aggregate them into one blinded signature\n\n/// such that it can be unblinded after and verified as a regular signature.\n\npub trait BlindThresholdScheme: BlindScheme {\n\n type Error: Error;\n\n\n\n /// sign_blind_partial partially signs a blinded message and returns a\n\n /// partial blind signature over it.\n\n fn sign_blind_partial(\n\n private: &Share<Self::Private>,\n\n blinded_msg: &[u8],\n\n ) -> Result<Partial, <Self as BlindThresholdScheme>::Error>;\n\n\n\n /// Given the blinding factor that was used to blind a message that was blind partially\n\n /// signed, it will unblind it and return the cleartext signature\n\n fn unblind_partial_sig(\n\n t: &Self::Token,\n\n partial: &[u8],\n\n ) -> Result<Partial, <Self as BlindThresholdScheme>::Error>;\n\n\n\n /// verify_blind_partial checks if a given blinded partial signature is\n\n /// correct given the blinded message. 
This can be called by any third party\n\n /// given the two parameters which are not private (since they are blinded).\n\n fn verify_blind_partial(\n\n public: &Poly<Self::Public>,\n\n blind_msg: &[u8],\n\n blind_partial: &[u8],\n\n ) -> Result<(), <Self as BlindThresholdScheme>::Error>;\n\n}\n", "file_path": "crates/threshold-bls/src/sig/sig.rs", "rank": 33, "score": 145616.52312489907 }, { "content": "/// Derives an ephemeral key from the provided public key\n\nfn derive<C: Curve>(dh: &C::Point) -> [u8; KEY_LEN] {\n\n let serialized = bincode::serialize(dh).expect(\"could not serialize element\");\n\n\n\n // no salt is fine since we use ephemeral - static DH\n\n let h = Hkdf::<Sha256>::new(None, &serialized);\n\n let mut ephemeral_key = [0u8; KEY_LEN];\n\n h.expand(&DOMAIN, &mut ephemeral_key)\n\n .expect(\"hkdf should not fail\");\n\n\n\n debug_assert!(ephemeral_key.len() == KEY_LEN);\n\n\n\n ephemeral_key\n\n}\n\n\n\n#[cfg(feature = \"bls12_381\")]\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::curve::bls12381::{Curve, Scalar, G1};\n\n use rand::thread_rng;\n", "file_path": "crates/threshold-bls/src/ecies.rs", "rank": 35, "score": 136507.84886950708 }, { "content": "#[derive(Clone, Debug, Serialize, Deserialize)]\n\n#[serde(bound = \"C::Scalar: DeserializeOwned\")]\n\nstruct ReshareInfo<C: Curve> {\n\n private_key: C::Scalar,\n\n public_key: C::Point,\n\n // our previous index in the group - it can be none if we are a new member\n\n prev_index: Option<Idx>,\n\n // previous group on which to reshare\n\n prev_group: Group<C>,\n\n // previous group distributed public polynomial\n\n prev_public: Poly<C::Point>,\n\n // secret and public polynomial of a dealer\n\n secret: Option<Poly<C::Scalar>>,\n\n public: Option<Poly<C::Point>>,\n\n\n\n // our new index in the group - it can be none if we are a leaving member\n\n new_index: Option<Idx>,\n\n // new group that is receiving the refreshed shares\n\n new_group: Group<C>,\n\n}\n\n\n\nimpl<C: Curve> 
ReshareInfo<C> {\n", "file_path": "crates/dkg-core/src/primitives/resharing.rs", "rank": 36, "score": 119600.46783765152 }, { "content": "/// Element represents an element of a group with the additive notation\n\n/// which is also equipped with a multiplication transformation.\n\n/// Two implementations are for Scalar which forms a ring so RHS is the same\n\n/// and Point which can be multiplied by a scalar of its prime field.\n\npub trait Element:\n\n Clone + Display + Debug + Eq + Serialize + for<'a> Deserialize<'a> + PartialEq + Send + Sync\n\n{\n\n /// The right-hand-side argument for multiplication\n\n type RHS;\n\n\n\n /// Returns the zero element of the group\n\n fn new() -> Self;\n\n\n\n /// Returns the one element of the group\n\n fn one() -> Self;\n\n\n\n /// Adds the RHS element to the LHS element in place\n\n fn add(&mut self, s2: &Self);\n\n\n\n /// Multiplies the LHS element by the RHS element in place\n\n fn mul(&mut self, mul: &Self::RHS);\n\n\n\n /// Samples a random element using the provided RNG\n\n fn rand<R: RngCore>(rng: &mut R) -> Self;\n\n\n\n /// Returns the zero element of the group\n\n fn zero() -> Self {\n\n Self::new()\n\n }\n\n}\n\n\n", "file_path": "crates/threshold-bls/src/group.rs", "rank": 37, "score": 119071.63373966425 }, { "content": "pub trait Views {\n\n fn get_last_output(&self) -> u64;\n\n\n\n fn get_node(&self, id_address: String) -> &Node;\n\n\n\n fn get_group(&self, index: usize) -> &Group;\n\n\n\n fn valid_group_indices(&self) -> Vec<usize>;\n\n\n\n fn pending_signature_tasks(&self) -> Vec<&SignatureTask>;\n\n\n\n fn verifiable_signature_rewards(&self) -> Vec<&SignatureReward>;\n\n}\n\n\n\nimpl Internal for Controller {\n\n fn freeze_node(&mut self, id_address: &str, pending_until_block: usize) {\n\n let node = self.nodes.get_mut(id_address).unwrap();\n\n node.state = false;\n\n node.pending_until_block = pending_until_block;\n\n // regroup which this node belongs to\n", "file_path": 
"crates/randcast-mock-demo/src/contract.rs", "rank": 38, "score": 116904.538737806 }, { "content": "pub trait Transactions {\n\n fn node_register(\n\n &mut self,\n\n id_address: String,\n\n id_public_key: Vec<u8>,\n\n endpoint: String,\n\n reward_address: String,\n\n ) -> bool;\n\n\n\n fn node_quit(&mut self, id_address: String);\n\n\n\n fn node_activate(&mut self, id_address: String);\n\n\n\n fn redeem(&mut self, id_address: String);\n\n\n\n fn claim(&mut self, id_address: String);\n\n\n\n fn commit_dkg(\n\n &mut self,\n\n id_address: String,\n", "file_path": "crates/randcast-mock-demo/src/contract.rs", "rank": 39, "score": 116904.538737806 }, { "content": "fn is_all_same<T: PartialEq>(mut arr: impl Iterator<Item = T>) -> bool {\n\n let first = arr.next().unwrap();\n\n arr.all(|item| item == first)\n\n}\n", "file_path": "crates/randcast-mock-demo/src/main.rs", "rank": 40, "score": 115749.8295157289 }, { "content": "pub trait MockHelper {\n\n fn emit_dkg_task(&self) -> &DKGTask;\n\n\n\n fn emit_signature_task(&self) -> &SignatureTask;\n\n\n\n fn mine(&mut self, block_number: usize);\n\n}\n\n\n", "file_path": "crates/randcast-mock-demo/src/contract.rs", "rank": 41, "score": 114855.78397756847 }, { "content": "/// Scalar can be multiplied by only a Scalar, no other elements.\n\npub trait Scalar: Element {\n\n fn set_int(&mut self, i: u64);\n\n fn inverse(&self) -> Option<Self>;\n\n fn negate(&mut self);\n\n fn sub(&mut self, other: &Self);\n\n // TODO\n\n}\n\n\n", "file_path": "crates/threshold-bls/src/group.rs", "rank": 42, "score": 113720.11575162958 }, { "content": "/// Basic point functionality that can be multiplied by a scalar\n\npub trait Point: Element {\n\n /// Error which may occur while mapping to the group\n\n type Error: Debug;\n\n\n\n /// Maps the provided data to a group element\n\n fn map(&mut self, data: &[u8]) -> Result<(), <Self as Point>::Error>;\n\n}\n\n\n", "file_path": "crates/threshold-bls/src/group.rs", "rank": 43, "score": 113720.11575162958 
}, { "content": "type ZG1 = <zexe::Bls12_377 as PairingEngine>::G1Projective;\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]\n\npub struct G1(\n\n #[serde(deserialize_with = \"deserialize_group\")]\n\n #[serde(serialize_with = \"serialize_group\")]\n\n ZG1,\n\n);\n\n\n", "file_path": "crates/threshold-bls/src/curve/zexe.rs", "rank": 44, "score": 101582.78596732418 }, { "content": "type ZG2 = <zexe::Bls12_377 as PairingEngine>::G2Projective;\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]\n\npub struct G2(\n\n #[serde(deserialize_with = \"deserialize_group\")]\n\n #[serde(serialize_with = \"serialize_group\")]\n\n ZG2,\n\n);\n\n\n\n#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]\n\npub struct GT(\n\n #[serde(deserialize_with = \"deserialize_field\")]\n\n #[serde(serialize_with = \"serialize_field\")]\n\n <zexe::Bls12_377 as PairingEngine>::Fqk,\n\n);\n\n\n\nimpl Element for Scalar {\n\n type RHS = Scalar;\n\n\n\n fn new() -> Self {\n", "file_path": "crates/threshold-bls/src/curve/zexe.rs", "rank": 45, "score": 101582.78596732418 }, { "content": "fn parse_bundle<D: serde::de::DeserializeOwned>(bundle: &[Vec<u8>]) -> Result<Vec<D>> {\n\n bundle\n\n .iter()\n\n .filter(|item| !item.is_empty()) // filter out empty items\n\n .map(|item| Ok(bincode::deserialize::<D>(&item)?))\n\n .collect()\n\n}\n\n\n", "file_path": "crates/dkg-cli/src/actions.rs", "rank": 46, "score": 81364.12379479343 }, { "content": "/// Wrappers around the BLS12-381 curve from the [paired](http://docs.rs/paired) crate\n\n#[cfg(feature = \"bls12_381\")]\n\npub mod bls12381;\n\n\n\n/// Wrappers around the BLS12-377 curve from [zexe](https://github.com/scipr-lab/zexe/tree/master/algebra/src/bls12_377)\n\n#[cfg(feature = \"bls12_377\")]\n\npub mod zexe;\n\n\n\nuse thiserror::Error;\n\n\n\n/// Error which unifies all curve specific errors from different libraries\n\n#[derive(Debug, Error)]\n\npub enum CurveError {\n\n #[cfg(feature = 
\"bls12_377\")]\n\n #[error(\"Zexe Error: {0}\")]\n\n BLS12_377(zexe::ZexeError),\n\n\n\n #[cfg(feature = \"bls12_381\")]\n\n #[error(\"Bellman Error: {0}\")]\n\n BLS12_381(bls12381::BellmanError),\n\n}\n", "file_path": "crates/threshold-bls/src/curve/mod.rs", "rank": 47, "score": 79412.03037386324 }, { "content": "/// Primitives for grouping together vectors of nodes with an associated threshold\n\npub mod group;\n\npub use group::*;\n\n\n\npub(crate) mod phases;\n\npub use phases::*;\n\n\n\npub mod types;\n\npub use types::*;\n\n\n\n/// 2D binary array utilities for tracking successful (or not) participation in the DKG\n\npub(crate) mod status;\n\n\n\npub mod joint_feldman;\n\n\n\npub mod resharing;\n\n\n\nmod common;\n\n\n\nmod errors;\n\npub use errors::{DKGError, DKGResult, ShareError};\n\n\n\n/// The minimum allowed threshold is 51%\n", "file_path": "crates/dkg-core/src/primitives/mod.rs", "rank": 48, "score": 78206.81605409553 }, { "content": "use crate::primitives::{group::Group, status::Status};\n\n\n\nuse serde::{de::DeserializeOwned, Deserialize, Serialize};\n\nuse std::fmt::Debug;\n\nuse threshold_bls::{\n\n ecies::EciesCipher,\n\n group::Curve,\n\n poly::{Idx, PublicPoly},\n\n sig::Share,\n\n};\n\n\n\n/// DKGOutput is the final output of the DKG protocol in case it runs\n\n/// successfully.\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\n#[serde(bound = \"C::Scalar: DeserializeOwned\")]\n\npub struct DKGOutput<C: Curve> {\n\n /// The list of nodes that successfully ran the protocol until the end\n\n pub qual: Group<C>,\n\n /// The distributed public key\n\n pub public: PublicPoly<C>,\n", "file_path": "crates/dkg-core/src/primitives/types.rs", "rank": 49, "score": 78147.78357480241 }, { "content": " pub share_idx: Idx,\n\n /// A vector of responses from each share creator\n\n pub responses: Vec<Response>,\n\n}\n\n\n\n/// A `Justification` contains the share of the share holder that issued a\n\n/// complaint, in plaintext.\n\n#[derive(Debug, Clone, 
Serialize, Deserialize)]\n\n#[serde(bound = \"C::Scalar: DeserializeOwned\")]\n\npub struct Justification<C: Curve> {\n\n /// The share holder's index\n\n pub share_idx: Idx,\n\n /// The plaintext share\n\n pub share: C::Scalar,\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\n#[serde(bound = \"C::Scalar: DeserializeOwned\")]\n\n/// A BundledJustification is broadcast by a dealer and contains the justifications\n\n/// they have received along with their corresponding Public polynomial\n", "file_path": "crates/dkg-core/src/primitives/types.rs", "rank": 50, "score": 78139.99851771707 }, { "content": "pub struct BundledJustification<C: Curve> {\n\n /// The dealer's index\n\n pub dealer_idx: Idx,\n\n /// The justifications\n\n pub justifications: Vec<Justification<C>>,\n\n /// The public polynomial\n\n pub public: PublicPoly<C>,\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\n/// A response which gets generated when processing the shares from Phase 1\n\npub struct Response {\n\n /// The index of the dealer (the person that created the share)\n\n pub dealer_idx: Idx,\n\n /// The status of the response (whether it suceeded or if there were complaints)\n\n pub status: Status,\n\n}\n", "file_path": "crates/dkg-core/src/primitives/types.rs", "rank": 51, "score": 78139.49879434863 }, { "content": "/// `share_idx`-th participant. 
When receiving the share, if the participant has\n\n/// the same specified index, the corresponding dkg state decrypts the share using\n\n/// the participant's private key.\n\n#[derive(Clone, Debug, Serialize, Deserialize)]\n\n#[serde(bound = \"C::Scalar: DeserializeOwned\")]\n\npub struct EncryptedShare<C: Curve> {\n\n /// The index of the participant this share belongs to\n\n pub share_idx: Idx,\n\n /// The ECIES encrypted share\n\n pub secret: EciesCipher<C>,\n\n}\n\n\n\n/// A `BundledResponses` is sent during the second phase of the protocol by all\n\n/// participants that have received invalid or inconsistent shares (all statuses\n\n/// are `Complaint`). The bundles contains the index of the recipient of the\n\n/// shares, the one that created the response. Each `Response` contains the\n\n/// index of the participant that created the share (a *dealer*),\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct BundledResponses {\n\n /// share_idx is the index of the node that received the shares\n", "file_path": "crates/dkg-core/src/primitives/types.rs", "rank": 52, "score": 78139.2636845378 }, { "content": " /// The private share which corresponds to the participant's index\n\n pub share: Share<C::Scalar>,\n\n}\n\n\n\n/// BundledShares holds all encrypted shares a dealer creates during the first\n\n/// phase of the protocol.\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\n#[serde(bound = \"C::Scalar: DeserializeOwned\")]\n\npub struct BundledShares<C: Curve> {\n\n /// The dealer's index\n\n pub dealer_idx: Idx,\n\n /// The encrypted shared created by the dealer\n\n pub shares: Vec<EncryptedShare<C>>,\n\n /// The commitment of the secret polynomial created by the dealer.\n\n /// In the context of using a blockchain as a broadcast channel,\n\n /// it can be posted only once.\n\n pub public: PublicPoly<C>,\n\n}\n\n\n\n/// EncryptedShare holds the ECIES encryption of a share destined to the\n", "file_path": "crates/dkg-core/src/primitives/types.rs", 
"rank": 53, "score": 78138.12561371048 }, { "content": "\n\n /// NotEnoughValidShares is raised when the DKG has not successfully\n\n /// processed enough shares because they were invalid. In that case, the DKG\n\n /// can not continue, the protocol MUST be aborted.\n\n #[error(\"only has {0}/{1} valid shares\")]\n\n NotEnoughValidShares(usize, usize),\n\n\n\n #[error(\"only has {0}/{1} required justifications\")]\n\n NotEnoughJustifications(usize, usize),\n\n\n\n /// Rejected is raised when the participant is rejected from the final\n\n /// output\n\n #[error(\"this participant is rejected from the qualified set\")]\n\n Rejected,\n\n\n\n /// BincodeError is raised when de(serialization) by bincode fails\n\n #[error(\"de(serialization failed: {0})\")]\n\n BincodeError(#[from] bincode::Error),\n\n\n\n /// ShareError is raised when a share is being processed\n", "file_path": "crates/dkg-core/src/primitives/errors.rs", "rank": 54, "score": 78103.34832162828 }, { "content": "use thiserror::Error;\n\nuse threshold_bls::{ecies::EciesError, poly, poly::Idx};\n\n\n\n/// Result type alias which returns `DKGError`\n\npub type DKGResult<A> = Result<A, DKGError>;\n\n\n\n#[derive(Debug, Error)]\n\n/// Errors which may occur during the DKG\n\npub enum DKGError {\n\n /// PublicKeyNotFound is raised when the private key given to the DKG init\n\n /// function does not yield a public key that is included in the group.\n\n #[error(\"public key not found in list of participants\")]\n\n PublicKeyNotFound,\n\n\n\n /// InvalidThreshold is raised when creating a group and specifying an\n\n /// invalid threshold. Either the threshold is too low, inferior to\n\n /// what `minimum_threshold()` returns or is too large (i.e. 
larger than the\n\n /// number of nodes).\n\n #[error(\"threshold {0} is not in range [{1},{2}]\")]\n\n InvalidThreshold(usize, usize, usize),\n", "file_path": "crates/dkg-core/src/primitives/errors.rs", "rank": 55, "score": 78102.81060134023 }, { "content": "/// Error which may occur while processing a share in Phase 1\n\npub enum ShareError {\n\n /// InvalidCipherText returns the error raised when decrypting the encrypted\n\n /// share.\n\n #[error(\"[dealer: {0}] Invalid ciphertext\")]\n\n InvalidCiphertext(Idx, EciesError),\n\n /// InvalidShare is raised when the share does not corresponds to the public\n\n /// polynomial associated.\n\n #[error(\"[dealer: {0}] Share does not match associated public polynomial\")]\n\n InvalidShare(Idx),\n\n /// InvalidPublicPolynomial is raised when the public polynomial does not\n\n /// have the correct degree. Each public polynomial in the scheme must have\n\n /// a degree equals to `threshold - 1` set for the DKG protocol.\n\n /// The two fields are (1) the degree of the polynomial and (2) the\n\n /// second is the degree it should be,i.e. 
`threshold - 1`.\n\n #[error(\"[dealer: {0}] polynomial does not have the correct degree, got: {1}, expected {2}\")]\n\n InvalidPublicPolynomial(Idx, usize, usize),\n\n}\n", "file_path": "crates/dkg-core/src/primitives/errors.rs", "rank": 56, "score": 78102.23272335972 }, { "content": " #[error(transparent)]\n\n ShareError(#[from] ShareError),\n\n\n\n /// NotDealer is raised when one attempts to call a method of a\n\n /// dealer during a resharing when it is not a member of the current group.\n\n #[error(\"this participant is not a dealer\")]\n\n NotDealer,\n\n\n\n /// NotShareHolder is raised when one attemps to call a method of a share\n\n /// holder during a resharing when it is a not a share holder in the new\n\n /// group.\n\n #[error(\"this participant is not a share holder\")]\n\n NotShareHolder,\n\n\n\n #[error(\"invalid recovery during resharing: {0}\")]\n\n InvalidRecovery(#[from] poly::PolyError),\n\n}\n\n\n\n#[derive(Debug, Error)]\n\n#[allow(clippy::enum_variant_names)]\n", "file_path": "crates/dkg-core/src/primitives/errors.rs", "rank": 57, "score": 78096.34010680858 }, { "content": "// Generates the bindings under `src/`\n\nfn main() {\n\n // Only re-run the builder script if the contract changes\n\n println!(\"cargo:rerun-if-changed={}\", PATH);\n\n\n\n // compile the DKG contract (requires solc on the builder's system)\n\n let contracts = Solc::new(PATH).build_raw().expect(\"could not compile\");\n\n let contract = contracts.get(\"DKG\").expect(\"contract not found\");\n\n\n\n let abi = contract.abi.clone();\n\n\n\n let mut f = File::create(\"dkg.bin\").expect(\"could not create DKG bytecode file\");\n\n f.write_all(contract.bin.as_bytes())\n\n .expect(\"could not write DKG bytecode to the file\");\n\n\n\n // generate type-safe bindings to it\n\n let bindings = Abigen::new(\"DKG\", abi)\n\n .expect(\"could not instantiate Abigen\")\n\n .generate()\n\n .expect(\"could not generate bindings\");\n\n bindings\n\n 
.write_to_file(\"./src/dkg_contract.rs\")\n\n .expect(\"could not write bindings to file\");\n\n}\n", "file_path": "crates/dkg-cli/build.rs", "rank": 58, "score": 69712.07811180361 }, { "content": "trait Internal {\n\n fn freeze_node(&mut self, id_address: &str, pending_until_block: usize);\n\n\n\n fn calculate_hash<T: Hash>(t: &T) -> u64;\n\n}\n\n\n", "file_path": "crates/randcast-mock-demo/src/contract.rs", "rank": 59, "score": 67534.28622351155 }, { "content": "struct DKGNode {\n\n todo!\n\n}\n", "file_path": "crates/randcast-mock-demo/src/node.rs", "rank": 60, "score": 51869.223118822425 }, { "content": " justifications: BundledJustification<C>,\n\n ) -> Result<(), Self::Error>\n\n where\n\n C: 'async_trait;\n\n}\n\n\n\n// Board implementation for all `Write` implementers, leveraging serde/bincode\n\n#[async_trait(?Send)]\n\nimpl<C, W> BoardPublisher<C> for W\n\nwhere\n\n C: Curve,\n\n W: Write,\n\n{\n\n /// Error raised when trying to publish data to the board\n\n type Error = bincode::Error;\n\n\n\n async fn publish_shares(&mut self, shares: BundledShares<C>) -> Result<(), Self::Error>\n\n where\n\n C: 'async_trait,\n\n {\n", "file_path": "crates/dkg-core/src/board.rs", "rank": 61, "score": 46777.8185981864 }, { "content": " serialize_into(self, &shares)\n\n }\n\n\n\n async fn publish_responses(&mut self, responses: BundledResponses) -> Result<(), Self::Error>\n\n where\n\n C: 'async_trait,\n\n {\n\n serialize_into(self, &responses)\n\n }\n\n\n\n async fn publish_justifications(\n\n &mut self,\n\n justifications: BundledJustification<C>,\n\n ) -> Result<(), Self::Error>\n\n where\n\n C: 'async_trait,\n\n {\n\n serialize_into(self, &justifications)\n\n }\n\n}\n", "file_path": "crates/dkg-core/src/board.rs", "rank": 62, "score": 46773.23060818576 }, { "content": "/// # Board\n\n///\n\n/// A board is where DKG participants publish their data for the corresponding DKG\n\n/// phase.\n\nuse super::primitives::types::{BundledJustification, BundledResponses, 
BundledShares};\n\nuse async_trait::async_trait;\n\nuse bincode::serialize_into;\n\nuse std::io::Write;\n\nuse threshold_bls::group::Curve;\n\n\n\n/// Trait which must be implemented for writing to the board. This trait assumes\n\n/// an authenticated channel.\n\n#[async_trait(?Send)]\n", "file_path": "crates/dkg-core/src/board.rs", "rank": 63, "score": 46770.74218052514 }, { "content": "mod blind;\n\npub use blind::{BlindError, Token};\n\n\n\nmod bls;\n\npub use bls::{BLSError, G1Scheme, G2Scheme};\n\n\n\nmod tblind;\n\npub use tblind::BlindThresholdError;\n\n\n\nmod tbls;\n\npub use tbls::{Share, ThresholdError};\n\n\n\n#[allow(clippy::module_inception)]\n\nmod sig;\n\npub use sig::*;\n", "file_path": "crates/threshold-bls/src/sig/mod.rs", "rank": 86, "score": 45386.65342508458 }, { "content": "use super::board::BoardPublisher;\n\nuse super::primitives::types::{BundledJustification, BundledResponses, BundledShares};\n\nuse async_trait::async_trait;\n\nuse threshold_bls::group::Curve;\n\n\n\n/// An in-memory board used for testing\n\npub struct InMemoryBoard<C: Curve> {\n\n pub shares: Vec<BundledShares<C>>,\n\n pub responses: Vec<BundledResponses>,\n\n pub justifs: Vec<BundledJustification<C>>,\n\n}\n\n\n\nimpl<C: Curve> InMemoryBoard<C> {\n\n #[allow(unused)]\n\n pub fn new() -> Self {\n\n Self {\n\n shares: vec![],\n\n responses: vec![],\n\n justifs: vec![],\n\n }\n", "file_path": "crates/dkg-core/src/test_helpers.rs", "rank": 87, "score": 45318.30243942777 }, { "content": " }\n\n}\n\n\n\n#[async_trait(?Send)]\n\nimpl<C: Curve> BoardPublisher<C> for InMemoryBoard<C> {\n\n type Error = ();\n\n\n\n async fn publish_shares(&mut self, bundle: BundledShares<C>) -> Result<(), Self::Error>\n\n where\n\n C: 'async_trait,\n\n {\n\n self.shares.push(bundle);\n\n Ok(())\n\n }\n\n\n\n async fn publish_responses(&mut self, bundle: BundledResponses) -> Result<(), Self::Error>\n\n where\n\n C: 'async_trait,\n\n {\n\n self.responses.push(bundle);\n", "file_path": 
"crates/dkg-core/src/test_helpers.rs", "rank": 88, "score": 45308.93824562864 }, { "content": " Ok(())\n\n }\n\n\n\n async fn publish_justifications(\n\n &mut self,\n\n bundle: BundledJustification<C>,\n\n ) -> Result<(), Self::Error>\n\n where\n\n C: 'async_trait,\n\n {\n\n self.justifs.push(bundle);\n\n Ok(())\n\n }\n\n}\n", "file_path": "crates/dkg-core/src/test_helpers.rs", "rank": 89, "score": 45300.62566278948 }, { "content": "#[cfg(test)]\n\npub mod tests {\n\n use crate::{\n\n poly::{Idx, Poly},\n\n schemes::bls12_381::G1Scheme as SigScheme,\n\n sig::{Scheme, Share, SignatureScheme, ThresholdScheme},\n\n };\n\n\n\n #[test]\n\n fn test_bls381() {\n\n let (n, t) = (5, 3);\n\n // create the private key polynomial\n\n let private_poly = Poly::<<SigScheme as Scheme>::Private>::new(t - 1);\n\n\n\n // Evaluate it at `n` points to generate the shares\n\n let shares = (0..n)\n\n .map(|i| {\n\n let eval = private_poly.eval(i as Idx);\n\n Share {\n\n index: eval.index,\n", "file_path": "crates/threshold-bls/src/test_bls.rs", "rank": 90, "score": 45291.74700584111 }, { "content": " private: eval.value,\n\n }\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n // Get the public polynomial\n\n let public_poly = private_poly.commit();\n\n let threshold_public_key = public_poly.public_key();\n\n\n\n // Generate the partial signatures\n\n let msg = b\"hello\";\n\n\n\n let partials = shares\n\n .iter()\n\n .map(|s| SigScheme::partial_sign(s, &msg[..]).unwrap())\n\n .collect::<Vec<_>>();\n\n\n\n // each partial sig can be partially verified against the public polynomial\n\n partials.iter().for_each(|partial| {\n\n SigScheme::partial_verify(&public_poly, &msg[..], &partial).unwrap();\n", "file_path": "crates/threshold-bls/src/test_bls.rs", "rank": 91, "score": 45279.36308080214 }, { "content": " });\n\n\n\n // generate the threshold sig\n\n let threshold_sig = SigScheme::aggregate(t, &partials).unwrap();\n\n println!(\"{:#?}\", threshold_sig);\n\n\n\n 
SigScheme::verify(&threshold_public_key, &msg[..], &threshold_sig).unwrap();\n\n println!(\"finish.\")\n\n }\n\n}\n", "file_path": "crates/threshold-bls/src/test_bls.rs", "rank": 92, "score": 45273.11737274262 }, { "content": "use crate::group::{self, Element, PairingCurve as PC, Point, Scalar as Sc};\n\nuse ff::{Field, PrimeField};\n\nuse groupy::CurveProjective;\n\nuse paired::bls12_381::{Bls12, Fq12, Fr, FrRepr, G1 as PG1, G2 as PG2};\n\nuse paired::Engine;\n\nuse rand_core::RngCore;\n\nuse std::result::Result;\n\nuse thiserror::Error;\n\n\n\npub type Scalar = Fr;\n\npub type G1 = PG1;\n\npub type G2 = PG2;\n\npub type GT = Fq12;\n\n\n\n#[derive(Debug, Error)]\n\npub enum BellmanError {\n\n #[error(\"decoding: invalid length {0}/{1}\")]\n\n InvalidLength(usize, usize),\n\n #[error(\"IO Error: {0}\")]\n\n IoError(#[from] std::io::Error),\n", "file_path": "crates/threshold-bls/src/curve/bls12381.rs", "rank": 93, "score": 44560.795375035916 }, { "content": " }\n\n\n\n fn rand<R: RngCore>(mut rng: &mut R) -> Self {\n\n Self(ZG2::rand(&mut rng))\n\n }\n\n\n\n fn add(&mut self, s2: &Self) {\n\n self.0.add_assign(s2.0);\n\n }\n\n\n\n fn mul(&mut self, mul: &Scalar) {\n\n self.0.mul_assign(mul.0)\n\n }\n\n}\n\n\n\n/// Implementation of Point using G2 from BLS12-377\n\nimpl Point for G2 {\n\n type Error = ZexeError;\n\n\n\n fn map(&mut self, data: &[u8]) -> Result<(), ZexeError> {\n", "file_path": "crates/threshold-bls/src/curve/zexe.rs", "rank": 94, "score": 44558.47883102837 }, { "content": " }\n\n\n\n fn one() -> Self {\n\n ff::Field::one()\n\n }\n\n fn add(&mut self, s2: &Self) {\n\n self.add_assign(s2);\n\n }\n\n fn mul(&mut self, mul: &GT) {\n\n self.mul_assign(mul)\n\n }\n\n\n\n fn rand<R: RngCore>(rng: &mut R) -> Self {\n\n ff::Field::random(rng)\n\n }\n\n}\n\n\n\n/// alias to BLS12-381's G1 group\n\npub type Curve = group::G1Curve<PairingCurve>;\n\n\n", "file_path": "crates/threshold-bls/src/curve/bls12381.rs", "rank": 95, "score": 44558.24211949621 }, { 
"content": " }\n\n\n\n fn rand<R: RngCore>(rng: &mut R) -> Self {\n\n G2::random(rng)\n\n }\n\n\n\n fn add(&mut self, s2: &Self) {\n\n self.add_assign(s2);\n\n }\n\n\n\n fn mul(&mut self, mul: &Scalar) {\n\n self.mul_assign(FrRepr::from(*mul))\n\n }\n\n}\n\n\n\n/// Implementation of Point using G1 from BLS12-381\n\nimpl Point for G1 {\n\n type Error = ();\n\n\n\n fn map(&mut self, data: &[u8]) -> Result<(), ()> {\n", "file_path": "crates/threshold-bls/src/curve/bls12381.rs", "rank": 96, "score": 44556.81954255238 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use serde::{de::DeserializeOwned, Serialize};\n\n use static_assertions::assert_impl_all;\n\n\n\n assert_impl_all!(G1: Serialize, DeserializeOwned, Clone);\n\n assert_impl_all!(G2: Serialize, DeserializeOwned, Clone);\n\n assert_impl_all!(GT: Serialize, DeserializeOwned, Clone);\n\n assert_impl_all!(Scalar: Serialize, DeserializeOwned, Clone);\n\n\n\n #[test]\n\n fn serialize_group() {\n\n serialize_group_test::<G1>(48);\n\n serialize_group_test::<G2>(96);\n\n }\n\n\n\n fn serialize_group_test<E: Element>(size: usize) {\n\n let rng = &mut rand::thread_rng();\n\n let sig = E::rand(rng);\n", "file_path": "crates/threshold-bls/src/curve/zexe.rs", "rank": 97, "score": 44555.805307594965 }, { "content": "\n\n fn sub(&mut self, other: &Self) {\n\n self.sub_assign(other);\n\n }\n\n}\n\n\n\n/// G1 points can be multiplied by Fr elements\n\nimpl Element for G1 {\n\n type RHS = Scalar;\n\n\n\n fn new() -> Self {\n\n groupy::CurveProjective::zero()\n\n }\n\n\n\n fn one() -> Self {\n\n groupy::CurveProjective::one()\n\n }\n\n\n\n fn rand<R: RngCore>(rng: &mut R) -> Self {\n\n G1::random(rng)\n", "file_path": "crates/threshold-bls/src/curve/bls12381.rs", "rank": 98, "score": 44555.28762105777 }, { "content": " self.mul_assign(mul)\n\n }\n\n fn rand<R: RngCore>(rng: &mut R) -> Self {\n\n Fr::random(rng)\n\n }\n\n}\n\n\n\n/// Implementation of Scalar using field elements used in BLS12-381\n\nimpl Sc for 
Scalar {\n\n fn set_int(&mut self, i: u64) {\n\n *self = Fr::from_repr(FrRepr::from(i)).unwrap();\n\n }\n\n\n\n fn inverse(&self) -> Option<Self> {\n\n ff::Field::inverse(self)\n\n }\n\n\n\n fn negate(&mut self) {\n\n ff::Field::negate(self);\n\n }\n", "file_path": "crates/threshold-bls/src/curve/bls12381.rs", "rank": 99, "score": 44554.4850162553 } ]
Rust
pageserver/src/layered_repository/filename.rs
libzenith/zenith
4b3b19f4448f650b918230d972e2ec68815dcbdb
use crate::config::PageServerConf; use crate::layered_repository::storage_layer::SegmentTag; use crate::relish::*; use std::fmt; use std::path::PathBuf; use zenith_utils::lsn::Lsn; #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)] pub struct DeltaFileName { pub seg: SegmentTag, pub start_lsn: Lsn, pub end_lsn: Lsn, pub dropped: bool, } impl DeltaFileName { pub fn parse_str(fname: &str) -> Option<Self> { let rel; let mut parts; if let Some(rest) = fname.strip_prefix("rel_") { parts = rest.split('_'); rel = RelishTag::Relation(RelTag { spcnode: parts.next()?.parse::<u32>().ok()?, dbnode: parts.next()?.parse::<u32>().ok()?, relnode: parts.next()?.parse::<u32>().ok()?, forknum: parts.next()?.parse::<u8>().ok()?, }); } else if let Some(rest) = fname.strip_prefix("pg_xact_") { parts = rest.split('_'); rel = RelishTag::Slru { slru: SlruKind::Clog, segno: u32::from_str_radix(parts.next()?, 16).ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_multixact_members_") { parts = rest.split('_'); rel = RelishTag::Slru { slru: SlruKind::MultiXactMembers, segno: u32::from_str_radix(parts.next()?, 16).ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_multixact_offsets_") { parts = rest.split('_'); rel = RelishTag::Slru { slru: SlruKind::MultiXactOffsets, segno: u32::from_str_radix(parts.next()?, 16).ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_filenodemap_") { parts = rest.split('_'); rel = RelishTag::FileNodeMap { spcnode: parts.next()?.parse::<u32>().ok()?, dbnode: parts.next()?.parse::<u32>().ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_twophase_") { parts = rest.split('_'); rel = RelishTag::TwoPhase { xid: parts.next()?.parse::<u32>().ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_control_checkpoint_") { parts = rest.split('_'); rel = RelishTag::Checkpoint; } else if let Some(rest) = fname.strip_prefix("pg_control_") { parts = rest.split('_'); rel = RelishTag::ControlFile; } else { return None; } let segno = 
parts.next()?.parse::<u32>().ok()?; let seg = SegmentTag { rel, segno }; let start_lsn = Lsn::from_hex(parts.next()?).ok()?; let end_lsn = Lsn::from_hex(parts.next()?).ok()?; let mut dropped = false; if let Some(suffix) = parts.next() { if suffix == "DROPPED" { dropped = true; } else { return None; } } if parts.next().is_some() { return None; } Some(DeltaFileName { seg, start_lsn, end_lsn, dropped, }) } } impl fmt::Display for DeltaFileName { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let basename = match self.seg.rel { RelishTag::Relation(reltag) => format!( "rel_{}_{}_{}_{}", reltag.spcnode, reltag.dbnode, reltag.relnode, reltag.forknum ), RelishTag::Slru { slru: SlruKind::Clog, segno, } => format!("pg_xact_{:04X}", segno), RelishTag::Slru { slru: SlruKind::MultiXactMembers, segno, } => format!("pg_multixact_members_{:04X}", segno), RelishTag::Slru { slru: SlruKind::MultiXactOffsets, segno, } => format!("pg_multixact_offsets_{:04X}", segno), RelishTag::FileNodeMap { spcnode, dbnode } => { format!("pg_filenodemap_{}_{}", spcnode, dbnode) } RelishTag::TwoPhase { xid } => format!("pg_twophase_{}", xid), RelishTag::Checkpoint => "pg_control_checkpoint".to_string(), RelishTag::ControlFile => "pg_control".to_string(), }; write!( f, "{}_{}_{:016X}_{:016X}{}", basename, self.seg.segno, u64::from(self.start_lsn), u64::from(self.end_lsn), if self.dropped { "_DROPPED" } else { "" } ) } } #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)] pub struct ImageFileName { pub seg: SegmentTag, pub lsn: Lsn, } impl ImageFileName { pub fn parse_str(fname: &str) -> Option<Self> { let rel; let mut parts; if let Some(rest) = fname.strip_prefix("rel_") { parts = rest.split('_'); rel = RelishTag::Relation(RelTag { spcnode: parts.next()?.parse::<u32>().ok()?, dbnode: parts.next()?.parse::<u32>().ok()?, relnode: parts.next()?.parse::<u32>().ok()?, forknum: parts.next()?.parse::<u8>().ok()?, }); } else if let Some(rest) = fname.strip_prefix("pg_xact_") { parts = 
rest.split('_'); rel = RelishTag::Slru { slru: SlruKind::Clog, segno: u32::from_str_radix(parts.next()?, 16).ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_multixact_members_") { parts = rest.split('_'); rel = RelishTag::Slru { slru: SlruKind::MultiXactMembers, segno: u32::from_str_radix(parts.next()?, 16).ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_multixact_offsets_") { parts = rest.split('_'); rel = RelishTag::Slru { slru: SlruKind::MultiXactOffsets, segno: u32::from_str_radix(parts.next()?, 16).ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_filenodemap_") { parts = rest.split('_'); rel = RelishTag::FileNodeMap { spcnode: parts.next()?.parse::<u32>().ok()?, dbnode: parts.next()?.parse::<u32>().ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_twophase_") { parts = rest.split('_'); rel = RelishTag::TwoPhase { xid: parts.next()?.parse::<u32>().ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_control_checkpoint_") { parts = rest.split('_'); rel = RelishTag::Checkpoint; } else if let Some(rest) = fname.strip_prefix("pg_control_") { parts = rest.split('_'); rel = RelishTag::ControlFile; } else { return None; } let segno = parts.next()?.parse::<u32>().ok()?; let seg = SegmentTag { rel, segno }; let lsn = Lsn::from_hex(parts.next()?).ok()?; if parts.next().is_some() { return None; } Some(ImageFileName { seg, lsn }) } } impl fmt::Display for ImageFileName { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let basename = match self.seg.rel { RelishTag::Relation(reltag) => format!( "rel_{}_{}_{}_{}", reltag.spcnode, reltag.dbnode, reltag.relnode, reltag.forknum ), RelishTag::Slru { slru: SlruKind::Clog, segno, } => format!("pg_xact_{:04X}", segno), RelishTag::Slru { slru: SlruKind::MultiXactMembers, segno, } => format!("pg_multixact_members_{:04X}", segno), RelishTag::Slru { slru: SlruKind::MultiXactOffsets, segno, } => format!("pg_multixact_offsets_{:04X}", segno), RelishTag::FileNodeMap { spcnode, 
dbnode } => { format!("pg_filenodemap_{}_{}", spcnode, dbnode) } RelishTag::TwoPhase { xid } => format!("pg_twophase_{}", xid), RelishTag::Checkpoint => "pg_control_checkpoint".to_string(), RelishTag::ControlFile => "pg_control".to_string(), }; write!( f, "{}_{}_{:016X}", basename, self.seg.segno, u64::from(self.lsn), ) } } pub enum PathOrConf { Path(PathBuf), Conf(&'static PageServerConf), }
use crate::config::PageServerConf; use crate::layered_repository::storage_layer::SegmentTag; use crate::relish::*; use std::fmt; use std::path::PathBuf; use zenith_utils::lsn::Lsn; #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)] pub struct DeltaFileName { pub seg: SegmentTag, pub start_lsn: Lsn, pub end_lsn: Lsn, pub dropped: bool, } impl DeltaFileName { pub fn parse_str(fname: &str) -> Option<Self> { let rel; let mut parts; if let Some(rest) = fname.strip_prefix("rel_") { parts = rest.split('_'); rel = RelishTag::Relation(RelTag { spcnode: parts.next()?.parse::<u32>().ok()?, dbnode: parts.next()?.parse::<u32>().ok()?, relnode: parts.next()?.parse::<u32>().ok()?, forknum: parts.next()?.parse::<u8>().ok()?, }); } else if let Some(rest) = fname.strip_prefix("pg_xact_") { parts = rest.split('_'); rel = RelishTag::Slru { slru: SlruKind::Clog, segno: u32::from_str_radix(parts.next()?, 16).ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_multixact_members_") { parts = rest.split('_'); rel = RelishTag::Slru { slru: SlruKind::MultiXactMembers, segno: u32::from_str_radix(parts.next()?, 16).ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_multixact_offsets_") { parts = rest.split('_'); rel = RelishTag::Slru { slru: SlruKind::MultiXactOffsets, segno: u32::from_str_radix(parts.next()?, 16).ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_filenodemap_") { parts = rest.split('_'); rel = RelishTag::FileNodeMap { spcnode: parts.next()?.parse::<u32>().ok()?, dbnode: parts.next()?.parse::<u32>().ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_twophase_") { parts = rest.split('_'); rel = RelishTag::TwoPhase { xid: parts.next()?.parse::<u32>().ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_control_checkpoint_") { parts = rest.split('_'); rel = RelishTag::Checkpoint; } else if let Some(rest) = fname.strip_prefix("pg_control_") { parts = rest.split('_'); rel = RelishTag::ControlFile; } else { return None; } let segno = 
parts.next()?.parse::<u32>().ok()?; let seg = SegmentTag { rel, segno }; let start_lsn = Lsn::from_hex(parts.next()?).ok()?; let end_lsn = Lsn::from_hex(parts.next()?).ok()?; let mut dropped = false; if let Some(suffix) = parts.next() { if suffix == "DROPPED" { dropped = true; } else { return None; } } if parts.next().is_some() { return None; } Some(DeltaFileName { seg, start_lsn, end_lsn, dropped, }) } } impl fmt::Display for DeltaFileName { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let basename = match self.seg.rel { RelishTag::Relation(reltag) => format!( "rel_{}_{}_{}_{}", reltag.spcnode, reltag.dbnode, reltag.relnode, reltag.forknum ), RelishTag::Slru { slru: SlruKind::Clog, segno, } => format!("pg_xact_{:04X}", segno), RelishTag::Slru { slru: SlruKind::MultiXactMembers, segno, } => format!("pg_multixact_members_{:04X}", segno), RelishTag::Slru { slru: SlruKind::MultiXactOffsets, segno, } => format!("pg_multixact_offsets_{:04X}", segno), RelishTag::FileNodeMap { spcnode, dbnode } => { format!("pg_filenodemap_{}_{}", spcnode, dbnode) } RelishTag::TwoPhase { xid } => format!("pg_twophase_{}", xid), RelishTag::Checkpoint => "pg_control_checkpoint".to_string(), RelishTag::ControlFile => "pg_control".to_string(), }; write!( f, "{}_{}_{:016X}_{:016X}{}", basename, self.seg.segno, u64::from(self.start_lsn), u64::from(self.end_lsn), if self.dropped { "_DROPPED" } else { "" } ) } } #[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone)] pub struct ImageFileName { pub seg: SegmentTag, pub lsn: Lsn, } impl ImageFileName { pub fn parse_str(fname: &str) -> Option<Self> { let rel; let mut parts; if let Some(rest) = fname.strip_prefix("rel_") { parts = rest.split('_'); rel = RelishTag::Relation(RelTag { spcnode: parts.next()?.parse::<u32>().ok()?, dbnode: parts.next()?.parse::<u32>().ok()?, relnode: parts.next()?.parse::<u32>().ok()?, forknum: parts.next()?.parse::<u8>().ok()?, }); } else if let Some(rest) = fname.strip_prefix("pg_xact_") { parts = 
rest.split('_'); re
} impl fmt::Display for ImageFileName { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let basename = match self.seg.rel { RelishTag::Relation(reltag) => format!( "rel_{}_{}_{}_{}", reltag.spcnode, reltag.dbnode, reltag.relnode, reltag.forknum ), RelishTag::Slru { slru: SlruKind::Clog, segno, } => format!("pg_xact_{:04X}", segno), RelishTag::Slru { slru: SlruKind::MultiXactMembers, segno, } => format!("pg_multixact_members_{:04X}", segno), RelishTag::Slru { slru: SlruKind::MultiXactOffsets, segno, } => format!("pg_multixact_offsets_{:04X}", segno), RelishTag::FileNodeMap { spcnode, dbnode } => { format!("pg_filenodemap_{}_{}", spcnode, dbnode) } RelishTag::TwoPhase { xid } => format!("pg_twophase_{}", xid), RelishTag::Checkpoint => "pg_control_checkpoint".to_string(), RelishTag::ControlFile => "pg_control".to_string(), }; write!( f, "{}_{}_{:016X}", basename, self.seg.segno, u64::from(self.lsn), ) } } pub enum PathOrConf { Path(PathBuf), Conf(&'static PageServerConf), }
l = RelishTag::Slru { slru: SlruKind::Clog, segno: u32::from_str_radix(parts.next()?, 16).ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_multixact_members_") { parts = rest.split('_'); rel = RelishTag::Slru { slru: SlruKind::MultiXactMembers, segno: u32::from_str_radix(parts.next()?, 16).ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_multixact_offsets_") { parts = rest.split('_'); rel = RelishTag::Slru { slru: SlruKind::MultiXactOffsets, segno: u32::from_str_radix(parts.next()?, 16).ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_filenodemap_") { parts = rest.split('_'); rel = RelishTag::FileNodeMap { spcnode: parts.next()?.parse::<u32>().ok()?, dbnode: parts.next()?.parse::<u32>().ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_twophase_") { parts = rest.split('_'); rel = RelishTag::TwoPhase { xid: parts.next()?.parse::<u32>().ok()?, }; } else if let Some(rest) = fname.strip_prefix("pg_control_checkpoint_") { parts = rest.split('_'); rel = RelishTag::Checkpoint; } else if let Some(rest) = fname.strip_prefix("pg_control_") { parts = rest.split('_'); rel = RelishTag::ControlFile; } else { return None; } let segno = parts.next()?.parse::<u32>().ok()?; let seg = SegmentTag { rel, segno }; let lsn = Lsn::from_hex(parts.next()?).ok()?; if parts.next().is_some() { return None; } Some(ImageFileName { seg, lsn }) }
function_block-function_prefixed
[ { "content": "fn check_slru_segno(rel: &RelishTag, expected_slru: SlruKind, expected_segno: u32) -> bool {\n\n if let RelishTag::Slru { slru, segno } = rel {\n\n *slru == expected_slru && *segno == expected_segno\n\n } else {\n\n false\n\n }\n\n}\n\n\n\n/// An error happened in WAL redo\n\n#[derive(Debug, thiserror::Error)]\n\npub enum WalRedoError {\n\n #[error(transparent)]\n\n IoError(#[from] std::io::Error),\n\n\n\n #[error(\"cannot perform WAL redo now\")]\n\n InvalidState,\n\n #[error(\"cannot perform WAL redo for this request\")]\n\n InvalidRequest,\n\n}\n\n\n", "file_path": "pageserver/src/walredo.rs", "rank": 0, "score": 293113.1356273481 }, { "content": "/// Does the given filename look like an ephemeral file?\n\npub fn is_ephemeral_file(filename: &str) -> bool {\n\n if let Some(rest) = filename.strip_prefix(\"ephemeral-\") {\n\n rest.parse::<u32>().is_ok()\n\n } else {\n\n false\n\n }\n\n}\n\n\n\nimpl FileExt for EphemeralFile {\n\n fn read_at(&self, dstbuf: &mut [u8], offset: u64) -> Result<usize, Error> {\n\n // Look up the right page\n\n let blkno = (offset / PAGE_SZ as u64) as u32;\n\n let off = offset as usize % PAGE_SZ;\n\n let len = min(PAGE_SZ - off, dstbuf.len());\n\n\n\n let read_guard;\n\n let mut write_guard;\n\n\n\n let cache = page_cache::get();\n\n let buf = match cache.read_ephemeral_buf(self.file_id, blkno) {\n", "file_path": "pageserver/src/layered_repository/ephemeral_file.rs", "rank": 1, "score": 283646.4654982095 }, { "content": "#[allow(non_snake_case)]\n\npub fn IsXLogFileName(fname: &str) -> bool {\n\n return fname.len() == XLOG_FNAME_LEN && fname.chars().all(|c| c.is_ascii_hexdigit());\n\n}\n\n\n", "file_path": "postgres_ffi/src/xlog_utils.rs", "rank": 2, "score": 280138.8729062946 }, { "content": "#[allow(non_snake_case)]\n\npub fn IsPartialXLogFileName(fname: &str) -> bool {\n\n fname.ends_with(\".partial\") && IsXLogFileName(&fname[0..fname.len() - 8])\n\n}\n\n\n", "file_path": "postgres_ffi/src/xlog_utils.rs", "rank": 3, 
"score": 276779.3200050963 }, { "content": "/// If LSN points to the beginning of the page, then shift it to first record,\n\n/// otherwise align on 8-bytes boundary (required for WAL records)\n\npub fn normalize_lsn(lsn: Lsn, seg_sz: usize) -> Lsn {\n\n if lsn.0 % XLOG_BLCKSZ as u64 == 0 {\n\n let hdr_size = if lsn.0 % seg_sz as u64 == 0 {\n\n XLOG_SIZE_OF_XLOG_LONG_PHD\n\n } else {\n\n XLOG_SIZE_OF_XLOG_SHORT_PHD\n\n };\n\n lsn + hdr_size as u64\n\n } else {\n\n lsn.align()\n\n }\n\n}\n\n\n", "file_path": "postgres_ffi/src/xlog_utils.rs", "rank": 4, "score": 275830.15400386555 }, { "content": "/// Call f() to write body of the message and prepend it with 4-byte len as\n\n/// prescribed by the protocol.\n\nfn write_body<F>(buf: &mut BytesMut, f: F) -> io::Result<()>\n\nwhere\n\n F: FnOnce(&mut BytesMut) -> io::Result<()>,\n\n{\n\n let base = buf.len();\n\n buf.extend_from_slice(&[0; 4]);\n\n\n\n f(buf)?;\n\n\n\n let size = i32::from_usize(buf.len() - base)?;\n\n BigEndian::write_i32(&mut buf[base..], size);\n\n Ok(())\n\n}\n\n\n", "file_path": "zenith_utils/src/pq_proto.rs", "rank": 5, "score": 268798.18660514156 }, { "content": "/// Convert Postgres fork number to the right suffix of the relation data file.\n\npub fn forknumber_to_name(forknum: u8) -> Option<&'static str> {\n\n match forknum {\n\n pg_constants::MAIN_FORKNUM => None,\n\n pg_constants::FSM_FORKNUM => Some(\"fsm\"),\n\n pg_constants::VISIBILITYMAP_FORKNUM => Some(\"vm\"),\n\n pg_constants::INIT_FORKNUM => Some(\"init\"),\n\n _ => Some(\"UNKNOWN FORKNUM\"),\n\n }\n\n}\n\n\n", "file_path": "postgres_ffi/src/relfile_utils.rs", "rank": 6, "score": 266831.69230661914 }, { "content": "fn check_forknum(rel: &RelishTag, expected_forknum: u8) -> bool {\n\n if let RelishTag::Relation(RelTag {\n\n forknum,\n\n spcnode: _,\n\n dbnode: _,\n\n relnode: _,\n\n }) = rel\n\n {\n\n *forknum == expected_forknum\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "pageserver/src/walredo.rs", "rank": 7, "score": 
261376.1534376089 }, { "content": "/// Check that `line` is inside a text file and put it there if it is not.\n\n/// Create file if it doesn't exist.\n\npub fn line_in_file(path: &Path, line: &str) -> Result<bool> {\n\n let mut file = OpenOptions::new()\n\n .read(true)\n\n .write(true)\n\n .create(true)\n\n .append(false)\n\n .open(path)?;\n\n let buf = io::BufReader::new(&file);\n\n let mut count: usize = 0;\n\n\n\n for l in buf.lines() {\n\n if l? == line {\n\n return Ok(false);\n\n }\n\n count = 1;\n\n }\n\n\n\n write!(file, \"{}{}\", \"\\n\".repeat(count), line)?;\n\n Ok(true)\n\n}\n\n\n", "file_path": "compute_tools/src/config.rs", "rank": 8, "score": 261064.1414466643 }, { "content": "pub fn transaction_id_set_status(xid: u32, status: u8, page: &mut BytesMut) {\n\n trace!(\n\n \"handle_apply_request for RM_XACT_ID-{} (1-commit, 2-abort, 3-sub_commit)\",\n\n status\n\n );\n\n\n\n let byteno: usize = ((xid as u32 % pg_constants::CLOG_XACTS_PER_PAGE as u32)\n\n / pg_constants::CLOG_XACTS_PER_BYTE) as usize;\n\n\n\n let bshift: u8 =\n\n ((xid % pg_constants::CLOG_XACTS_PER_BYTE) * pg_constants::CLOG_BITS_PER_XACT as u32) as u8;\n\n\n\n page[byteno] =\n\n (page[byteno] & !(pg_constants::CLOG_XACT_BITMASK << bshift)) | (status << bshift);\n\n}\n\n\n", "file_path": "postgres_ffi/src/nonrelfile_utils.rs", "rank": 9, "score": 253688.3707450895 }, { "content": "pub fn init(log_filename: impl AsRef<Path>, daemonize: bool) -> Result<File> {\n\n // Don't open the same file for output multiple times;\n\n // the different fds could overwrite each other's output.\n\n let log_file = OpenOptions::new()\n\n .create(true)\n\n .append(true)\n\n .open(&log_filename)\n\n .with_context(|| format!(\"failed to open {:?}\", log_filename.as_ref()))?;\n\n\n\n let default_filter_str = \"info\";\n\n\n\n // We fall back to printing all spans at info-level or above if\n\n // the RUST_LOG environment variable is not set.\n\n let env_filter = 
tracing_subscriber::EnvFilter::try_from_default_env()\n\n .unwrap_or_else(|_| tracing_subscriber::EnvFilter::new(default_filter_str));\n\n\n\n let base_logger = tracing_subscriber::fmt()\n\n .with_env_filter(env_filter)\n\n .with_target(false) // don't include event targets\n\n .with_ansi(false); // don't use colors in log file;\n", "file_path": "zenith_utils/src/logging.rs", "rank": 10, "score": 245024.39949301354 }, { "content": "// Write Postgres config block wrapped with generated comment section\n\nfn write_zenith_managed_block(file: &mut File, buf: &str) -> Result<()> {\n\n writeln!(file, \"# Managed by Zenith: begin\")?;\n\n writeln!(file, \"{}\", buf)?;\n\n writeln!(file, \"# Managed by Zenith: end\")?;\n\n\n\n Ok(())\n\n}\n", "file_path": "compute_tools/src/config.rs", "rank": 11, "score": 236911.80106654868 }, { "content": "// Check this flag in the thread loops to know when to exit\n\npub fn shutdown_requested() -> bool {\n\n SHUTDOWN_REQUESTED.load(Ordering::Relaxed)\n\n}\n\n\n", "file_path": "pageserver/src/tenant_mgr.rs", "rank": 12, "score": 236591.0876970127 }, { "content": "// See SlruMayDeleteSegment() in slru.c\n\npub fn slru_may_delete_clogsegment(segpage: u32, cutoff_page: u32) -> bool {\n\n let seg_last_page = segpage + pg_constants::SLRU_PAGES_PER_SEGMENT - 1;\n\n\n\n assert_eq!(segpage % pg_constants::SLRU_PAGES_PER_SEGMENT, 0);\n\n\n\n clogpage_precedes(segpage, cutoff_page) && clogpage_precedes(seg_last_page, cutoff_page)\n\n}\n\n\n\n// Multixact utils\n\n\n", "file_path": "postgres_ffi/src/nonrelfile_utils.rs", "rank": 13, "score": 229703.18269533632 }, { "content": "/// A shortcut that configures big-endian binary serialization\n\n///\n\n/// Properties:\n\n/// - Big endian\n\n/// - Fixed integer encoding (i.e. 1u32 is 00000001 not 01)\n\n///\n\n/// Does not allow trailing bytes in deserialization. 
If this is desired, you\n\n/// may set [`Options::allow_trailing_bytes`] to explicitly accomodate this.\n\npub fn be_coder() -> impl Options {\n\n bincode::DefaultOptions::new()\n\n .with_big_endian()\n\n .with_fixint_encoding()\n\n}\n\n\n", "file_path": "zenith_utils/src/bin_ser.rs", "rank": 14, "score": 226670.85331129323 }, { "content": "/// A shortcut that configures little-ending binary serialization\n\n///\n\n/// Properties:\n\n/// - Little endian\n\n/// - Fixed integer encoding (i.e. 1u32 is 00000001 not 01)\n\n///\n\n/// Does not allow trailing bytes in deserialization. If this is desired, you\n\n/// may set [`Options::allow_trailing_bytes`] to explicitly accomodate this.\n\npub fn le_coder() -> impl Options {\n\n bincode::DefaultOptions::new()\n\n .with_little_endian()\n\n .with_fixint_encoding()\n\n}\n\n\n", "file_path": "zenith_utils/src/bin_ser.rs", "rank": 15, "score": 223662.80987031898 }, { "content": "#[allow(non_snake_case)]\n\npub fn XLogFromFileName(fname: &str, wal_seg_size: usize) -> (XLogSegNo, TimeLineID) {\n\n let tli = u32::from_str_radix(&fname[0..8], 16).unwrap();\n\n let log = u32::from_str_radix(&fname[8..16], 16).unwrap() as XLogSegNo;\n\n let seg = u32::from_str_radix(&fname[16..24], 16).unwrap() as XLogSegNo;\n\n (log * XLogSegmentsPerXLogId(wal_seg_size) + seg, tli)\n\n}\n\n\n", "file_path": "postgres_ffi/src/xlog_utils.rs", "rank": 16, "score": 222270.3892208239 }, { "content": "/// Run `postgres` in a special mode with `--sync-safekeepers` argument\n\n/// and return the reported LSN back to the caller.\n\npub fn sync_safekeepers(pgdata: &str, pgbin: &str) -> Result<String> {\n\n let sync_handle = Command::new(&pgbin)\n\n .args(&[\"--sync-safekeepers\"])\n\n .env(\"PGDATA\", &pgdata) // we cannot use -D in this mode\n\n .stdout(Stdio::piped())\n\n .stderr(Stdio::piped())\n\n .spawn()\n\n .expect(\"postgres --sync-safekeepers failed to start\");\n\n\n\n let sync_output = sync_handle\n\n .wait_with_output()\n\n .expect(\"postgres 
--sync-safekeepers failed\");\n\n if !sync_output.status.success() {\n\n anyhow::bail!(\n\n \"postgres --sync-safekeepers failed: '{}'\",\n\n String::from_utf8_lossy(&sync_output.stderr)\n\n );\n\n }\n\n\n\n let lsn = String::from(String::from_utf8(sync_output.stdout)?.trim());\n\n\n\n Ok(lsn)\n\n}\n", "file_path": "compute_tools/src/zenith.rs", "rank": 17, "score": 218549.9289563942 }, { "content": "/// Initialize `env_logger` using either `default_level` or\n\n/// `RUST_LOG` environment variable as default log level.\n\npub fn init_logger(default_level: &str) -> Result<()> {\n\n let env = Env::default().filter_or(\"RUST_LOG\", default_level);\n\n\n\n Builder::from_env(env)\n\n .format(|buf, record| {\n\n let thread_handle = std::thread::current();\n\n writeln!(\n\n buf,\n\n \"{} [{}] {}: {}\",\n\n Utc::now().format(\"%Y-%m-%d %H:%M:%S%.3f %Z\"),\n\n thread_handle.name().unwrap_or(\"main\"),\n\n record.level(),\n\n record.args()\n\n )\n\n })\n\n .init();\n\n\n\n Ok(())\n\n}\n", "file_path": "compute_tools/src/logger.rs", "rank": 18, "score": 214503.26286136504 }, { "content": "/// Remove `pgdata` directory and create it again with right permissions.\n\npub fn create_pgdata(pgdata: &str) -> Result<()> {\n\n // Ignore removal error, likely it is a 'No such file or directory (os error 2)'.\n\n // If it is something different then create_dir() will error out anyway.\n\n let _ok = fs::remove_dir_all(pgdata);\n\n fs::create_dir(pgdata)?;\n\n fs::set_permissions(pgdata, fs::Permissions::from_mode(0o700))?;\n\n\n\n Ok(())\n\n}\n", "file_path": "compute_tools/src/pg_helpers.rs", "rank": 19, "score": 214498.1687978888 }, { "content": "/// Sets a prefix which will be used for all common metrics, typically a service\n\n/// name like 'pageserver'. 
Should be executed exactly once in the beginning of\n\n/// any executable which uses common metrics.\n\npub fn set_common_metrics_prefix(prefix: &'static str) {\n\n // Not unwrap() because metrics may be initialized after multiple threads have been started.\n\n COMMON_METRICS_PREFIX\n\n .set(prefix.into())\n\n .unwrap_or_else(|_| {\n\n eprintln!(\n\n \"set_common_metrics_prefix() was called second time with '{}', exiting\",\n\n prefix\n\n );\n\n std::process::exit(1);\n\n });\n\n}\n\n\n", "file_path": "zenith_metrics/src/lib.rs", "rank": 20, "score": 211759.73414986953 }, { "content": "// A manual implementation using BytesMut, just so we can\n\n// verify that we decode the same way.\n\npub fn decode_header_data(buf: &mut BytesMut) -> HeaderData {\n\n HeaderData {\n\n magic: buf.get_u16_le(),\n\n info: buf.get_u16_le(),\n\n tli: buf.get_u32_le(),\n\n pageaddr: buf.get_u64_le(),\n\n len: buf.get_u32_le(),\n\n }\n\n}\n\n\n", "file_path": "zenith_utils/tests/bin_ser_test.rs", "rank": 21, "score": 209658.7813959608 }, { "content": "pub fn mx_offset_to_member_segment(xid: u32) -> i32 {\n\n (mx_offset_to_member_page(xid) / pg_constants::SLRU_PAGES_PER_SEGMENT) as i32\n\n}\n", "file_path": "postgres_ffi/src/nonrelfile_utils.rs", "rank": 22, "score": 209394.48236430046 }, { "content": "pub fn configure_ssl(key_path: &str, cert_path: &str) -> anyhow::Result<SslConfig> {\n\n let key = {\n\n let key_bytes = std::fs::read(key_path).context(\"SSL key file\")?;\n\n let mut keys = pemfile::pkcs8_private_keys(&mut &key_bytes[..])\n\n .map_err(|_| anyhow!(\"couldn't read TLS keys\"))?;\n\n ensure!(keys.len() == 1, \"keys.len() = {} (should be 1)\", keys.len());\n\n keys.pop().unwrap()\n\n };\n\n\n\n let cert_chain = {\n\n let cert_chain_bytes = std::fs::read(cert_path).context(\"SSL cert file\")?;\n\n pemfile::certs(&mut &cert_chain_bytes[..])\n\n .map_err(|_| anyhow!(\"couldn't read TLS certificates\"))?\n\n };\n\n\n\n let mut config = ServerConfig::new(NoClientAuth::new());\n\n 
config.set_single_cert(cert_chain, key)?;\n\n config.versions = vec![ProtocolVersion::TLSv1_3];\n\n\n\n Ok(config.into())\n\n}\n", "file_path": "proxy/src/state.rs", "rank": 23, "score": 207762.84353056972 }, { "content": "fn send_proposer_elected(spg: &mut SafekeeperPostgresHandler, term: Term, lsn: Lsn) -> Result<()> {\n\n // add new term to existing history\n\n let history = spg.timeline.get().get_info().acceptor_state.term_history;\n\n let history = history.up_to(lsn.checked_sub(1u64).unwrap());\n\n let mut history_entries = history.0;\n\n history_entries.push(TermSwitchEntry { term, lsn });\n\n let history = TermHistory(history_entries);\n\n\n\n let proposer_elected_request = ProposerAcceptorMessage::Elected(ProposerElected {\n\n term,\n\n start_streaming_at: lsn,\n\n term_history: history,\n\n });\n\n\n\n spg.timeline.get().process_msg(&proposer_elected_request)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "walkeeper/src/json_ctrl.rs", "rank": 24, "score": 206872.33159777502 }, { "content": "/// Prepends a prefix to a common metric name so they are distinguished between\n\n/// different services, see <https://github.com/zenithdb/zenith/pull/681>\n\n/// A call to set_common_metrics_prefix() is necessary prior to calling this.\n\npub fn new_common_metric_name(unprefixed_metric_name: &str) -> String {\n\n // Not unwrap() because metrics may be initialized after multiple threads have been started.\n\n format!(\n\n \"{}_{}\",\n\n COMMON_METRICS_PREFIX.get().unwrap_or_else(|| {\n\n eprintln!(\"set_common_metrics_prefix() was not called, but metrics are used, exiting\");\n\n std::process::exit(1);\n\n }),\n\n unprefixed_metric_name\n\n )\n\n}\n\n\n\nlazy_static! 
{\n\n static ref DISK_IO_BYTES: IntGaugeVec = register_int_gauge_vec!(\n\n new_common_metric_name(\"disk_io_bytes\"),\n\n \"Bytes written and read from disk, grouped by the operation (read|write)\",\n\n &[\"io_operation\"]\n\n )\n\n .expect(\"Failed to register disk i/o bytes int gauge vec\");\n\n static ref MAXRSS_KB: IntGauge = register_int_gauge!(\n", "file_path": "zenith_metrics/src/lib.rs", "rank": 25, "score": 206612.66619617387 }, { "content": "pub fn mx_offset_to_flags_offset(xid: MultiXactId) -> usize {\n\n ((xid / pg_constants::MULTIXACT_MEMBERS_PER_MEMBERGROUP as u32) as u16\n\n % pg_constants::MULTIXACT_MEMBERGROUPS_PER_PAGE\n\n * pg_constants::MULTIXACT_MEMBERGROUP_SIZE) as usize\n\n}\n\n\n", "file_path": "postgres_ffi/src/nonrelfile_utils.rs", "rank": 26, "score": 204465.42071362492 }, { "content": "pub fn mx_offset_to_flags_bitshift(xid: MultiXactId) -> u16 {\n\n (xid as u16) % pg_constants::MULTIXACT_MEMBERS_PER_MEMBERGROUP\n\n * pg_constants::MXACT_MEMBER_BITS_PER_XACT\n\n}\n\n\n\n/* Location (byte offset within page) of TransactionId of given member */\n", "file_path": "postgres_ffi/src/nonrelfile_utils.rs", "rank": 27, "score": 204465.42071362492 }, { "content": "pub fn mx_offset_to_member_offset(xid: MultiXactId) -> usize {\n\n mx_offset_to_flags_offset(xid)\n\n + (pg_constants::MULTIXACT_FLAGBYTES_PER_GROUP\n\n + (xid as u16 % pg_constants::MULTIXACT_MEMBERS_PER_MEMBERGROUP) * 4) as usize\n\n}\n\n\n", "file_path": "postgres_ffi/src/nonrelfile_utils.rs", "rank": 28, "score": 204465.42071362492 }, { "content": "// Helper function for socket read loops\n\npub fn is_socket_read_timed_out(error: &anyhow::Error) -> bool {\n\n for cause in error.chain() {\n\n if let Some(io_error) = cause.downcast_ref::<io::Error>() {\n\n if io_error.kind() == std::io::ErrorKind::WouldBlock {\n\n return true;\n\n }\n\n }\n\n }\n\n false\n\n}\n\n\n", "file_path": "zenith_utils/src/postgres_backend.rs", "rank": 29, "score": 204040.81589132006 }, { "content": "/// Wait 
for Postgres to become ready to accept connections:\n\n/// - state should be `ready` in the `pgdata/postmaster.pid`\n\n/// - and we should be able to connect to 127.0.0.1:5432\n\npub fn wait_for_postgres(port: &str, pgdata: &Path) -> Result<()> {\n\n let pid_path = pgdata.join(\"postmaster.pid\");\n\n let mut slept: u64 = 0; // ms\n\n let pause = time::Duration::from_millis(100);\n\n\n\n let timeout = time::Duration::from_millis(200);\n\n let addr = SocketAddr::from_str(&format!(\"127.0.0.1:{}\", port)).unwrap();\n\n\n\n loop {\n\n // Sleep POSTGRES_WAIT_TIMEOUT at max (a bit longer actually if consider a TCP timeout,\n\n // but postgres starts listening almost immediately, even if it is not really\n\n // ready to accept connections).\n\n if slept >= POSTGRES_WAIT_TIMEOUT {\n\n return Err(anyhow!(\"timed out while waiting for Postgres to start\"));\n\n }\n\n\n\n if pid_path.exists() {\n\n // XXX: dumb and the simplest way to get the last line in a text file\n\n // TODO: better use `.lines().last()` later\n\n let stdout = Command::new(\"tail\")\n", "file_path": "compute_tools/src/pg_helpers.rs", "rank": 30, "score": 199145.9586897107 }, { "content": "#[allow(non_snake_case)]\n\npub fn XLogSegNoOffsetToRecPtr(\n\n segno: XLogSegNo,\n\n offset: u32,\n\n wal_segsz_bytes: usize,\n\n) -> XLogRecPtr {\n\n segno * (wal_segsz_bytes as u64) + (offset as u64)\n\n}\n\n\n", "file_path": "postgres_ffi/src/xlog_utils.rs", "rank": 31, "score": 194596.50924440875 }, { "content": "//\n\n// Generate new, empty WAL segment.\n\n// We need this segment to start compute node.\n\n//\n\npub fn generate_wal_segment(segno: u64, system_id: u64) -> Bytes {\n\n let mut seg_buf = BytesMut::with_capacity(pg_constants::WAL_SEGMENT_SIZE as usize);\n\n\n\n let pageaddr = XLogSegNoOffsetToRecPtr(segno, 0, pg_constants::WAL_SEGMENT_SIZE);\n\n let hdr = XLogLongPageHeaderData {\n\n std: {\n\n XLogPageHeaderData {\n\n xlp_magic: XLOG_PAGE_MAGIC as u16,\n\n xlp_info: pg_constants::XLP_LONG_HEADER,\n\n 
xlp_tli: PG_TLI,\n\n xlp_pageaddr: pageaddr,\n\n xlp_rem_len: 0,\n\n ..Default::default() // Put 0 in padding fields.\n\n }\n\n },\n\n xlp_sysid: system_id,\n\n xlp_seg_size: pg_constants::WAL_SEGMENT_SIZE as u32,\n\n xlp_xlog_blcksz: XLOG_BLCKSZ as u32,\n\n };\n\n\n", "file_path": "postgres_ffi/src/xlog_utils.rs", "rank": 32, "score": 194514.90352314312 }, { "content": "pub fn transaction_id_get_status(xid: u32, page: &[u8]) -> u8 {\n\n let byteno: usize = ((xid as u32 % pg_constants::CLOG_XACTS_PER_PAGE as u32)\n\n / pg_constants::CLOG_XACTS_PER_BYTE) as usize;\n\n\n\n let bshift: u8 =\n\n ((xid % pg_constants::CLOG_XACTS_PER_BYTE) * pg_constants::CLOG_BITS_PER_XACT as u32) as u8;\n\n\n\n ((page[byteno] >> bshift) & pg_constants::CLOG_XACT_BITMASK) as u8\n\n}\n\n\n\n// See CLOGPagePrecedes in clog.c\n\npub const fn clogpage_precedes(page1: u32, page2: u32) -> bool {\n\n let mut xid1 = page1 * pg_constants::CLOG_XACTS_PER_PAGE;\n\n xid1 += pg_constants::FIRST_NORMAL_TRANSACTION_ID + 1;\n\n let mut xid2 = page2 * pg_constants::CLOG_XACTS_PER_PAGE;\n\n xid2 += pg_constants::FIRST_NORMAL_TRANSACTION_ID + 1;\n\n\n\n transaction_id_precedes(xid1, xid2)\n\n && transaction_id_precedes(xid1, xid2 + pg_constants::CLOG_XACTS_PER_PAGE - 1)\n\n}\n\n\n", "file_path": "postgres_ffi/src/nonrelfile_utils.rs", "rank": 33, "score": 194461.1378346632 }, { "content": "/// Similar to [`std::fs::create_dir_all`], except we fsync all\n\n/// newly created directories and the pre-existing parent.\n\npub fn create_dir_all(path: impl AsRef<Path>) -> io::Result<()> {\n\n let mut path = path.as_ref();\n\n\n\n let mut dirs_to_create = Vec::new();\n\n\n\n // Figure out which directories we need to create.\n\n loop {\n\n match path.metadata() {\n\n Ok(metadata) if metadata.is_dir() => break,\n\n Ok(_) => {\n\n return Err(io::Error::new(\n\n io::ErrorKind::AlreadyExists,\n\n format!(\"non-directory found in path: {}\", path.display()),\n\n ));\n\n }\n\n Err(ref e) if e.kind() == 
io::ErrorKind::NotFound => {}\n\n Err(e) => return Err(e),\n\n }\n\n\n\n dirs_to_create.push(path);\n", "file_path": "zenith_utils/src/crashsafe_dir.rs", "rank": 34, "score": 192678.09626040515 }, { "content": "/// Similar to [`std::fs::create_dir`], except we fsync the\n\n/// created directory and its parent.\n\npub fn create_dir(path: impl AsRef<Path>) -> io::Result<()> {\n\n let path = path.as_ref();\n\n\n\n fs::create_dir(path)?;\n\n File::open(path)?.sync_all()?;\n\n\n\n if let Some(parent) = path.parent() {\n\n File::open(parent)?.sync_all()\n\n } else {\n\n Err(io::Error::new(\n\n io::ErrorKind::InvalidInput,\n\n \"can't find parent\",\n\n ))\n\n }\n\n}\n\n\n", "file_path": "zenith_utils/src/crashsafe_dir.rs", "rank": 35, "score": 192678.09626040515 }, { "content": "/// It follows mostly the same logic as `handle_roles()` excepting that we\n\n/// does not use an explicit transactions block, since major database operations\n\n/// like `CREATE DATABASE` and `DROP DATABASE` do not support it. 
Statement-level\n\n/// atomicity should be enough here due to the order of operations and various checks,\n\n/// which together provide us idempotency.\n\npub fn handle_databases(spec: &ClusterSpec, client: &mut Client) -> Result<()> {\n\n let existing_dbs: Vec<Database> = get_existing_dbs(client)?;\n\n\n\n // Print a list of existing Postgres databases (only in debug mode)\n\n info!(\"postgres databases:\");\n\n for r in &existing_dbs {\n\n info_println!(\"{} - {}:{}\", \" \".repeat(27 + 5), r.name, r.owner);\n\n }\n\n\n\n // Process delta operations first\n\n if let Some(ops) = &spec.delta_operations {\n\n info!(\"processing delta operations on databases\");\n\n for op in ops {\n\n match op.action.as_ref() {\n\n // We do not check either DB exists or not,\n\n // Postgres will take care of it for us\n\n \"delete_db\" => {\n\n let query: String = format!(\"DROP DATABASE IF EXISTS {}\", &op.name.quote());\n\n\n\n warn!(\"deleting database '{}'\", &op.name);\n", "file_path": "compute_tools/src/spec.rs", "rank": 36, "score": 192572.79281421675 }, { "content": "/// Given a cluster spec json and open transaction it handles roles creation,\n\n/// deletion and update.\n\npub fn handle_roles(spec: &ClusterSpec, client: &mut Client) -> Result<()> {\n\n let mut xact = client.transaction()?;\n\n let existing_roles: Vec<Role> = get_existing_roles(&mut xact)?;\n\n\n\n // Print a list of existing Postgres roles (only in debug mode)\n\n info!(\"postgres roles:\");\n\n for r in &existing_roles {\n\n info_println!(\n\n \"{} - {}:{}\",\n\n \" \".repeat(27 + 5),\n\n r.name,\n\n if r.encrypted_password.is_some() {\n\n \"[FILTERED]\"\n\n } else {\n\n \"(null)\"\n\n }\n\n );\n\n }\n\n\n\n // Process delta operations first\n", "file_path": "compute_tools/src/spec.rs", "rank": 37, "score": 192563.50199941586 }, { "content": "/// Build a list of existing Postgres databases\n\npub fn get_existing_dbs(client: &mut Client) -> Result<Vec<Database>> {\n\n let postgres_dbs = client\n\n 
.query(\n\n \"SELECT datname, datdba::regrole::text as owner\n\n FROM pg_catalog.pg_database;\",\n\n &[],\n\n )?\n\n .iter()\n\n .map(|row| Database {\n\n name: row.get(\"datname\"),\n\n owner: row.get(\"owner\"),\n\n options: None,\n\n })\n\n .collect();\n\n\n\n Ok(postgres_dbs)\n\n}\n\n\n", "file_path": "compute_tools/src/pg_helpers.rs", "rank": 38, "score": 190151.5124039377 }, { "content": "pub fn decode2<R: Read>(reader: &mut R) -> HeaderData {\n\n HeaderData::des_from(reader).unwrap()\n\n}\n\n\n", "file_path": "zenith_utils/tests/bin_ser_test.rs", "rank": 39, "score": 190151.5124039377 }, { "content": "/// Convert Postgres relation file's fork suffix to fork number.\n\npub fn forkname_to_number(forkname: Option<&str>) -> Result<u8, FilePathError> {\n\n match forkname {\n\n // \"main\" is not in filenames, it's implicit if the fork name is not present\n\n None => Ok(pg_constants::MAIN_FORKNUM),\n\n Some(\"fsm\") => Ok(pg_constants::FSM_FORKNUM),\n\n Some(\"vm\") => Ok(pg_constants::VISIBILITYMAP_FORKNUM),\n\n Some(\"init\") => Ok(pg_constants::INIT_FORKNUM),\n\n Some(_) => Err(FilePathError::InvalidForkName),\n\n }\n\n}\n\n\n", "file_path": "postgres_ffi/src/relfile_utils.rs", "rank": 40, "score": 187707.7330873707 }, { "content": "pub fn parse_request_param<T: FromStr>(\n\n request: &Request<Body>,\n\n param_name: &str,\n\n) -> Result<T, ApiError> {\n\n match get_request_param(request, param_name)?.parse() {\n\n Ok(v) => Ok(v),\n\n Err(_) => Err(ApiError::BadRequest(format!(\n\n \"failed to parse {}\",\n\n param_name\n\n ))),\n\n }\n\n}\n", "file_path": "zenith_utils/src/http/request.rs", "rank": 41, "score": 187590.29785875668 }, { "content": "fn build_apply_record_msg(endlsn: Lsn, rec: &[u8], buf: &mut Vec<u8>) {\n\n let len = 4 + 8 + rec.len();\n\n\n\n buf.put_u8(b'A');\n\n buf.put_u32(len as u32);\n\n buf.put_u64(endlsn.0);\n\n buf.put(rec);\n\n}\n\n\n", "file_path": "pageserver/src/walredo.rs", "rank": 42, "score": 187425.11235635876 }, { "content": "/// 
Build a list of existing Postgres roles\n\npub fn get_existing_roles(xact: &mut Transaction<'_>) -> Result<Vec<Role>> {\n\n let postgres_roles = xact\n\n .query(\"SELECT rolname, rolpassword FROM pg_catalog.pg_authid\", &[])?\n\n .iter()\n\n .map(|row| Role {\n\n name: row.get(\"rolname\"),\n\n encrypted_password: row.get(\"rolpassword\"),\n\n options: None,\n\n })\n\n .collect();\n\n\n\n Ok(postgres_roles)\n\n}\n\n\n", "file_path": "compute_tools/src/pg_helpers.rs", "rank": 43, "score": 186362.75405820168 }, { "content": "fn pageserver_config_overrides<'a>(init_match: &'a ArgMatches) -> Vec<&'a str> {\n\n init_match\n\n .values_of(\"pageserver-config-override\")\n\n .into_iter()\n\n .flatten()\n\n .collect()\n\n}\n\n\n", "file_path": "zenith/src/main.rs", "rank": 44, "score": 185070.89032990433 }, { "content": "pub fn init_pageserver(conf: &'static PageServerConf, create_tenant: Option<&str>) -> Result<()> {\n\n // Initialize logger\n\n // use true as daemonize parameter because otherwise we pollute zenith cli output with a few pages long output of info messages\n\n let _log_file = logging::init(LOG_FILE_NAME, true)?;\n\n\n\n // We don't use the real WAL redo manager, because we don't want to spawn the WAL redo\n\n // process during repository initialization.\n\n //\n\n // FIXME: That caused trouble, because the WAL redo manager spawned a thread that launched\n\n // initdb in the background, and it kept running even after the \"zenith init\" had exited.\n\n // In tests, we started the page server immediately after that, so that initdb was still\n\n // running in the background, and we failed to run initdb again in the same directory. This\n\n // has been solved for the rapid init+start case now, but the general race condition remains\n\n // if you restart the server quickly. 
The WAL redo manager doesn't use a separate thread\n\n // anymore, but I think that could still happen.\n\n let dummy_redo_mgr = Arc::new(crate::walredo::DummyRedoManager {});\n\n\n\n if let Some(tenantid) = create_tenant {\n\n let tenantid = ZTenantId::from_str(tenantid)?;\n\n println!(\"initializing tenantid {}\", tenantid);\n\n create_repo(conf, tenantid, dummy_redo_mgr).with_context(|| \"failed to create repo\")?;\n\n }\n\n crashsafe_dir::create_dir_all(conf.tenants_path())?;\n\n\n\n println!(\"pageserver init succeeded\");\n\n Ok(())\n\n}\n\n\n", "file_path": "pageserver/src/branches.rs", "rank": 45, "score": 183918.42690809997 }, { "content": "/// Reads the archive metadata out of the archive name:\n\n/// * `disk_consistent_lsn` of the checkpoint that was archived\n\n/// * size of the archive header\n\npub fn parse_archive_name(archive_path: &Path) -> anyhow::Result<(Lsn, u64)> {\n\n let archive_name = archive_path\n\n .file_name()\n\n .ok_or_else(|| anyhow!(\"Archive '{}' has no file name\", archive_path.display()))?\n\n .to_string_lossy();\n\n let (lsn_str, header_size_str) =\n\n archive_name.rsplit_once(ARCHIVE_EXTENSION).ok_or_else(|| {\n\n anyhow!(\n\n \"Archive '{}' has incorrect extension, expected to contain '{}'\",\n\n archive_path.display(),\n\n ARCHIVE_EXTENSION\n\n )\n\n })?;\n\n let disk_consistent_lsn = Lsn::from_hex(lsn_str).with_context(|| {\n\n format!(\n\n \"Archive '{}' has an invalid disk consistent lsn in its extension\",\n\n archive_path.display(),\n\n )\n\n })?;\n\n let header_size = header_size_str.parse::<u64>().with_context(|| {\n\n format!(\n\n \"Archive '{}' has an invalid a header offset number in its extension\",\n\n archive_path.display(),\n\n )\n\n })?;\n\n Ok((disk_consistent_lsn, header_size))\n\n}\n\n\n", "file_path": "pageserver/src/remote_storage/storage_sync/compression.rs", "rank": 46, "score": 183544.14881376564 }, { "content": "fn register_sync_status(sync_start: Instant, sync_name: &str, sync_status: Option<bool>) 
{\n\n let secs_elapsed = sync_start.elapsed().as_secs_f64();\n\n debug!(\"Processed a sync task in {} seconds\", secs_elapsed);\n\n match sync_status {\n\n Some(true) => IMAGE_SYNC_TIME.with_label_values(&[sync_name, \"success\"]),\n\n Some(false) => IMAGE_SYNC_TIME.with_label_values(&[sync_name, \"failure\"]),\n\n None => return,\n\n }\n\n .observe(secs_elapsed)\n\n}\n\n\n\nasync fn update_index_description<\n\n P: Send + Sync + 'static,\n\n S: RemoteStorage<StoragePath = P> + Send + Sync + 'static,\n\n>(\n\n (storage, index): &(S, RwLock<RemoteTimelineIndex>),\n\n timeline_dir: &Path,\n\n id: TimelineSyncId,\n\n) -> anyhow::Result<RemoteTimeline> {\n\n let mut index_write = index.write().await;\n", "file_path": "pageserver/src/remote_storage/storage_sync.rs", "rank": 47, "score": 182361.12354161855 }, { "content": "///\n\n/// Parse a filename of a relation file. Returns (relfilenode, forknum, segno) tuple.\n\n///\n\n/// Formats:\n\n/// <oid>\n\n/// <oid>_<fork name>\n\n/// <oid>.<segment number>\n\n/// <oid>_<fork name>.<segment number>\n\n///\n\n/// See functions relpath() and _mdfd_segpath() in PostgreSQL sources.\n\n///\n\npub fn parse_relfilename(fname: &str) -> Result<(u32, u8, u32), FilePathError> {\n\n lazy_static! 
{\n\n static ref RELFILE_RE: Regex =\n\n Regex::new(r\"^(?P<relnode>\\d+)(_(?P<forkname>[a-z]+))?(\\.(?P<segno>\\d+))?$\").unwrap();\n\n }\n\n let caps = RELFILE_RE\n\n .captures(fname)\n\n .ok_or(FilePathError::InvalidFileName)?;\n\n\n\n let relnode_str = caps.name(\"relnode\").unwrap().as_str();\n\n let relnode = relnode_str.parse::<u32>()?;\n\n\n\n let forkname = caps.name(\"forkname\").map(|f| f.as_str());\n\n let forknum = forkname_to_number(forkname)?;\n\n\n\n let segno_match = caps.name(\"segno\");\n\n let segno = if segno_match.is_none() {\n\n 0\n\n } else {\n\n segno_match.unwrap().as_str().parse::<u32>()?\n", "file_path": "postgres_ffi/src/relfile_utils.rs", "rank": 48, "score": 181715.97094569955 }, { "content": "/// Safe write of s into buf as cstring (String in the protocol).\n\nfn write_cstr(s: &[u8], buf: &mut BytesMut) -> Result<(), io::Error> {\n\n if s.contains(&0) {\n\n return Err(io::Error::new(\n\n io::ErrorKind::InvalidInput,\n\n \"string contains embedded null\",\n\n ));\n\n }\n\n buf.put_slice(s);\n\n buf.put_u8(0);\n\n Ok(())\n\n}\n\n\n\nimpl<'a> BeMessage<'a> {\n\n /// Write message to the given buf.\n\n // Unlike the reading side, we use BytesMut\n\n // here as msg len preceeds its body and it is handy to write it down first\n\n // and then fill the length. 
With Write we would have to either calc it\n\n // manually or have one more buffer.\n\n pub fn write(buf: &mut BytesMut, message: &BeMessage) -> io::Result<()> {\n\n match message {\n", "file_path": "zenith_utils/src/pq_proto.rs", "rank": 49, "score": 175441.4151544511 }, { "content": "pub fn drop_wal_receiver(timelineid: ZTimelineId, tenantid: ZTenantId) {\n\n let mut receivers = WAL_RECEIVERS.lock();\n\n receivers.remove(&timelineid);\n\n\n\n // Check if it was the last walreceiver of the tenant.\n\n // TODO now we store one WalReceiverEntry per timeline,\n\n // so this iterator looks a bit strange.\n\n for (_timelineid, entry) in receivers.iter() {\n\n if entry.tenantid == tenantid {\n\n return;\n\n }\n\n }\n\n\n\n // When last walreceiver of the tenant is gone, change state to Idle\n\n tenant_mgr::set_tenant_state(tenantid, TenantState::Idle).unwrap();\n\n}\n\n\n", "file_path": "pageserver/src/walreceiver.rs", "rank": 50, "score": 169951.41842691478 }, { "content": "/// Create or completely rewrite configuration file specified by `path`\n\npub fn write_postgres_conf(path: &Path, spec: &ClusterSpec) -> Result<()> {\n\n // File::create() destroys the file content if it exists.\n\n let mut postgres_conf = File::create(path)?;\n\n\n\n write_zenith_managed_block(&mut postgres_conf, &spec.cluster.settings.as_pg_settings())?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "compute_tools/src/config.rs", "rank": 51, "score": 169272.5023958661 }, { "content": "///\n\n/// Main proxy listener loop.\n\n///\n\n/// Listens for connections, and launches a new handler thread for each.\n\n///\n\npub fn thread_main(\n\n state: &'static ProxyState,\n\n listener: std::net::TcpListener,\n\n) -> anyhow::Result<()> {\n\n loop {\n\n let (socket, peer_addr) = listener.accept()?;\n\n println!(\"accepted connection from {}\", peer_addr);\n\n socket.set_nodelay(true).unwrap();\n\n\n\n // TODO Use a threadpool instead. Maybe use tokio's threadpool by\n\n // spawning a future into its runtime. 
Tokio's JoinError should\n\n // allow us to handle cleanup properly even if the future panics.\n\n thread::Builder::new()\n\n .name(\"Proxy thread\".into())\n\n .spawn(move || {\n\n if let Err(err) = proxy_conn_main(state, socket) {\n\n println!(\"error: {}\", err);\n\n }\n\n\n\n // Clean up CANCEL_MAP.\n\n THREAD_CANCEL_KEY_DATA.with(|cell| {\n\n if let Some(cancel_key_data) = cell.get() {\n\n CANCEL_MAP.lock().remove(&cancel_key_data);\n\n };\n\n });\n\n })?;\n\n }\n\n}\n\n\n", "file_path": "proxy/src/proxy.rs", "rank": 52, "score": 166019.41868677305 }, { "content": "pub fn create_repo(\n\n conf: &'static PageServerConf,\n\n tenantid: ZTenantId,\n\n wal_redo_manager: Arc<dyn WalRedoManager + Send + Sync>,\n\n) -> Result<Arc<dyn Repository>> {\n\n let repo_dir = conf.tenant_path(&tenantid);\n\n if repo_dir.exists() {\n\n bail!(\"repo for {} already exists\", tenantid)\n\n }\n\n\n\n // top-level dir may exist if we are creating it through CLI\n\n crashsafe_dir::create_dir_all(&repo_dir)\n\n .with_context(|| format!(\"could not create directory {}\", repo_dir.display()))?;\n\n\n\n crashsafe_dir::create_dir(conf.timelines_path(&tenantid))?;\n\n crashsafe_dir::create_dir_all(conf.branches_path(&tenantid))?;\n\n crashsafe_dir::create_dir_all(conf.tags_path(&tenantid))?;\n\n\n\n info!(\"created directory structure in {}\", repo_dir.display());\n\n\n", "file_path": "pageserver/src/branches.rs", "rank": 53, "score": 166019.41868677305 }, { "content": "/// Get basebackup from the libpq connection to pageserver using `connstr` and\n\n/// unarchive it to `pgdata` directory overriding all its previous content.\n\npub fn get_basebackup(\n\n pgdata: &str,\n\n connstr: &str,\n\n tenant: &str,\n\n timeline: &str,\n\n lsn: &str,\n\n) -> Result<()> {\n\n let mut client = Client::connect(connstr, NoTls)?;\n\n let basebackup_cmd = match lsn {\n\n \"0/0\" => format!(\"basebackup {} {}\", tenant, timeline), // First start of the compute\n\n _ => format!(\"basebackup {} {} {}\", 
tenant, timeline, lsn),\n\n };\n\n let copyreader = client.copy_out(basebackup_cmd.as_str())?;\n\n let mut ar = tar::Archive::new(copyreader);\n\n\n\n ar.unpack(&pgdata)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "compute_tools/src/zenith.rs", "rank": 54, "score": 163714.2927429787 }, { "content": "/// Initiate graceful shutdown of the http endpoint\n\npub fn shutdown() {\n\n if let Some(tx) = SHUTDOWN_SENDER.lock().unwrap().take() {\n\n let _ = tx.send(());\n\n }\n\n}\n\n\n", "file_path": "zenith_utils/src/http/endpoint.rs", "rank": 55, "score": 163709.16836001148 }, { "content": "// Launch a new WAL receiver, or tell one that's running about change in connection string\n\npub fn launch_wal_receiver(\n\n conf: &'static PageServerConf,\n\n timelineid: ZTimelineId,\n\n wal_producer_connstr: &str,\n\n tenantid: ZTenantId,\n\n) {\n\n let mut receivers = WAL_RECEIVERS.lock();\n\n\n\n match receivers.get_mut(&timelineid) {\n\n Some(receiver) => {\n\n receiver.wal_producer_connstr = wal_producer_connstr.into();\n\n }\n\n None => {\n\n let (tx, rx) = tokio::sync::oneshot::channel::<()>();\n\n\n\n let wal_receiver_handle = thread::Builder::new()\n\n .name(\"WAL receiver thread\".into())\n\n .spawn(move || {\n\n IS_WAL_RECEIVER.with(|c| c.set(true));\n\n thread_main(conf, timelineid, tenantid, rx);\n", "file_path": "pageserver/src/walreceiver.rs", "rank": 56, "score": 163709.16836001148 }, { "content": "pub fn make_router(\n\n conf: &'static PageServerConf,\n\n auth: Option<Arc<JwtAuth>>,\n\n) -> RouterBuilder<hyper::Body, ApiError> {\n\n let spec = include_bytes!(\"openapi_spec.yml\");\n\n let mut router = attach_openapi_ui(endpoint::make_router(), spec, \"/swagger.yml\", \"/v1/doc\");\n\n if auth.is_some() {\n\n router = router.middleware(auth_middleware(|request| {\n\n let state = get_state(request);\n\n if state.allowlist_routes.contains(request.uri()) {\n\n None\n\n } else {\n\n state.auth.as_deref()\n\n }\n\n }))\n\n }\n\n\n\n router\n\n 
.data(Arc::new(State::new(conf, auth)))\n\n .get(\"/v1/status\", status_handler)\n", "file_path": "pageserver/src/http/routes.rs", "rank": 57, "score": 163709.16836001148 }, { "content": "///\n\n/// Main loop of the page service.\n\n///\n\n/// Listens for connections, and launches a new handler thread for each.\n\n///\n\npub fn thread_main(\n\n conf: &'static PageServerConf,\n\n auth: Option<Arc<JwtAuth>>,\n\n listener: TcpListener,\n\n auth_type: AuthType,\n\n) -> anyhow::Result<()> {\n\n let mut join_handles = Vec::new();\n\n\n\n while !tenant_mgr::shutdown_requested() {\n\n let (socket, peer_addr) = listener.accept()?;\n\n debug!(\"accepted connection from {}\", peer_addr);\n\n socket.set_nodelay(true).unwrap();\n\n let local_auth = auth.clone();\n\n\n\n let handle = thread::Builder::new()\n\n .name(\"serving Page Service thread\".into())\n\n .spawn(move || {\n\n if let Err(err) = page_service_conn_main(conf, local_auth, socket, auth_type) {\n\n error!(%err, \"page server thread exited with error\");\n\n }\n", "file_path": "pageserver/src/page_service.rs", "rank": 58, "score": 163709.16836001148 }, { "content": "/// Accept incoming TCP connections and spawn them into a background thread.\n\npub fn thread_main(\n\n conf: SafeKeeperConf,\n\n listener: TcpListener,\n\n tx: UnboundedSender<CallmeEvent>,\n\n) -> Result<()> {\n\n loop {\n\n match listener.accept() {\n\n Ok((socket, peer_addr)) => {\n\n debug!(\"accepted connection from {}\", peer_addr);\n\n let conf = conf.clone();\n\n\n\n let tx_clone = tx.clone();\n\n let _ = thread::Builder::new()\n\n .name(\"WAL service thread\".into())\n\n .spawn(move || {\n\n if let Err(err) = handle_socket(socket, conf, tx_clone) {\n\n error!(\"connection handler exited: {}\", err);\n\n }\n\n })\n\n .unwrap();\n\n }\n\n Err(e) => error!(\"Failed to accept connection: {}\", e),\n\n }\n\n }\n\n}\n\n\n", "file_path": "walkeeper/src/wal_service.rs", "rank": 59, "score": 163709.16836001148 }, { "content": "pub fn main() {\n\n let 
mut data_dir = PathBuf::new();\n\n data_dir.push(\".\");\n\n let wal_seg_size = 16 * 1024 * 1024;\n\n let (wal_end, tli) = find_end_of_wal(&data_dir, wal_seg_size, true, Lsn(0)).unwrap();\n\n println!(\n\n \"wal_end={:>08X}{:>08X}, tli={}\",\n\n (wal_end >> 32) as u32,\n\n wal_end as u32,\n\n tli\n\n );\n\n}\n\n\n\nimpl XLogRecord {\n\n pub fn from_slice(buf: &[u8]) -> XLogRecord {\n\n use zenith_utils::bin_ser::LeSer;\n\n XLogRecord::des(buf).unwrap()\n\n }\n\n\n\n pub fn from_bytes<B: Buf>(buf: &mut B) -> XLogRecord {\n", "file_path": "postgres_ffi/src/xlog_utils.rs", "rank": 60, "score": 163709.16836001148 }, { "content": "/// Escape a value for putting in postgresql.conf.\n\nfn escape_str(s: &str) -> String {\n\n // If the string doesn't contain anything that needs quoting or escaping, return it\n\n // as it is.\n\n //\n\n // The first part of the regex, before the '|', matches the INTEGER rule in the\n\n // PostgreSQL flex grammar (guc-file.l). It matches plain integers like \"123\" and\n\n // \"-123\", and also accepts units like \"10MB\". The second part of the regex matches\n\n // the UNQUOTED_STRING rule, and accepts strings that contain a single word, beginning\n\n // with a letter. That covers words like \"off\" or \"posix\". Everything else is quoted.\n\n //\n\n // This regex is a bit more conservative than the rules in guc-file.l, so we quote some\n\n // strings that PostgreSQL would accept without quoting, but that's OK.\n\n lazy_static! 
{\n\n static ref UNQUOTED_RE: Regex =\n\n Regex::new(r\"(^[-+]?[0-9]+[a-zA-Z]*$)|(^[a-zA-Z][a-zA-Z0-9]*$)\").unwrap();\n\n }\n\n if UNQUOTED_RE.is_match(s) {\n\n s.to_string()\n\n } else {\n\n // Otherwise escape and quote it\n\n let s = s\n\n .replace('\\\\', \"\\\\\\\\\")\n\n .replace('\\n', \"\\\\n\")\n\n .replace('\\'', \"''\");\n\n\n\n \"\\'\".to_owned() + &s + \"\\'\"\n\n }\n\n}\n\n\n", "file_path": "control_plane/src/postgresql_conf.rs", "rank": 61, "score": 162851.23303964932 }, { "content": "#[allow(non_snake_case)]\n\npub fn XLogSegmentsPerXLogId(wal_segsz_bytes: usize) -> XLogSegNo {\n\n (0x100000000u64 / wal_segsz_bytes as u64) as XLogSegNo\n\n}\n\n\n", "file_path": "postgres_ffi/src/xlog_utils.rs", "rank": 62, "score": 162716.39737556898 }, { "content": "/// Handles command to craft logical message WAL record with given\n\n/// content, and then append it with specified term and lsn. This\n\n/// function is used to test safekeepers in different scenarios.\n\npub fn handle_json_ctrl(\n\n spg: &mut SafekeeperPostgresHandler,\n\n pgb: &mut PostgresBackend,\n\n append_request: &AppendLogicalMessage,\n\n) -> Result<()> {\n\n info!(\"JSON_CTRL request: {:?}\", append_request);\n\n\n\n // need to init safekeeper state before AppendRequest\n\n prepare_safekeeper(spg)?;\n\n\n\n // if send_proposer_elected is true, we need to update local history\n\n if append_request.send_proposer_elected {\n\n send_proposer_elected(spg, append_request.term, append_request.epoch_start_lsn)?;\n\n }\n\n\n\n let inserted_wal = append_logical_message(spg, append_request)?;\n\n let response = AppendResult {\n\n state: spg.timeline.get().get_info(),\n\n inserted_wal,\n\n };\n", "file_path": "walkeeper/src/json_ctrl.rs", "rank": 63, "score": 161520.37797983282 }, { "content": "/// Points to a place in pageserver's local directory,\n\n/// where certain timeline's metadata file should be located.\n\npub fn metadata_path(\n\n conf: &'static PageServerConf,\n\n timelineid: ZTimelineId,\n\n 
tenantid: ZTenantId,\n\n) -> PathBuf {\n\n conf.timeline_path(&timelineid, &tenantid)\n\n .join(METADATA_FILE_NAME)\n\n}\n\n\n\nimpl TimelineMetadata {\n\n pub fn new(\n\n disk_consistent_lsn: Lsn,\n\n prev_record_lsn: Option<Lsn>,\n\n ancestor_timeline: Option<ZTimelineId>,\n\n ancestor_lsn: Lsn,\n\n latest_gc_cutoff_lsn: Lsn,\n\n initdb_lsn: Lsn,\n\n ) -> Self {\n\n Self {\n\n disk_consistent_lsn,\n", "file_path": "pageserver/src/layered_repository/metadata.rs", "rank": 64, "score": 161510.55886893708 }, { "content": "/// Updates tenants' repositories, changing their timelines state in memory.\n\npub fn set_timeline_states(\n\n conf: &'static PageServerConf,\n\n timeline_states: HashMap<ZTenantId, HashMap<ZTimelineId, TimelineSyncState>>,\n\n) {\n\n if timeline_states.is_empty() {\n\n debug!(\"no timeline state updates to perform\");\n\n return;\n\n }\n\n\n\n info!(\"Updating states for {} timelines\", timeline_states.len());\n\n trace!(\"States: {:?}\", timeline_states);\n\n\n\n let mut m = access_tenants();\n\n for (tenant_id, timeline_states) in timeline_states {\n\n let tenant = m.entry(tenant_id).or_insert_with(|| Tenant {\n\n state: TenantState::Idle,\n\n repo: None,\n\n });\n\n if let Err(e) = put_timelines_into_tenant(conf, tenant, tenant_id, timeline_states) {\n\n error!(\n\n \"Failed to update timeline states for tenant {}: {:#}\",\n\n tenant_id, e\n\n );\n\n }\n\n }\n\n}\n\n\n", "file_path": "pageserver/src/tenant_mgr.rs", "rank": 65, "score": 161510.55886893708 }, { "content": "pub fn get_timeline_for_tenant(\n\n tenantid: ZTenantId,\n\n timelineid: ZTimelineId,\n\n) -> Result<Arc<dyn Timeline>> {\n\n get_repository_for_tenant(tenantid)?\n\n .get_timeline(timelineid)?\n\n .local_timeline()\n\n .ok_or_else(|| anyhow!(\"cannot fetch timeline {}\", timelineid))\n\n}\n\n\n", "file_path": "pageserver/src/tenant_mgr.rs", "rank": 66, "score": 161510.55886893708 }, { "content": "pub fn create_repository_for_tenant(\n\n conf: &'static PageServerConf,\n\n 
tenantid: ZTenantId,\n\n) -> Result<()> {\n\n let wal_redo_manager = Arc::new(PostgresRedoManager::new(conf, tenantid));\n\n let repo = Some(branches::create_repo(conf, tenantid, wal_redo_manager)?);\n\n\n\n match access_tenants().entry(tenantid) {\n\n hash_map::Entry::Occupied(_) => bail!(\"tenant {} already exists\", tenantid),\n\n hash_map::Entry::Vacant(v) => {\n\n v.insert(Tenant {\n\n state: TenantState::Idle,\n\n repo,\n\n });\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "pageserver/src/tenant_mgr.rs", "rank": 67, "score": 161510.55886893708 }, { "content": "fn hello_message(redirect_uri: &str, session_id: &str) -> String {\n\n format!(\n\n concat![\n\n \"☀️ Welcome to Zenith!\\n\",\n\n \"To proceed with database creation, open the following link:\\n\\n\",\n\n \" {redirect_uri}{session_id}\\n\\n\",\n\n \"It needs to be done once and we will send you '.pgpass' file,\\n\",\n\n \"which will allow you to access or create \",\n\n \"databases without opening your web browser.\"\n\n ],\n\n redirect_uri = redirect_uri,\n\n session_id = session_id,\n\n )\n\n}\n\n\n\n/// Create a TCP connection to a postgres database, authenticate with it, and receive the ReadyForQuery message\n\nasync fn connect_to_db(\n\n db_info: DatabaseInfo,\n\n) -> anyhow::Result<(String, tokio::net::TcpStream, CancelKeyData)> {\n\n // Make raw connection. 
When connect_raw finishes we've received ReadyForQuery.\n", "file_path": "proxy/src/proxy.rs", "rank": 68, "score": 161013.27348617557 }, { "content": "fn fill_rust_env_vars(cmd: &mut Command) -> &mut Command {\n\n let cmd = cmd.env_clear().env(\"RUST_BACKTRACE\", \"1\");\n\n\n\n let var = \"LLVM_PROFILE_FILE\";\n\n if let Some(val) = std::env::var_os(var) {\n\n cmd.env(var, val);\n\n }\n\n\n\n const RUST_LOG_KEY: &str = \"RUST_LOG\";\n\n if let Ok(rust_log_value) = std::env::var(RUST_LOG_KEY) {\n\n cmd.env(RUST_LOG_KEY, rust_log_value)\n\n } else {\n\n cmd\n\n }\n\n}\n", "file_path": "control_plane/src/lib.rs", "rank": 69, "score": 159589.74814081722 }, { "content": "///\n\n/// Scan a directory that contains PostgreSQL WAL files, for the end of WAL.\n\n/// If precise, returns end LSN (next insertion point, basically);\n\n/// otherwise, start of the last segment.\n\n/// Returns (0, 0) if there is no WAL.\n\n///\n\npub fn find_end_of_wal(\n\n data_dir: &Path,\n\n wal_seg_size: usize,\n\n precise: bool,\n\n start_lsn: Lsn, // start reading WAL at this point or later\n\n) -> Result<(XLogRecPtr, TimeLineID)> {\n\n let mut high_segno: XLogSegNo = 0;\n\n let mut high_tli: TimeLineID = 0;\n\n let mut high_ispartial = false;\n\n\n\n for entry in fs::read_dir(data_dir).unwrap().flatten() {\n\n let ispartial: bool;\n\n let entry_name = entry.file_name();\n\n let fname = entry_name.to_str().unwrap();\n\n /*\n\n * Check if the filename looks like an xlog file, or a .partial file.\n\n */\n\n if IsXLogFileName(fname) {\n\n ispartial = false;\n\n } else if IsPartialXLogFileName(fname) {\n", "file_path": "postgres_ffi/src/xlog_utils.rs", "rank": 70, "score": 159426.4365524808 }, { "content": "///\n\n/// Import all relation data pages from local disk into the repository.\n\n///\n\n/// This is currently only used to import a cluster freshly created by initdb.\n\n/// The code that deals with the checkpoint would not work right if the\n\n/// cluster was not shut down cleanly.\n\npub 
fn import_timeline_from_postgres_datadir(\n\n path: &Path,\n\n writer: &dyn TimelineWriter,\n\n lsn: Lsn,\n\n) -> Result<()> {\n\n let mut pg_control: Option<ControlFileData> = None;\n\n\n\n // Scan 'global'\n\n for direntry in fs::read_dir(path.join(\"global\"))? {\n\n let direntry = direntry?;\n\n match direntry.file_name().to_str() {\n\n None => continue,\n\n\n\n Some(\"pg_control\") => {\n\n pg_control = Some(import_control_file(writer, lsn, &direntry.path())?);\n\n }\n\n Some(\"pg_filenode.map\") => import_nonrel_file(\n\n writer,\n\n lsn,\n\n RelishTag::FileNodeMap {\n", "file_path": "pageserver/src/import_datadir.rs", "rank": 71, "score": 159420.55255089386 }, { "content": "pub fn attach_openapi_ui(\n\n router_builder: RouterBuilder<hyper::Body, ApiError>,\n\n spec: &'static [u8],\n\n spec_mount_path: &'static str,\n\n ui_mount_path: &'static str,\n\n) -> RouterBuilder<hyper::Body, ApiError> {\n\n router_builder.get(spec_mount_path, move |_| async move {\n\n Ok(Response::builder().body(Body::from(spec)).unwrap())\n\n }).get(ui_mount_path, move |_| async move {\n\n Ok(Response::builder().body(Body::from(format!(r#\"\n\n <!DOCTYPE html>\n\n <html lang=\"en\">\n\n <head>\n\n <title>rweb</title>\n\n <link href=\"https://cdn.jsdelivr.net/npm/swagger-ui-dist@3/swagger-ui.css\" rel=\"stylesheet\">\n\n </head>\n\n <body>\n\n <div id=\"swagger-ui\"></div>\n\n <script src=\"https://cdn.jsdelivr.net/npm/swagger-ui-dist@3/swagger-ui-bundle.js\" charset=\"UTF-8\"> </script>\n\n <script>\n", "file_path": "zenith_utils/src/http/endpoint.rs", "rank": 72, "score": 159415.6887108868 }, { "content": "pub fn serve_thread_main(\n\n router_builder: RouterBuilder<hyper::Body, ApiError>,\n\n listener: TcpListener,\n\n) -> anyhow::Result<()> {\n\n info!(\"Starting a http endpoint at {}\", listener.local_addr()?);\n\n\n\n // Create a Service from the router above to handle incoming requests.\n\n let service = RouterService::new(router_builder.build().map_err(|err| 
anyhow!(err))?).unwrap();\n\n\n\n // Enter a single-threaded tokio runtime bound to the current thread\n\n let runtime = tokio::runtime::Builder::new_current_thread()\n\n .enable_all()\n\n .build()?;\n\n\n\n let _guard = runtime.enter();\n\n\n\n let (send, recv) = tokio::sync::oneshot::channel::<()>();\n\n *SHUTDOWN_SENDER.lock().unwrap() = Some(send);\n\n\n\n let server = Server::from_tcp(listener)?\n\n .serve(service)\n\n .with_graceful_shutdown(async {\n\n recv.await.ok();\n\n });\n\n\n\n runtime.block_on(server)?;\n\n\n\n Ok(())\n\n}\n", "file_path": "zenith_utils/src/http/endpoint.rs", "rank": 73, "score": 159415.6887108868 }, { "content": "/// Based on the config, initiates the remote storage connection and starts a separate thread\n\n/// that ensures that pageserver and the remote storage are in sync with each other.\n\n/// If no external configuration connection given, no thread or storage initialization is done.\n\n/// Along with that, scans tenant files local and remote (if the sync gets enabled) to check the initial timeline states.\n\npub fn start_local_timeline_sync(\n\n config: &'static PageServerConf,\n\n shutdown_hook: sync::watch::Receiver<()>,\n\n) -> anyhow::Result<SyncStartupData> {\n\n let local_timeline_files = local_tenant_timeline_files(config)\n\n .context(\"Failed to collect local tenant timeline files\")?;\n\n\n\n match &config.remote_storage_config {\n\n Some(storage_config) => match &storage_config.storage {\n\n RemoteStorageKind::LocalFs(root) => storage_sync::spawn_storage_sync_thread(\n\n shutdown_hook,\n\n config,\n\n local_timeline_files,\n\n LocalFs::new(root.clone(), &config.workdir)?,\n\n storage_config.max_concurrent_sync,\n\n storage_config.max_sync_errors,\n\n ),\n\n RemoteStorageKind::AwsS3(s3_config) => storage_sync::spawn_storage_sync_thread(\n\n shutdown_hook,\n\n config,\n", "file_path": "pageserver/src/remote_storage.rs", "rank": 74, "score": 159415.6887108868 }, { "content": "fn parse_token(header_value: &str) -> 
Result<&str, ApiError> {\n\n // header must be in form Bearer <token>\n\n let (prefix, token) = header_value\n\n .split_once(' ')\n\n .ok_or_else(|| ApiError::Unauthorized(\"malformed authorization header\".to_string()))?;\n\n if prefix != \"Bearer\" {\n\n return Err(ApiError::Unauthorized(\n\n \"malformed authorization header\".to_string(),\n\n ));\n\n }\n\n Ok(token)\n\n}\n\n\n", "file_path": "zenith_utils/src/http/endpoint.rs", "rank": 75, "score": 157818.8436110345 }, { "content": "// Set the flag to inform connections to cancel\n\npub fn set_pgbackend_shutdown_requested() {\n\n PGBACKEND_SHUTDOWN_REQUESTED.swap(true, Ordering::Relaxed);\n\n}\n", "file_path": "zenith_utils/src/postgres_backend.rs", "rank": 76, "score": 157417.3848472791 }, { "content": "// Returns checkpoint LSN from controlfile\n\nfn get_lsn_from_controlfile(path: &Path) -> Result<Lsn> {\n\n // Read control file to extract the LSN\n\n let controlfile_path = path.join(\"global\").join(\"pg_control\");\n\n let controlfile = ControlFileData::decode(&fs::read(controlfile_path)?)?;\n\n let lsn = controlfile.checkPoint;\n\n\n\n Ok(Lsn(lsn))\n\n}\n\n\n", "file_path": "pageserver/src/branches.rs", "rank": 77, "score": 157287.91712170298 }, { "content": "// Truncate 0 from C string in Bytes and stringify it (returns slice, no allocations)\n\n// PG protocol strings are always C strings.\n\nfn cstr_to_str(b: &Bytes) -> Result<&str> {\n\n let without_null = if b.last() == Some(&0) {\n\n &b[..b.len() - 1]\n\n } else {\n\n &b[..]\n\n };\n\n std::str::from_utf8(without_null).map_err(|e| e.into())\n\n}\n\n\n\nimpl PostgresBackend {\n\n pub fn new(\n\n socket: TcpStream,\n\n auth_type: AuthType,\n\n tls_config: Option<Arc<rustls::ServerConfig>>,\n\n set_read_timeout: bool,\n\n ) -> io::Result<Self> {\n\n let peer_addr = socket.peer_addr()?;\n\n if set_read_timeout {\n\n socket\n\n .set_read_timeout(Some(Duration::from_secs(5)))\n", "file_path": "zenith_utils/src/postgres_backend.rs", "rank": 78, "score": 
157046.2840919069 }, { "content": "/// De-escape a possibly-quoted value.\n\n///\n\n/// See `DeescapeQuotedString` function in PostgreSQL sources for how PostgreSQL\n\n/// does this.\n\nfn deescape_str(s: &str) -> Result<String> {\n\n // If the string has a quote at the beginning and end, strip them out.\n\n if s.len() >= 2 && s.starts_with('\\'') && s.ends_with('\\'') {\n\n let mut result = String::new();\n\n\n\n let mut iter = s[1..(s.len() - 1)].chars().peekable();\n\n while let Some(c) = iter.next() {\n\n let newc = if c == '\\\\' {\n\n match iter.next() {\n\n Some('b') => '\\x08',\n\n Some('f') => '\\x0c',\n\n Some('n') => '\\n',\n\n Some('r') => '\\r',\n\n Some('t') => '\\t',\n\n Some('0'..='7') => {\n\n // TODO\n\n bail!(\"octal escapes not supported\");\n\n }\n\n Some(n) => n,\n\n None => break,\n", "file_path": "control_plane/src/postgresql_conf.rs", "rank": 79, "score": 157041.02021526278 }, { "content": "#[allow(non_snake_case)]\n\npub fn XLogFileName(tli: TimeLineID, logSegNo: XLogSegNo, wal_segsz_bytes: usize) -> String {\n\n return format!(\n\n \"{:>08X}{:>08X}{:>08X}\",\n\n tli,\n\n logSegNo / XLogSegmentsPerXLogId(wal_segsz_bytes),\n\n logSegNo % XLogSegmentsPerXLogId(wal_segsz_bytes)\n\n );\n\n}\n\n\n", "file_path": "postgres_ffi/src/xlog_utils.rs", "rank": 80, "score": 156879.6121455067 }, { "content": "pub fn shutdown_all_tenants() -> Result<()> {\n\n SHUTDOWN_REQUESTED.swap(true, Ordering::Relaxed);\n\n\n\n let tenantids = list_tenantids()?;\n\n\n\n for tenantid in &tenantids {\n\n set_tenant_state(*tenantid, TenantState::Stopping)?;\n\n }\n\n\n\n for tenantid in tenantids {\n\n // Wait for checkpointer and GC to finish their job\n\n tenant_threads::wait_for_tenant_threads_to_stop(tenantid);\n\n\n\n let repo = get_repository_for_tenant(tenantid)?;\n\n debug!(\"shutdown tenant {}\", tenantid);\n\n repo.shutdown()?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "pageserver/src/tenant_mgr.rs", "rank": 81, "score": 156324.35527900315 }, { "content": 
"/// Create new WAL record for non-transactional logical message.\n\n/// Used for creating artificial WAL for tests, as LogicalMessage\n\n/// record is basically no-op.\n\nfn encode_logical_message(prefix: &str, message: &str) -> Vec<u8> {\n\n let mut prefix_bytes = BytesMut::with_capacity(prefix.len() + 1);\n\n prefix_bytes.put(prefix.as_bytes());\n\n prefix_bytes.put_u8(0);\n\n\n\n let message_bytes = message.as_bytes();\n\n\n\n let logical_message = XlLogicalMessage {\n\n db_id: 0,\n\n transactional: 0,\n\n prefix_size: prefix_bytes.len() as u64,\n\n message_size: message_bytes.len() as u64,\n\n };\n\n\n\n let mainrdata = logical_message.encode();\n\n let mainrdata_len: usize = mainrdata.len() + prefix_bytes.len() + message_bytes.len();\n\n // only short mainrdata is supported for now\n\n assert!(mainrdata_len <= 255);\n\n let mainrdata_len = mainrdata_len as u8;\n\n\n", "file_path": "walkeeper/src/json_ctrl.rs", "rank": 82, "score": 156320.34748985805 }, { "content": "/// Adds the new checkpoint files as an upload sync task to the queue.\n\n/// On task failure, it gets retried again from the start a number of times.\n\n///\n\n/// Ensure that the loop is started otherwise the task is never processed.\n\npub fn schedule_timeline_checkpoint_upload(\n\n tenant_id: ZTenantId,\n\n timeline_id: ZTimelineId,\n\n layers: Vec<PathBuf>,\n\n metadata: TimelineMetadata,\n\n) {\n\n if layers.is_empty() {\n\n debug!(\"Skipping empty layers upload task\");\n\n return;\n\n }\n\n\n\n if !sync_queue::push(SyncTask::new(\n\n TimelineSyncId(tenant_id, timeline_id),\n\n 0,\n\n SyncKind::Upload(NewCheckpoint { layers, metadata }),\n\n )) {\n\n warn!(\n\n \"Could not send an upload task for tenant {}, timeline {}\",\n\n tenant_id, timeline_id\n\n )\n\n } else {\n\n warn!(\n\n \"Could not send an upload task for tenant {}, timeline {}: the sync queue is not initialized\",\n\n tenant_id, timeline_id\n\n )\n\n }\n\n}\n\n\n", "file_path": "pageserver/src/remote_storage/storage_sync.rs", 
"rank": 83, "score": 155509.12064569848 }, { "content": "pub fn get_request_param<'a>(\n\n request: &'a Request<Body>,\n\n param_name: &str,\n\n) -> Result<&'a str, ApiError> {\n\n match request.param(param_name) {\n\n Some(arg) => Ok(arg),\n\n None => {\n\n return Err(ApiError::BadRequest(format!(\n\n \"no {} specified in path param\",\n\n param_name\n\n )))\n\n }\n\n }\n\n}\n\n\n", "file_path": "zenith_utils/src/http/request.rs", "rank": 84, "score": 154229.48512095292 }, { "content": "pub fn get_current_timestamp() -> TimestampTz {\n\n const UNIX_EPOCH_JDATE: u64 = 2440588; /* == date2j(1970, 1, 1) */\n\n const POSTGRES_EPOCH_JDATE: u64 = 2451545; /* == date2j(2000, 1, 1) */\n\n const SECS_PER_DAY: u64 = 86400;\n\n const USECS_PER_SEC: u64 = 1000000;\n\n match SystemTime::now().duration_since(SystemTime::UNIX_EPOCH) {\n\n Ok(n) => {\n\n ((n.as_secs() - ((POSTGRES_EPOCH_JDATE - UNIX_EPOCH_JDATE) * SECS_PER_DAY))\n\n * USECS_PER_SEC\n\n + n.subsec_micros() as u64) as i64\n\n }\n\n Err(_) => panic!(\"SystemTime before UNIX EPOCH!\"),\n\n }\n\n}\n\n\n", "file_path": "postgres_ffi/src/xlog_utils.rs", "rank": 85, "score": 150322.91705576458 }, { "content": "/// Immediately terminate the calling process without calling\n\n/// atexit callbacks, C runtime destructors etc. We mainly use\n\n/// this to protect coverage data from concurrent writes.\n\npub fn exit_now(code: u8) {\n\n unsafe { nix::libc::_exit(code as _) };\n\n}\n", "file_path": "zenith_utils/src/shutdown.rs", "rank": 86, "score": 149743.778466007 }, { "content": "///\n\n/// Initialize the virtual file module. 
This must be called once at page\n\n/// server startup.\n\n///\n\npub fn init(num_slots: usize) {\n\n if OPEN_FILES.set(OpenFiles::new(num_slots)).is_err() {\n\n panic!(\"virtual_file::init called twice\");\n\n }\n\n}\n\n\n\nconst TEST_MAX_FILE_DESCRIPTORS: usize = 10;\n\n\n", "file_path": "pageserver/src/virtual_file.rs", "rank": 87, "score": 149733.816266988 }, { "content": "///\n\n/// Get a handle to the page cache.\n\n///\n\npub fn get() -> &'static PageCache {\n\n //\n\n // In unit tests, page server startup doesn't happen and no one calls\n\n // page_cache::init(). Initialize it here with a tiny cache, so that the\n\n // page cache is usable in unit tests.\n\n //\n\n if cfg!(test) {\n\n PAGE_CACHE.get_or_init(|| PageCache::new(TEST_PAGE_CACHE_SIZE))\n\n } else {\n\n PAGE_CACHE.get().expect(\"page cache not initialized\")\n\n }\n\n}\n\n\n\npub const PAGE_SZ: usize = postgres_ffi::pg_constants::BLCKSZ as usize;\n\nconst MAX_USAGE_COUNT: u8 = 5;\n\n\n\n///\n\n/// CacheKey uniquely identifies a \"thing\" to cache in the page cache.\n\n///\n", "file_path": "pageserver/src/page_cache.rs", "rank": 88, "score": 149733.816266988 }, { "content": "pub fn json_response<T: Serialize>(\n\n status: StatusCode,\n\n data: T,\n\n) -> Result<Response<Body>, ApiError> {\n\n let json = serde_json::to_string(&data).map_err(ApiError::from_err)?;\n\n let response = Response::builder()\n\n .status(status)\n\n .header(header::CONTENT_TYPE, \"application/json\")\n\n .body(Body::from(json))\n\n .map_err(ApiError::from_err)?;\n\n Ok(response)\n\n}\n", "file_path": "zenith_utils/src/http/json.rs", "rank": 89, "score": 147735.5124033803 }, { "content": "/// Can this request be served by zenith redo funcitons\n\n/// or we need to pass it to wal-redo postgres process?\n\nfn can_apply_in_zenith(rec: &ZenithWalRecord) -> bool {\n\n // Currently, we don't have bespoken Rust code to replay any\n\n // Postgres WAL records. 
But everything else is handled in zenith.\n\n #[allow(clippy::match_like_matches_macro)]\n\n match rec {\n\n ZenithWalRecord::Postgres {\n\n will_init: _,\n\n rec: _,\n\n } => false,\n\n _ => true,\n\n }\n\n}\n\n\n", "file_path": "pageserver/src/walredo.rs", "rank": 90, "score": 147160.21408284377 }, { "content": "// Wait for walreceiver to stop\n\n// Now it stops when pageserver shutdown is requested.\n\n// In future we can make this more granular and send shutdown signals\n\n// per tenant/timeline to cancel inactive walreceivers.\n\n// TODO deal with blocking pg connections\n\npub fn stop_wal_receiver(timelineid: ZTimelineId) {\n\n let mut receivers = WAL_RECEIVERS.lock();\n\n\n\n if let Some(r) = receivers.get_mut(&timelineid) {\n\n match r.wal_receiver_interrupt_sender.take() {\n\n Some(s) => {\n\n if s.send(()).is_err() {\n\n warn!(\"wal receiver interrupt signal already sent\");\n\n }\n\n }\n\n None => {\n\n warn!(\"wal_receiver_interrupt_sender is missing, wal recever shouldn't be running\")\n\n }\n\n }\n\n\n\n info!(\"waiting for wal receiver to stop\");\n\n let handle = r.wal_receiver_handle.take();\n\n // do not hold the lock while joining the handle (deadlock is possible otherwise)\n\n drop(receivers);\n\n // there is no timeout or try_join option available so in case of a bug this can hang forever\n\n handle.map(JoinHandle::join);\n\n }\n\n}\n\n\n", "file_path": "pageserver/src/walreceiver.rs", "rank": 91, "score": 145827.24820179964 }, { "content": "pub fn thread_main(conf: SafeKeeperConf) {\n\n // Create a new thread pool\n\n //\n\n // FIXME: keep it single-threaded for now, make it easier to debug with gdb,\n\n // and we're not concerned with performance yet.\n\n //let runtime = runtime::Runtime::new().unwrap();\n\n let runtime = runtime::Builder::new_current_thread()\n\n .enable_all()\n\n .build()\n\n .unwrap();\n\n\n\n info!(\"Starting S3 offload task\");\n\n\n\n runtime.block_on(async {\n\n main_loop(&conf).await.unwrap();\n\n 
});\n\n}\n\n\n\nasync fn offload_files(\n\n bucket: &Bucket,\n", "file_path": "walkeeper/src/s3_offload.rs", "rank": 92, "score": 145827.24820179964 }, { "content": "fn mx_offset_to_member_page(xid: u32) -> u32 {\n\n xid / pg_constants::MULTIXACT_MEMBERS_PER_PAGE as u32\n\n}\n\n\n", "file_path": "postgres_ffi/src/nonrelfile_utils.rs", "rank": 93, "score": 145393.26421226608 }, { "content": "pub fn connection_address(config: &Config) -> String {\n\n let (host, port) = connection_host_port(config);\n\n format!(\"{}:{}\", host, port)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_connection_host_port() {\n\n let config: Config = \"postgresql://no_user@localhost:64000/no_db\"\n\n .parse()\n\n .unwrap();\n\n assert_eq!(\n\n connection_host_port(&config),\n\n (\"localhost\".to_owned(), 64000)\n\n );\n\n }\n\n\n", "file_path": "zenith_utils/src/connstring.rs", "rank": 94, "score": 143801.07139074258 }, { "content": "fn parse_cmd(cmd: &str) -> Result<SafekeeperPostgresCommand> {\n\n if cmd.starts_with(\"START_WAL_PUSH\") {\n\n let re = Regex::new(r\"START_WAL_PUSH(?: (.+))?\").unwrap();\n\n\n\n let caps = re.captures(cmd).unwrap();\n\n let pageserver_connstr = caps.get(1).map(|m| m.as_str().to_owned());\n\n Ok(SafekeeperPostgresCommand::StartWalPush { pageserver_connstr })\n\n } else if cmd.starts_with(\"START_REPLICATION\") {\n\n let re =\n\n Regex::new(r\"START_REPLICATION(?: PHYSICAL)? 
([[:xdigit:]]+/[[:xdigit:]]+)\").unwrap();\n\n let mut caps = re.captures_iter(cmd);\n\n let start_lsn = caps\n\n .next()\n\n .map(|cap| cap[1].parse::<Lsn>())\n\n .ok_or_else(|| anyhow!(\"failed to parse start LSN from START_REPLICATION command\"))??;\n\n Ok(SafekeeperPostgresCommand::StartReplication { start_lsn })\n\n } else if cmd.starts_with(\"IDENTIFY_SYSTEM\") {\n\n Ok(SafekeeperPostgresCommand::IdentifySystem)\n\n } else if cmd.starts_with(\"JSON_CTRL\") {\n\n let cmd = cmd\n", "file_path": "walkeeper/src/handler.rs", "rank": 95, "score": 142952.3947620097 }, { "content": "fn archive_name(disk_consistent_lsn: Lsn, header_size: u64) -> String {\n\n let archive_name = format!(\n\n \"{:016X}{ARCHIVE_EXTENSION}{}\",\n\n u64::from(disk_consistent_lsn),\n\n header_size,\n\n ARCHIVE_EXTENSION = ARCHIVE_EXTENSION,\n\n );\n\n archive_name\n\n}\n\n\n\nasync fn uncompress_with_header(\n\n files_to_skip: &BTreeSet<PathBuf>,\n\n destination_dir: &Path,\n\n header: ArchiveHeader,\n\n archive_after_header: impl io::AsyncRead + Send + Sync + Unpin,\n\n) -> anyhow::Result<()> {\n\n debug!(\"Uncompressing archive into {}\", destination_dir.display());\n\n let mut archive = ZstdDecoder::new(io::BufReader::new(archive_after_header));\n\n\n\n if !destination_dir.exists() {\n", "file_path": "pageserver/src/remote_storage/storage_sync/compression.rs", "rank": 96, "score": 142719.7608083233 }, { "content": "pub fn wait_for_tenant_threads_to_stop(tenantid: ZTenantId) {\n\n let mut handles = TENANT_HANDLES.lock().unwrap();\n\n if let Some(h) = handles.get_mut(&tenantid) {\n\n h.checkpointer_handle.take().map(JoinHandle::join);\n\n trace!(\"checkpointer for tenant {} has stopped\", tenantid);\n\n h.gc_handle.take().map(JoinHandle::join);\n\n trace!(\"gc for tenant {} has stopped\", tenantid);\n\n }\n\n handles.remove(&tenantid);\n\n}\n\n\n", "file_path": "pageserver/src/tenant_threads.rs", "rank": 97, "score": 142257.54623546673 }, { "content": "/// Dump contents of a layer file to 
stdout.\n\npub fn dump_layerfile_from_path(path: &Path) -> Result<()> {\n\n let file = File::open(path)?;\n\n let book = Book::new(file)?;\n\n\n\n match book.magic() {\n\n delta_layer::DELTA_FILE_MAGIC => {\n\n DeltaLayer::new_for_path(path, &book)?.dump()?;\n\n }\n\n image_layer::IMAGE_FILE_MAGIC => {\n\n ImageLayer::new_for_path(path, &book)?.dump()?;\n\n }\n\n magic => bail!(\"unrecognized magic identifier: {:?}\", magic),\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "pageserver/src/layered_repository.rs", "rank": 98, "score": 141892.80718916195 }, { "content": "///\n\n/// Initialize the page cache. This must be called once at page server startup.\n\n///\n\npub fn init(conf: &'static PageServerConf) {\n\n if PAGE_CACHE\n\n .set(PageCache::new(conf.page_cache_size))\n\n .is_err()\n\n {\n\n panic!(\"page cache already initialized\");\n\n }\n\n}\n\n\n", "file_path": "pageserver/src/page_cache.rs", "rank": 99, "score": 141892.80718916195 } ]
Rust
src/main.rs
dbrgn/galerio
1fe5984f36f8362aca36aa03541f73322fb943c8
use std::{ fs, io::{self, Write}, path::{Path, PathBuf}, time::Instant, }; use anyhow::{anyhow, Result}; use exif::{In as IdfNum, Reader as ExifReader, Tag as ExifTag, Value as ExifValue}; use image::{self, imageops::FilterType, GenericImageView, ImageFormat}; use lazy_static::lazy_static; use serde::Serialize; use structopt::StructOpt; use tera::Tera; const NAME: &str = "galerio"; const VERSION: &str = env!("CARGO_PKG_VERSION"); lazy_static! { static ref START_TIME: Instant = Instant::now(); } fn log(msg: &str) { let start_time = *START_TIME; let elapsed = Instant::now().duration_since(start_time).as_millis(); println!("[+{:>4}ms] {}", elapsed, msg); } macro_rules! log { ($($arg:tt)*) => { log(&format!($($arg)*)); } } #[derive(Debug, StructOpt)] #[structopt(name = NAME)] struct Args { #[structopt(parse(from_os_str))] input_dir: PathBuf, #[structopt(parse(from_os_str))] output_dir: PathBuf, title: String, #[structopt(short = "h", long = "height", default_value = "300")] thumbnail_height: u32, #[structopt(short = "l", long = "max-large-size")] max_large_size: Option<u32>, #[structopt(long = "no-download")] no_download: bool, #[structopt(long)] skip_processing: bool, } #[derive(Serialize)] struct Image { filename_full: String, filename_thumb: String, } #[derive(Serialize)] struct Context { title: String, galerio_version: &'static str, isodate: String, download_filename: Option<String>, images: Vec<Image>, } fn get_dimensions(image_path: impl AsRef<Path>) -> Result<(u32, u32)> { let img = image::open(image_path)?; Ok(img.dimensions()) } fn resize_image( image_path: impl AsRef<Path>, max_width: u32, max_height: u32, orientation: &Orientation, ) -> Result<Vec<u8>> { let img = image::open(image_path)?; let resized = match orientation { Orientation::Deg0 => img, Orientation::Deg90 => img.rotate270(), Orientation::Deg180 => img.rotate180(), Orientation::Deg270 => img.rotate90(), } .resize(max_width, max_height, FilterType::CatmullRom); let mut buf = Vec::new(); 
resized.write_to(&mut buf, ImageFormat::Jpeg)?; Ok(buf) } #[derive(Copy, Clone, Debug, PartialEq, Eq)] enum Orientation { Deg0, Deg90, Deg180, Deg270, } fn get_orientation(image_path: impl AsRef<Path>) -> Result<Orientation> { let file = fs::File::open(&image_path)?; let orientation = ExifReader::new() .read_from_container(&mut std::io::BufReader::new(&file))? .get_field(ExifTag::Orientation, IdfNum::PRIMARY) .map(|field| field.value.clone()) .and_then(|val: ExifValue| { if let ExifValue::Short(data) = val { data.get(0).cloned() } else { None } }) .map(|orientation| match orientation { 1 => Orientation::Deg0, 8 => Orientation::Deg90, 3 => Orientation::Deg180, 6 => Orientation::Deg270, _ => Orientation::Deg0, }); Ok(orientation.unwrap_or(Orientation::Deg0)) } fn main() -> Result<()> { let args = Args::from_args(); log!("Starting..."); if !args.input_dir.exists() { return Err(anyhow!("Input directory does not exist")); } if !args.input_dir.is_dir() { return Err(anyhow!("Input directory path is not a directory")); } if !args.output_dir.exists() { log!("Creating output directory {:?}", args.output_dir); fs::create_dir_all(&args.output_dir)?; } log!("Input dir: {:?}", args.input_dir); log!("Output dir: {:?}", args.output_dir); let mut image_files = fs::read_dir(&args.input_dir)? 
.filter_map(|res| res.ok()) .filter(|dir_entry| { dir_entry .file_type() .map(|ft| ft.is_file()) .unwrap_or(false) }) .filter(|dir_entry| { dir_entry .file_name() .to_str() .map(|s| s.ends_with(".jpg") || s.ends_with(".JPG")) .unwrap_or(false) }) .map(|dir_entry| dir_entry.path()) .collect::<Vec<_>>(); image_files.sort(); let download_filename = if args.no_download { None } else { let name: String = args .title .chars() .map(|c| if c == ' ' { '_' } else { c }) .filter(|c| c.is_ascii_alphanumeric() || *c == '-' || *c == '_' || *c == '.') .collect(); Some(format!("{}.zip", name)) }; let mut images = Vec::with_capacity(image_files.len()); let mut zipfile = download_filename .as_ref() .and_then(|filename| Some(fs::File::create(args.output_dir.join(filename)).unwrap())) .map(zip::ZipWriter::new); for f in &image_files { let filename_full = f.file_name().unwrap().to_str().unwrap().to_string(); let filename_thumb = format!( "{}.thumb.jpg", f.file_stem() .and_then(|stem| stem.to_str()) .ok_or_else(|| anyhow!("Could not determine file stem for file {:?}", f))?, ); if !args.skip_processing { log!("Processing {:?}", filename_full); let orientation = get_orientation(&f)?; let thumbnail_bytes = resize_image( &f, args.thumbnail_height * 4, args.thumbnail_height, &orientation, )?; let thumbnail_path = args.output_dir.join(&filename_thumb); fs::write(thumbnail_path, thumbnail_bytes)?; let full_path = args.output_dir.join(&filename_full); if let Some(max_size) = args.max_large_size { let (w, h) = get_dimensions(&f)?; if w > max_size || h > max_size { let large_bytes = resize_image(&f, max_size, max_size, &orientation)?; fs::write(&full_path, large_bytes)?; } else { fs::copy(&f, &full_path)?; } } else { fs::copy(&f, &full_path)?; } let options = zip::write::FileOptions::default() .compression_method(zip::CompressionMethod::Stored); if let Some(ref mut zipwriter) = zipfile { zipwriter.start_file(&filename_full, options)?; zipwriter.write(&fs::read(&full_path)?)?; } } 
images.push(Image { filename_full, filename_thumb, }); } let context = Context { title: args.title.clone(), galerio_version: VERSION, images, download_filename, isodate: chrono::Utc::now().to_rfc3339(), }; let tera = Tera::new("templates/**/*.html")?; let rendered = tera.render("index.html", &tera::Context::from_serialize(&context)?)?; log!("Writing index.html"); fs::write(args.output_dir.join("index.html"), rendered)?; log!("Writing static files"); fs::create_dir(args.output_dir.join("static")).or_else(|e| { if e.kind() == io::ErrorKind::AlreadyExists { Ok(()) } else { Err(e) } })?; fs::write( args.output_dir.join("static/simple-lightbox.min.js"), include_bytes!("../static/simple-lightbox.min.js"), )?; fs::write( args.output_dir.join("static/simple-lightbox.min.css"), include_bytes!("../static/simple-lightbox.min.css"), )?; log!("Done!"); Ok(()) }
use std::{ fs, io::{self, Write}, path::{Path, PathBuf}, time::Instant, }; use anyhow::{anyhow, Result}; use exif::{In as IdfNum, Reader as ExifReader, Tag as ExifTag, Value as ExifValue}; use image::{self, imageops::FilterType, GenericImageView, ImageFormat}; use lazy_static::lazy_static; use serde::Serialize; use structopt::StructOpt; use tera::Tera; const NAME: &str = "galerio"; const VERSION: &str = env!("CARGO_PKG_VERSION"); lazy_static! { static ref START_TIME: Instant = Instant::now(); } fn log(msg: &str) { let start_time = *START_TIME; let elapsed = Instant::now().duration_since(start_time).as_millis(); println!("[+{:>4}ms] {}", elapsed, msg); } macro_rules! log { ($($arg:tt)*) => { log(&format!($($arg)*)); } } #[derive(Debug, StructOpt)] #[structopt(name = NAME)] struct Args { #[structopt(parse(from_os_str))] input_dir: PathBuf, #[structopt(parse(from_os_str))] output_dir: PathBuf, title: String, #[structopt(short = "h", long = "height", default_value = "300")] thumbnail_height: u32, #[structopt(short = "l", long = "max-large-size")] max_large_size: Option<u32>, #[structopt(long = "no-download")] no_download: bool, #[structopt(long)] skip_processing: bool, } #[derive(Serialize)] struct Image { filename_full: String, filename_thumb: String, } #[derive(Serialize)] struct Context { title: String, galerio_version: &'static str, isodate: String, download_filename: Option<String>, images: Vec<Image>, } fn get_dimensions(image_path: impl AsRef<Path>) -> Result<(u32, u32)> { let img = image::open(image_path)?; Ok(img.dimensions()) } fn resize_image( image_path: impl AsRef<Path>, max_width: u32, max_height: u32, orientation: &Orientation, ) -> Result<Vec<u8>> { let img = image::open(image_path)?; let resized = match orientation { Orientation::Deg0 => img, Orientation::Deg90 => img.rotate270(), Orientation::Deg180 => img.rotate180(), Orientation::Deg270 => img.rotate90(), } .resize(max_width, max_height, FilterType::CatmullRom); let mut buf = Vec::new(); 
resized.write_to(&mut buf, ImageFormat::Jpeg)?; Ok(buf) } #[derive(Copy, Clone, Debug, PartialEq, Eq)] enum Orientation { Deg0, Deg90, Deg180, Deg270, } fn get_orientation(image_path: impl AsRef<Path>) -> Result<Orientation> { let file = fs::File::open(&image_path)?; let orientation = ExifReader::new() .read_from_container(&mut std::io::BufReader::new(&file))? .get_field(ExifTag::Orientation, IdfNum::PRIMARY) .map(|field| field.value.clone()) .and_then(|val: ExifValue| { if let ExifValue::Short(data) = val { data.get(0).cloned() } else { None } }) .map(|
let full_path = args.output_dir.join(&filename_full); if let Some(max_size) = args.max_large_size { let (w, h) = get_dimensions(&f)?; if w > max_size || h > max_size { let large_bytes = resize_image(&f, max_size, max_size, &orientation)?; fs::write(&full_path, large_bytes)?; } else { fs::copy(&f, &full_path)?; } } else { fs::copy(&f, &full_path)?; } let options = zip::write::FileOptions::default() .compression_method(zip::CompressionMethod::Stored); if let Some(ref mut zipwriter) = zipfile { zipwriter.start_file(&filename_full, options)?; zipwriter.write(&fs::read(&full_path)?)?; } } images.push(Image { filename_full, filename_thumb, }); } let context = Context { title: args.title.clone(), galerio_version: VERSION, images, download_filename, isodate: chrono::Utc::now().to_rfc3339(), }; let tera = Tera::new("templates/**/*.html")?; let rendered = tera.render("index.html", &tera::Context::from_serialize(&context)?)?; log!("Writing index.html"); fs::write(args.output_dir.join("index.html"), rendered)?; log!("Writing static files"); fs::create_dir(args.output_dir.join("static")).or_else(|e| { if e.kind() == io::ErrorKind::AlreadyExists { Ok(()) } else { Err(e) } })?; fs::write( args.output_dir.join("static/simple-lightbox.min.js"), include_bytes!("../static/simple-lightbox.min.js"), )?; fs::write( args.output_dir.join("static/simple-lightbox.min.css"), include_bytes!("../static/simple-lightbox.min.css"), )?; log!("Done!"); Ok(()) }
orientation| match orientation { 1 => Orientation::Deg0, 8 => Orientation::Deg90, 3 => Orientation::Deg180, 6 => Orientation::Deg270, _ => Orientation::Deg0, }); Ok(orientation.unwrap_or(Orientation::Deg0)) } fn main() -> Result<()> { let args = Args::from_args(); log!("Starting..."); if !args.input_dir.exists() { return Err(anyhow!("Input directory does not exist")); } if !args.input_dir.is_dir() { return Err(anyhow!("Input directory path is not a directory")); } if !args.output_dir.exists() { log!("Creating output directory {:?}", args.output_dir); fs::create_dir_all(&args.output_dir)?; } log!("Input dir: {:?}", args.input_dir); log!("Output dir: {:?}", args.output_dir); let mut image_files = fs::read_dir(&args.input_dir)? .filter_map(|res| res.ok()) .filter(|dir_entry| { dir_entry .file_type() .map(|ft| ft.is_file()) .unwrap_or(false) }) .filter(|dir_entry| { dir_entry .file_name() .to_str() .map(|s| s.ends_with(".jpg") || s.ends_with(".JPG")) .unwrap_or(false) }) .map(|dir_entry| dir_entry.path()) .collect::<Vec<_>>(); image_files.sort(); let download_filename = if args.no_download { None } else { let name: String = args .title .chars() .map(|c| if c == ' ' { '_' } else { c }) .filter(|c| c.is_ascii_alphanumeric() || *c == '-' || *c == '_' || *c == '.') .collect(); Some(format!("{}.zip", name)) }; let mut images = Vec::with_capacity(image_files.len()); let mut zipfile = download_filename .as_ref() .and_then(|filename| Some(fs::File::create(args.output_dir.join(filename)).unwrap())) .map(zip::ZipWriter::new); for f in &image_files { let filename_full = f.file_name().unwrap().to_str().unwrap().to_string(); let filename_thumb = format!( "{}.thumb.jpg", f.file_stem() .and_then(|stem| stem.to_str()) .ok_or_else(|| anyhow!("Could not determine file stem for file {:?}", f))?, ); if !args.skip_processing { log!("Processing {:?}", filename_full); let orientation = get_orientation(&f)?; let thumbnail_bytes = resize_image( &f, args.thumbnail_height * 4, 
args.thumbnail_height, &orientation, )?; let thumbnail_path = args.output_dir.join(&filename_thumb); fs::write(thumbnail_path, thumbnail_bytes)?;
random
[ { "content": "# Galerio\n\n\n\nGalerio is a simple generator for HTML flexbox galleries written in Rust. From\n\na directory with JPEG files, it generates a self-contained gallery without\n\nexternal dependencies.\n\n\n\n## Features\n\n\n\n- Simple CSS3/Flexbox based gallery\n\n- Touch friendly lightbox for viewing images in full screen\n\n- ZIP download of entire gallery (can be turned off)\n\n\n\n## Building\n\n\n\n cargo build --release\n\n\n\nNote: Make sure to create a release build, otherwise processing will be insanely slow.\n\n\n\n## Usage\n\n\n\n galerio 0.1.0\n\n\n\n USAGE:\n\n galerio [FLAGS] [OPTIONS] <input-dir> <output-dir> <title>\n\n\n\n FLAGS:\n\n --help Prints help information\n\n --no-download Disallow full gallery download as ZIP\n\n --skip-processing Skip processing image files\n\n -V, --version Prints version information\n\n\n\n OPTIONS:\n\n -h, --height <thumbnail-height> Max thumbnail height in pixels [default: 300]\n\n\n\n ARGS:\n\n <input-dir> Input directory\n\n <output-dir> Output directory\n\n <title> Gallery title\n\n\n\nExamle:\n\n\n\n galerio /home/user/pictures/switzerland2020/ /srv/www/galleries/switzerland2020/ \"Switzerland 2020\"\n\n\n\n\n\n## License\n\n\n\nLicensed under either of\n\n\n\n * Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or\n\n http://www.apache.org/licenses/LICENSE-2.0)\n\n * MIT license ([LICENSE-MIT](LICENSE-MIT) or\n\n http://opensource.org/licenses/MIT) at your option.\n\n\n\n### Contribution\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally submitted\n\nfor inclusion in the work by you, as defined in the Apache-2.0 license, shall\n\nbe dual licensed as above, without any additional terms or conditions.\n", "file_path": "README.md", "rank": 9, "score": 16871.2103773781 } ]
Rust
oxidizer-entity-macro/src/utils.rs
TylerLafayette/oxidizer
c59ce9a50243f0eb35203d2d72fbc9ff36cb5afd
use proc_macro2::TokenStream; use quote::{quote, quote_spanned}; use syn::{ spanned::Spanned, AngleBracketedGenericArguments, Field, GenericArgument, Meta, Path, PathArguments, PathSegment, Type, TypePath, }; pub fn iterate_angle_bracketed( ab: &AngleBracketedGenericArguments, expected: &Vec<String>, index: usize, ) -> bool { let index = index; if expected.len() == index { return true; } for arg in &ab.args { let res = match arg { GenericArgument::Type(Type::Path(tp)) => check_type_order(tp, expected, index), _ => unimplemented!(), }; if res { return true; } } false } pub fn iterate_path_arguments(seg: &PathSegment, expected: &Vec<String>, index: usize) -> bool { let mut index = index; if expected.len() == index { return true; } if seg.ident.to_string() == expected[index] { index += 1; } if expected.len() == index { return true; } match &seg.arguments { PathArguments::AngleBracketed(angle) => iterate_angle_bracketed(angle, expected, index), PathArguments::Parenthesized(_paren) => unimplemented!(), PathArguments::None => expected.len() == index, } } pub fn iterate_path_segments(p: &Path, expected: &Vec<String>, index: usize) -> bool { let index = index; if expected.len() == index { return true; } for seg in p.segments.iter() { if iterate_path_arguments(seg, &expected, index) { return true; } } expected.len() == index } pub fn check_type_order(p: &TypePath, expected: &Vec<String>, index: usize) -> bool { let mut index = index; if expected.len() == index { return true; } if let Some(ident) = p.path.get_ident() { if ident.to_string() == expected[0] { index += 1; } } iterate_path_segments(&p.path, expected, index) } pub fn is_typed_with(segment: &PathSegment, expected: Vec<&str>) -> bool { let expected = expected.iter().map(|v| v.to_string()).collect(); iterate_path_arguments(segment, &expected, 0) } pub fn is_chrono_option(segment: &PathSegment) -> bool { let expected: Vec<&str> = vec!["Option", "DateTime", "Utc"]; let no_option_expected: Vec<&str> = vec!["DateTime", 
"Utc"]; is_typed_with(segment, expected) || is_typed_with(segment, no_option_expected) } pub fn search_attr_in_field(field: &Field, attr: &str) -> bool { for option in (&field.attrs).into_iter() { let option = option.parse_meta().unwrap(); match option { Meta::Path(path) if path.get_ident().unwrap().to_string() == attr => { return true; } _ => {} } } return false; } pub fn type_to_db_type(ty: &Type) -> TokenStream { let segments = match ty { syn::Type::Path(TypePath { path: Path { segments, .. }, .. }) => segments, _ => unimplemented!(), }; match segments.first().unwrap() { PathSegment { ident, .. } if ident.to_string() == "String" => { quote! { oxidizer::types::text() } } segment if is_typed_with(segment, vec!["Option", "String"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "i8" => { quote! { oxidizer::types::custom("char") } } segment if is_typed_with(segment, vec!["Option", "i8"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "i16" => { quote! { oxidizer::types::custom("SMALLINT") } } segment if is_typed_with(segment, vec!["Option", "i16"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "i32" => { quote! { oxidizer::types::integer() } } segment if is_typed_with(segment, vec!["Option", "i32"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "u32" => { quote! { oxidizer::types::custom("OID") } } segment if is_typed_with(segment, vec!["Option", "u32"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "i64" => { quote! { oxidizer::types::custom("BIGINT") } } segment if is_typed_with(segment, vec!["Option", "i64"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "f32" => { quote! { oxidizer::types::custom("REAL") } } segment if is_typed_with(segment, vec!["Option", "f32"]) => { quote! 
{ oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "f64" => { quote! { oxidizer::types::custom("DOUBLE PRECISION") } } segment if is_typed_with(segment, vec!["Option", "f64"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "bool" => { quote! { oxidizer::types::boolean() } } segment if is_typed_with(segment, vec!["Option", "bool"]) => { quote! { oxidizer::types::text() } } segment if is_chrono_option(segment) => { quote! { oxidizer::types::custom("timestamp with time zone") } } _ => quote_spanned! { ty.span() => compile_error!("Invalid type") }, } }
use proc_macro2::TokenStream; use quote::{quote, quote_spanned}; use syn::{ spanned::Spanned, AngleBracketedGenericArguments, Field, GenericArgument, Meta, Path, PathArguments, PathSegment, Type, TypePath, }; pub fn iterate_angle_bracketed( ab: &AngleBracketedGenericArguments, expected: &Vec<String>, index: usize, ) -> bool { let index = index; if expected.len() == index { return true; } for arg in &ab.args { let res = match arg { GenericArgument::Type(Type::Path(tp
egments.iter() { if iterate_path_arguments(seg, &expected, index) { return true; } } expected.len() == index } pub fn check_type_order(p: &TypePath, expected: &Vec<String>, index: usize) -> bool { let mut index = index; if expected.len() == index { return true; } if let Some(ident) = p.path.get_ident() { if ident.to_string() == expected[0] { index += 1; } } iterate_path_segments(&p.path, expected, index) } pub fn is_typed_with(segment: &PathSegment, expected: Vec<&str>) -> bool { let expected = expected.iter().map(|v| v.to_string()).collect(); iterate_path_arguments(segment, &expected, 0) } pub fn is_chrono_option(segment: &PathSegment) -> bool { let expected: Vec<&str> = vec!["Option", "DateTime", "Utc"]; let no_option_expected: Vec<&str> = vec!["DateTime", "Utc"]; is_typed_with(segment, expected) || is_typed_with(segment, no_option_expected) } pub fn search_attr_in_field(field: &Field, attr: &str) -> bool { for option in (&field.attrs).into_iter() { let option = option.parse_meta().unwrap(); match option { Meta::Path(path) if path.get_ident().unwrap().to_string() == attr => { return true; } _ => {} } } return false; } pub fn type_to_db_type(ty: &Type) -> TokenStream { let segments = match ty { syn::Type::Path(TypePath { path: Path { segments, .. }, .. }) => segments, _ => unimplemented!(), }; match segments.first().unwrap() { PathSegment { ident, .. } if ident.to_string() == "String" => { quote! { oxidizer::types::text() } } segment if is_typed_with(segment, vec!["Option", "String"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "i8" => { quote! { oxidizer::types::custom("char") } } segment if is_typed_with(segment, vec!["Option", "i8"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "i16" => { quote! { oxidizer::types::custom("SMALLINT") } } segment if is_typed_with(segment, vec!["Option", "i16"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. 
} if ident.to_string() == "i32" => { quote! { oxidizer::types::integer() } } segment if is_typed_with(segment, vec!["Option", "i32"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "u32" => { quote! { oxidizer::types::custom("OID") } } segment if is_typed_with(segment, vec!["Option", "u32"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "i64" => { quote! { oxidizer::types::custom("BIGINT") } } segment if is_typed_with(segment, vec!["Option", "i64"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "f32" => { quote! { oxidizer::types::custom("REAL") } } segment if is_typed_with(segment, vec!["Option", "f32"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "f64" => { quote! { oxidizer::types::custom("DOUBLE PRECISION") } } segment if is_typed_with(segment, vec!["Option", "f64"]) => { quote! { oxidizer::types::text() } } PathSegment { ident, .. } if ident.to_string() == "bool" => { quote! { oxidizer::types::boolean() } } segment if is_typed_with(segment, vec!["Option", "bool"]) => { quote! { oxidizer::types::text() } } segment if is_chrono_option(segment) => { quote! { oxidizer::types::custom("timestamp with time zone") } } _ => quote_spanned! { ty.span() => compile_error!("Invalid type") }, } }
)) => check_type_order(tp, expected, index), _ => unimplemented!(), }; if res { return true; } } false } pub fn iterate_path_arguments(seg: &PathSegment, expected: &Vec<String>, index: usize) -> bool { let mut index = index; if expected.len() == index { return true; } if seg.ident.to_string() == expected[index] { index += 1; } if expected.len() == index { return true; } match &seg.arguments { PathArguments::AngleBracketed(angle) => iterate_angle_bracketed(angle, expected, index), PathArguments::Parenthesized(_paren) => unimplemented!(), PathArguments::None => expected.len() == index, } } pub fn iterate_path_segments(p: &Path, expected: &Vec<String>, index: usize) -> bool { let index = index; if expected.len() == index { return true; } for seg in p.s
random
[ { "content": "type GetFieldsIter<'a> = std::iter::Filter<syn::punctuated::Iter<'a, Field>, fn(&&Field) -> bool>;\n\n\n\nimpl Props {\n\n pub fn new(\n\n input: DeriveInput,\n\n attrs: Option<EntityAttr>,\n\n indexes: Vec<IndexAttr>,\n\n has_many_attrs: Vec<HasManyAttr>,\n\n ) -> Self {\n\n Props {\n\n input: input,\n\n attrs: attrs,\n\n indexes: indexes,\n\n has_many_attrs: has_many_attrs,\n\n }\n\n }\n\n\n\n pub fn get_name(&self) -> &Ident {\n\n &self.input.ident\n\n }\n", "file_path": "oxidizer-entity-macro/src/props.rs", "rank": 3, "score": 138554.7077884895 }, { "content": "pub trait FieldExtras {\n\n fn is_primary_key(&self) -> bool;\n\n fn is_indexed(&self) -> bool;\n\n fn is_nullable(&self) -> bool;\n\n fn is_ignore(&self) -> bool;\n\n fn parse_relation(&self) -> Option<RelationAttr>;\n\n fn parse_custom_type(&self) -> Option<CustomTypeAttr>;\n\n fn get_db_type(&self) -> TokenStream2;\n\n fn get_type(&self) -> TokenStream2;\n\n}\n\n\n\nimpl FieldExtras for Field {\n\n fn is_primary_key(&self) -> bool {\n\n search_attr_in_field(self, \"primary_key\")\n\n }\n\n\n\n fn is_indexed(&self) -> bool {\n\n search_attr_in_field(self, \"indexed\")\n\n }\n\n\n", "file_path": "oxidizer-entity-macro/src/field_extras.rs", "rank": 9, "score": 65339.66516953744 }, { "content": "pub fn entity_macro(item: TokenStream) -> TokenStream {\n\n entity_builder::EntityBuilder::new().build(item)\n\n}\n", "file_path": "oxidizer-entity-macro/src/lib.rs", "rank": 10, "score": 58704.8322067707 }, { "content": "fn main() {}\n\n\n\nmod test {\n\n use super::*;\n\n\n\n // #[tokio::test]\n\n // async fn test_abc() {\n\n // let mut abc = ABC::default();\n\n // }|\n\n}\n", "file_path": "oxidizer-tests/src/main.rs", "rank": 11, "score": 40370.83869437498 }, { "content": "#[async_trait]\n\npub trait IEntity: Sized {\n\n async fn save(&mut self, db: &DB) -> DBResult<bool>;\n\n async fn delete(&mut self, db: &DB) -> DBResult<bool>;\n\n\n\n fn is_synced_with_db(&self) -> bool;\n\n\n\n fn 
from_row(row: &Row) -> DBResult<Self>;\n\n fn create_migration() -> DBResult<Migration>;\n\n fn get_table_name() -> String;\n\n\n\n async fn find(\n\n db: &DB,\n\n query: &str,\n\n params: &'_ [&'_ (dyn ToSql + Sync)],\n\n ) -> DBResult<Vec<Self>>;\n\n async fn first(\n\n db: &DB,\n\n query: &str,\n\n params: &'_ [&'_ (dyn ToSql + Sync)],\n\n ) -> DBResult<Option<Self>>;\n\n}\n", "file_path": "oxidizer/src/entity.rs", "rank": 12, "score": 36778.46919723596 }, { "content": "use darling::FromMeta;\n\nuse proc_macro2::TokenStream as TokenStream2;\n\nuse quote::{format_ident, quote, quote_spanned};\n\nuse syn::{spanned::Spanned, Field, Meta, Path, PathSegment, Type, TypePath};\n\n\n\nuse super::attrs::{CustomTypeAttr, RelationAttr};\n\nuse super::utils::search_attr_in_field;\n\nuse super::utils::type_to_db_type;\n\nuse super::utils::{check_type_order, iterate_path_arguments};\n\n\n", "file_path": "oxidizer-entity-macro/src/field_extras.rs", "rank": 13, "score": 24714.822613868542 }, { "content": " }\n\n None\n\n }\n\n\n\n fn is_nullable(&self) -> bool {\n\n match &self.ty {\n\n syn::Type::Path(tp) => {\n\n let expected: Vec<String> = vec![\"Option\".to_owned()];\n\n check_type_order(&tp, &expected, 0)\n\n }\n\n _ => false,\n\n }\n\n }\n\n\n\n fn get_type(&self) -> TokenStream2 {\n\n if let Some(ct) = self.parse_custom_type() {\n\n let ty = ct.ty;\n\n\n\n let ident = format_ident!(\"{}\", ty);\n\n\n", "file_path": "oxidizer-entity-macro/src/field_extras.rs", "rank": 14, "score": 24712.661585560087 }, { "content": " fn is_ignore(&self) -> bool {\n\n search_attr_in_field(self, \"field_ignore\")\n\n }\n\n\n\n fn parse_relation(&self) -> Option<RelationAttr> {\n\n for attr in (&self.attrs).into_iter() {\n\n let option = attr.parse_meta().unwrap();\n\n if let Ok(relation) = RelationAttr::from_meta(&option) {\n\n return Some(relation);\n\n }\n\n }\n\n None\n\n }\n\n\n\n fn parse_custom_type(&self) -> Option<CustomTypeAttr> {\n\n for attr in (&self.attrs).into_iter() {\n\n let 
option = attr.parse_meta().unwrap();\n\n if let Ok(ct) = CustomTypeAttr::from_meta(&option) {\n\n return Some(ct);\n\n }\n", "file_path": "oxidizer-entity-macro/src/field_extras.rs", "rank": 15, "score": 24707.79495117461 }, { "content": " let table_name_acessor = quote! { <#model_ident>::get_table_name() };\n\n\n\n return quote! {\n\n oxidizer::types::foreign(#table_name_acessor, #key)\n\n };\n\n }\n\n\n\n if let Some(ct) = self.parse_custom_type() {\n\n let ty = ct.ty;\n\n\n\n let ty: Type = match syn::parse_str(&ty) {\n\n Ok(t) => t,\n\n Err(_) => return quote_spanned! { ty.span() => compile_error!(\"Invalid type\") },\n\n };\n\n\n\n return type_to_db_type(&ty);\n\n }\n\n\n\n type_to_db_type(&self.ty)\n\n }\n\n}\n", "file_path": "oxidizer-entity-macro/src/field_extras.rs", "rank": 16, "score": 24705.122562614582 }, { "content": " return quote! { #ident };\n\n }\n\n\n\n let ty = &self.ty;\n\n\n\n quote! { #ty }\n\n }\n\n\n\n fn get_db_type(&self) -> TokenStream2 {\n\n if self.is_primary_key() {\n\n return quote! 
{\n\n oxidizer::types::primary()\n\n };\n\n }\n\n\n\n if let Some(relation) = self.parse_relation() {\n\n let model = relation.model;\n\n let key = relation.key;\n\n\n\n let model_ident = format_ident!(\"{}\", model);\n", "file_path": "oxidizer-entity-macro/src/field_extras.rs", "rank": 17, "score": 24700.339864740166 }, { "content": "#[derive(Default, Entity)]\n\n#[entity(table_name = \"custom2\")]\n\n#[index(name = \"myindex\", columns = \"name, date\", unique)]\n\n#[index(name = \"myindex2\", columns = \"email\", unique)]\n\nstruct TestCustomIndexes {\n\n #[primary_key]\n\n id: i32,\n\n\n\n name: String,\n\n date: String,\n\n email: String,\n\n}\n\n\n\n#[derive(Default, Entity)]\n\npub struct TestReverseRelation {\n\n #[primary_key]\n\n id: i32,\n\n\n\n #[relation(model = \"TestReverseRelationTarget\", key = \"id\")]\n\n entity_id: i32,\n\n}\n\n\n\n#[derive(Default, Entity)]\n\n#[has_many(model = \"TestReverseRelation\", field = \"entity_id\")]\n", "file_path": "oxidizer/src/tests_macro.rs", "rank": 18, "score": 23838.381676458794 }, { "content": " fields.map(|field| field.get_db_type()).collect()\n\n }\n\n\n\n pub fn get_fields_all_db_types(&self) -> Vec<TokenStream2> {\n\n self.build_db_types(self.get_fields_all())\n\n }\n\n\n\n pub fn get_primary_key_field(&self) -> Option<&Field> {\n\n self.get_fields_all().find(|field| field.is_primary_key())\n\n }\n\n\n\n pub fn get_fields_plain(&self) -> Vec<&Field> {\n\n self.get_fields_all()\n\n .filter(|field| {\n\n for option in (&field.attrs).into_iter() {\n\n let option = option.parse_meta().unwrap();\n\n match option {\n\n Meta::Path(path)\n\n if path.get_ident().unwrap().to_string() == \"primary_key\" =>\n\n {\n", "file_path": "oxidizer-entity-macro/src/props.rs", "rank": 20, "score": 15.30034014704787 }, { "content": "\n\n pub fn get_fields_all_types(&self) -> Vec<TokenStream2> {\n\n self.get_fields_all()\n\n .map(|field| field.get_type())\n\n .collect()\n\n }\n\n\n\n pub fn get_fields_all_nullable(&self) -> 
Vec<bool> {\n\n self.get_fields_all()\n\n .map(|field| field.is_nullable())\n\n .collect()\n\n }\n\n\n\n pub fn get_fields_all_indexed(&self) -> Vec<bool> {\n\n self.get_fields_all()\n\n .map(|field| field.is_indexed())\n\n .collect()\n\n }\n\n\n\n fn build_db_types(&self, fields: GetFieldsIter) -> Vec<TokenStream2> {\n", "file_path": "oxidizer-entity-macro/src/props.rs", "rank": 21, "score": 14.296557572421289 }, { "content": "use inflector::cases::snakecase::to_snake_case;\n\nuse proc_macro::TokenStream;\n\nuse proc_macro2::TokenStream as TokenStream2;\n\nuse quote::{quote, quote_spanned};\n\nuse syn::{\n\n punctuated::Punctuated, token::Comma, Data, DataStruct, DeriveInput, Field, Fields, Ident,\n\n Meta, Type,\n\n};\n\n\n\nuse super::attrs::HasManyAttr;\n\nuse super::attrs::{EntityAttr, IndexAttr};\n\nuse super::field_extras::*;\n\n\n\npub struct Props {\n\n input: DeriveInput,\n\n attrs: Option<EntityAttr>,\n\n indexes: Vec<IndexAttr>,\n\n has_many_attrs: Vec<HasManyAttr>,\n\n}\n\n\n", "file_path": "oxidizer-entity-macro/src/props.rs", "rank": 22, "score": 13.658667599950903 }, { "content": "use darling::FromMeta;\n\n\n\n#[derive(Debug, FromMeta)]\n\npub struct RelationAttr {\n\n pub model: String,\n\n pub key: String,\n\n}\n\n\n\n#[derive(Debug, FromMeta, Clone)]\n\npub struct IndexAttr {\n\n pub name: String,\n\n pub columns: String,\n\n #[darling(default)]\n\n pub unique: bool,\n\n}\n\n\n\n#[derive(Debug, FromMeta, Clone)]\n\npub struct EntityAttr {\n\n pub table_name: Option<String>,\n\n}\n", "file_path": "oxidizer-entity-macro/src/attrs.rs", "rank": 23, "score": 12.581791422941393 }, { "content": "use darling::FromMeta;\n\nuse inflector::cases::snakecase::to_snake_case;\n\nuse proc_macro::TokenStream;\n\nuse proc_macro2::TokenStream as TokenStream2;\n\nuse quote::{format_ident, quote, quote_spanned};\n\nuse syn::{parse_macro_input, spanned::Spanned, DeriveInput, Type};\n\n\n\nuse super::attrs::HasManyAttr;\n\nuse super::attrs::{EntityAttr, 
IndexAttr};\n\nuse super::field_extras::*;\n\nuse super::props::*;\n\n\n\npub struct EntityBuilder {}\n\n\n\nimpl EntityBuilder {\n\n pub fn new() -> Self {\n\n EntityBuilder {}\n\n }\n\n\n\n fn build_save_fn(&self, props: &Props) -> TokenStream2 {\n", "file_path": "oxidizer-entity-macro/src/entity_builder.rs", "rank": 24, "score": 12.47422643735618 }, { "content": "\n\n pub fn get_table_name(&self) -> String {\n\n let snaked_name = to_snake_case(&self.get_name().to_string());\n\n\n\n match self.attrs.as_ref() {\n\n Some(attrs) => match attrs.table_name.as_ref() {\n\n Some(name) => name.to_string(),\n\n None => snaked_name,\n\n },\n\n None => snaked_name,\n\n }\n\n }\n\n\n\n pub fn get_fields_all(&self) -> GetFieldsIter {\n\n let fields = match &self.input.data {\n\n Data::Struct(DataStruct {\n\n fields: Fields::Named(fields),\n\n ..\n\n }) => &fields.named,\n\n _ => panic!(\"expected a struct with named fields\"),\n", "file_path": "oxidizer-entity-macro/src/props.rs", "rank": 25, "score": 9.763382665067445 }, { "content": "//! #[oxidizer::async_trait]\n\n//! pub trait __AccessorHasManyTargetEntityToEntity {\n\n//! async fn get_all_test_entity(&self, db: &oxidizer::db::DB) -> oxidizer::db::DBResult<Vec<TestManyToMany>>;\n\n//! }\n\n//! 
```\n\n//!\n\n//!\n\n\n\npub mod db;\n\npub use db::*;\n\n\n\npub mod entity;\n\npub use entity::*;\n\n\n\npub mod migration;\n\n\n\n/// Re-export of [async_trait::async_trait](https://crates.io/crates/async-trait)\n\npub use async_trait::async_trait;\n\npub use tokio_postgres;\n\npub use tokio_postgres::types as db_types;\n", "file_path": "oxidizer/src/lib.rs", "rank": 26, "score": 9.582462882732115 }, { "content": " };\n\n\n\n fields.iter().filter(|field| !field.is_ignore())\n\n }\n\n\n\n pub fn get_ignored_fields(&self) -> GetFieldsIter {\n\n let fields = match &self.input.data {\n\n Data::Struct(DataStruct {\n\n fields: Fields::Named(fields),\n\n ..\n\n }) => &fields.named,\n\n _ => panic!(\"expected a struct with named fields\"),\n\n };\n\n\n\n fields.iter().filter(|field| field.is_ignore())\n\n }\n\n\n\n pub fn get_fields_all_names(&self) -> Vec<&Option<Ident>> {\n\n self.get_fields_all().map(|field| &field.ident).collect()\n\n }\n", "file_path": "oxidizer-entity-macro/src/props.rs", "rank": 27, "score": 9.226229157130852 }, { "content": " let fields_all_names = props.get_fields_all_names();\n\n let fields_all_db_types = props.get_fields_all_db_types();\n\n let fields_all_nullable = props.get_fields_all_nullable();\n\n let fields_all_indexed = props.get_fields_all_indexed();\n\n\n\n let indexes: Vec<TokenStream2> = props\n\n .get_indexes()\n\n .iter()\n\n .map(|index| {\n\n let index_name = &index.name;\n\n let columns: Vec<&str> = index.columns.split(\",\").map(|c| c.trim()).collect();\n\n let unique = index.unique;\n\n quote! 
{\n\n t.add_index(\n\n #index_name,\n\n oxidizer::types::index(vec![ #(#columns),* ]).unique(#unique)\n\n );\n\n }\n\n })\n\n .collect();\n", "file_path": "oxidizer-entity-macro/src/entity_builder.rs", "rank": 28, "score": 9.109397318552308 }, { "content": "\n\n#[derive(Debug, FromMeta, Clone)]\n\npub struct HasManyAttr {\n\n pub model: String,\n\n pub field: String,\n\n #[darling(default)]\n\n pub through: Option<String>,\n\n}\n\n\n\n#[derive(Debug, FromMeta)]\n\npub struct CustomTypeAttr {\n\n pub ty: String,\n\n}\n", "file_path": "oxidizer-entity-macro/src/attrs.rs", "rank": 29, "score": 8.958287284439526 }, { "content": "use crate as oxidizer;\n\nuse oxidizer::*;\n\n\n\nuse chrono::{DateTime, Utc};\n\n\n\n#[derive(Entity, Default)]\n\npub struct TestEntity {\n\n #[primary_key]\n\n id: i32,\n\n name: String,\n\n\n\n #[indexed]\n\n integer: i32,\n\n integer64: i64,\n\n\n\n float: f32,\n\n double: f64,\n\n\n\n boolean: bool,\n\n\n\n datetime: Option<DateTime<Utc>>,\n\n}\n\n\n\n#[derive(Entity)]\n", "file_path": "oxidizer/src/tests_macro.rs", "rank": 30, "score": 8.73837002423275 }, { "content": " }\n\n\n\n fn build_delete_fn(&self, props: &Props) -> TokenStream2 {\n\n let primary_key_ident = &props.get_primary_key_field().unwrap().ident;\n\n let primary_key_type = &props.get_primary_key_field().unwrap().ty;\n\n let table_name = props.get_table_name();\n\n quote! {\n\n async fn delete(&mut self, db: &oxidizer::db::DB) -> oxidizer::db::DBResult<bool> {\n\n let key_default: #primary_key_type = Default::default();\n\n if self.#primary_key_ident == key_default {\n\n return Ok(false);\n\n }\n\n\n\n let condition = format!(\"{} = $1\", stringify!(#primary_key_ident));\n\n let query_str = format!(\"DELETE FROM {} WHERE {}\", #table_name, condition);\n\n match db.execute(&query_str, &[&self.#primary_key_ident]).await? 
{\n\n 0 => Ok(false),\n\n _ => {\n\n self.#primary_key_ident = 0;\n\n Ok(true)\n", "file_path": "oxidizer-entity-macro/src/entity_builder.rs", "rank": 31, "score": 8.655594243044336 }, { "content": " return false;\n\n }\n\n _ => {}\n\n }\n\n }\n\n return true;\n\n })\n\n .collect()\n\n }\n\n\n\n pub fn get_fields_plain_names(&self) -> Vec<&Option<Ident>> {\n\n self.get_fields_plain()\n\n .iter()\n\n .map(|field| &field.ident)\n\n .collect()\n\n }\n\n\n\n pub fn get_fields_plain_numbered(&self) -> Vec<String> {\n\n self.get_fields_plain_names()\n\n .iter()\n", "file_path": "oxidizer-entity-macro/src/props.rs", "rank": 32, "score": 8.41749424906979 }, { "content": "//! create reverse relation accessors\n\n//!\n\n//! ### #[primary_key]\n\n//! Required\n\n//! Field attribute used to mark the field as the primary key, this will make the field autoincrement\n\n//!\n\n//! ```\n\n//! use oxidizer::*;\n\n//! #[derive(Entity)]\n\n//! struct Entity {\n\n//! #[primary_key]\n\n//! id: i32\n\n//! }\n\n//! ```\n\n//!\n\n//! ### #[indexed]\n\n//! Make the specified field indexed in the db\n\n//!\n\n//! ```\n\n//! use oxidizer::*;\n", "file_path": "oxidizer/src/lib.rs", "rank": 33, "score": 8.075743815949016 }, { "content": " },\n\n }\n\n }\n\n }\n\n }\n\n\n\n fn build_is_synced_with_db_fn(&self, props: &Props) -> TokenStream2 {\n\n let primary_key_ident = &props.get_primary_key_field().unwrap().ident;\n\n let primary_key_type = &props.get_primary_key_field().unwrap().ty;\n\n quote! 
{\n\n fn is_synced_with_db(&self) -> bool {\n\n let key_default: #primary_key_type = Default::default();\n\n self.#primary_key_ident != key_default\n\n }\n\n }\n\n }\n\n\n\n fn build_foreign_helpers(&self, props: &Props) -> Vec<TokenStream2> {\n\n let name = props.get_name();\n\n\n", "file_path": "oxidizer-entity-macro/src/entity_builder.rs", "rank": 34, "score": 7.93878710202542 }, { "content": " .enumerate()\n\n .map(|(i, _)| \"$\".to_string() + &(i + 1).to_string())\n\n .collect()\n\n }\n\n\n\n pub fn get_fields_plain_numbered_next_index(&self) -> String {\n\n (self.get_fields_plain_numbered().len() + 1).to_string()\n\n }\n\n\n\n pub fn check(&self) -> Option<TokenStream> {\n\n if let None = self.get_primary_key_field() {\n\n return Some(TokenStream::from(\n\n quote! { compile_error!(\"No primary key defined\") },\n\n ));\n\n }\n\n\n\n if self\n\n .get_fields_all()\n\n .filter(|field| field.is_primary_key())\n\n .count()\n", "file_path": "oxidizer-entity-macro/src/props.rs", "rank": 35, "score": 7.5776459962640486 }, { "content": " .map(|field| &field.ident)\n\n .collect();\n\n let fields_ignored_types: Vec<&syn::Type> =\n\n props.get_ignored_fields().map(|field| &field.ty).collect();\n\n\n\n quote! 
{\n\n fn from_row(row: &oxidizer::tokio_postgres::Row) -> oxidizer::db::DBResult<Self> {\n\n let mut obj: Self = Self{\n\n #( #fields_all_loaders )*\n\n #(\n\n #fields_ignored_names: <#fields_ignored_types>::default(),\n\n )*\n\n };\n\n Ok(obj)\n\n }\n\n }\n\n }\n\n\n\n fn build_create_migration_fn(&self, props: &Props) -> TokenStream2 {\n\n let table_name = props.get_table_name();\n", "file_path": "oxidizer-entity-macro/src/entity_builder.rs", "rank": 36, "score": 7.313800026139698 }, { "content": "\n\n let comma_after_default = match fields_plain_names.len() {\n\n 0 => \"\",\n\n _ => \",\",\n\n };\n\n\n\n let numbered = props.get_fields_plain_numbered();\n\n let fields_plain_numbered: Vec<String> = numbered\n\n .iter()\n\n .enumerate()\n\n .map(|(i, v)| {\n\n if i == numbered.len() - 1 {\n\n return v.to_string();\n\n }\n\n return format!(\"{},\", v);\n\n })\n\n .collect();\n\n let fields_plain_numbered_next_index = props.get_fields_plain_numbered_next_index();\n\n\n\n let primary_key = props.get_primary_key_field().unwrap();\n", "file_path": "oxidizer-entity-macro/src/entity_builder.rs", "rank": 37, "score": 7.294697705016789 }, { "content": "pub mod db;\n\npub use db::DB;\n\npub mod error;\n\npub use error::*;\n\npub mod test_utils;\n", "file_path": "oxidizer/src/db/mod.rs", "rank": 38, "score": 7.123525862589904 }, { "content": " #[indexed]\n\n integer: i32,\n\n integer64: i64,\n\n\n\n float: f32,\n\n double: f64,\n\n\n\n boolean: bool,\n\n\n\n datetime: Option<DateTime<Utc>>,\n\n\n\n new_field: bool,\n\n }\n\n\n\n db.migrate_tables(&[TestEntityChanged::create_migration().unwrap()])\n\n .await\n\n .unwrap();\n\n}\n\n\n\n#[tokio::test]\n", "file_path": "oxidizer/src/tests_macro.rs", "rank": 39, "score": 7.123446416456046 }, { "content": "\n\npub use barrel::types;\n\n\n\npub use oxidizer_entity_macro::*;\n\n\n\n#[cfg(test)]\n\nmod tests_macro;\n\n\n\n#[cfg(test)]\n\nmod migrations;\n\n\n\npub use std::convert::TryFrom;\n", "file_path": "oxidizer/src/lib.rs", 
"rank": 40, "score": 6.885665004604991 }, { "content": " data: i32,\n\n}\n\n\n\n#[derive(Entity, Default)]\n\npub struct TestIgnoreField {\n\n #[primary_key]\n\n id: i32,\n\n name: String,\n\n\n\n #[field_ignore]\n\n ignored: TestIgnoredType,\n\n}\n\n\n\n#[derive(PartialEq, Debug)]\n\npub enum MyEnum {\n\n Item1,\n\n Item2,\n\n}\n\n\n\nimpl Default for MyEnum {\n", "file_path": "oxidizer/src/tests_macro.rs", "rank": 41, "score": 6.8688615940187185 }, { "content": " fn try_from(v: i32) -> Result<Self, Self::Error> {\n\n match v {\n\n 0 => Ok(MyEnum::Item1),\n\n 1 => Ok(MyEnum::Item2),\n\n _ => Err(ConvertError::Error),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Entity, Default)]\n\npub struct TestCustomType {\n\n #[primary_key]\n\n id: i32,\n\n\n\n #[custom_type(ty = \"i32\")]\n\n my_enum: MyEnum,\n\n}\n\n\n", "file_path": "oxidizer-tests/src/main.rs", "rank": 42, "score": 6.817517284921439 }, { "content": "\n\n let mut indexes: Vec<IndexAttr> = vec![];\n\n\n\n let mut has_many_attrs: Vec<HasManyAttr> = vec![];\n\n\n\n for option in input.attrs.iter() {\n\n let option = option.parse_meta().unwrap();\n\n if let Ok(v) = EntityAttr::from_meta(&option) {\n\n attrs = Some(v);\n\n }\n\n\n\n if let Ok(v) = IndexAttr::from_meta(&option) {\n\n indexes.push(v);\n\n }\n\n\n\n if let Ok(v) = HasManyAttr::from_meta(&option) {\n\n has_many_attrs.push(v);\n\n }\n\n }\n\n\n", "file_path": "oxidizer-entity-macro/src/entity_builder.rs", "rank": 43, "score": 6.797591823109856 }, { "content": " }\n\n\n\n pub fn get_indexes(&self) -> Vec<IndexAttr> {\n\n self.indexes.clone()\n\n }\n\n\n\n pub fn get_has_many_attrs(&self) -> Vec<HasManyAttr> {\n\n self.has_many_attrs.clone()\n\n }\n\n}\n", "file_path": "oxidizer-entity-macro/src/props.rs", "rank": 44, "score": 6.794544567897083 }, { "content": "use proc_macro::TokenStream;\n\n\n\nmod attrs;\n\nmod entity_builder;\n\nmod field_extras;\n\nmod props;\n\nmod utils;\n\n\n\n/// Entity derive macro\n\n#[proc_macro_derive(\n\n Entity,\n\n attributes(\n\n 
primary_key,\n\n indexed,\n\n relation,\n\n entity,\n\n index,\n\n has_many,\n\n field_ignore,\n\n custom_type\n\n )\n\n)]\n", "file_path": "oxidizer-entity-macro/src/lib.rs", "rank": 46, "score": 6.660379745681707 }, { "content": "//! #[derive(Entity)]\n\n//! #[derive(Default)]\n\n//! pub struct MyEntity {\n\n//! #[primary_key]\n\n//! id: i32,\n\n//!\n\n//! name: String,\n\n//!\n\n//! #[indexed]\n\n//! integer: i32,\n\n//! integer64: i64,\n\n//!\n\n//! float: f32,\n\n//! double: f64,\n\n//!\n\n//! boolean: bool,\n\n//!\n\n//! datetime: Option<DateTime<Utc>>,\n\n//! }\n\n//!\n", "file_path": "oxidizer/src/lib.rs", "rank": 47, "score": 6.572298655826778 }, { "content": "//! }\n\n//! ```\n\n//!\n\n//! ### #[custom_type]\n\n//! The custom type attribute lets you override the default type provided by oxidizer.\n\n//!\n\n//! ```\n\n//! use oxidizer::*;\n\n//! pub enum MyEnum {\n\n//! Item1,\n\n//! Item2,\n\n//! }\n\n//!\n\n//! pub enum ConvertError {\n\n//! Error\n\n//! }\n\n//!\n\n//! impl std::fmt::Display for ConvertError {\n\n//! fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n//! f.write_str(\"Error trying to convert\")\n", "file_path": "oxidizer/src/lib.rs", "rank": 48, "score": 6.4810271820984475 }, { "content": " let foreign_fields = props.get_fields_foreign();\n\n\n\n foreign_fields.iter().map(|field| {\n\n let relation = field.parse_relation().unwrap();\n\n let local_key = field.ident.clone().unwrap();\n\n let local_key_type = &field.ty;\n\n let get_ident = format_ident!(\"get_{}\", to_snake_case(&relation.model));\n\n let set_ident = format_ident!(\"set_{}\", to_snake_case(&relation.model));\n\n let trait_ident = format_ident!(\"__Accessor{}To{}\", name, relation.model);\n\n let model = format_ident!(\"{}\", relation.model);\n\n let key = format_ident!(\"{}\", relation.key);\n\n\n\n let local_key_set = match field.is_nullable() {\n\n true => quote! {\n\n self.#local_key = Some(v.#key);\n\n },\n\n false => quote! 
{\n\n self.#local_key = v.#key;\n\n },\n\n };\n", "file_path": "oxidizer-entity-macro/src/entity_builder.rs", "rank": 49, "score": 6.375419036242511 }, { "content": " let primary_key_ident = &primary_key.ident;\n\n let primary_key_type = &primary_key.ty;\n\n\n\n quote! {\n\n async fn save(&mut self, db: &oxidizer::db::DB) -> oxidizer::db::DBResult<bool> {\n\n let mut creating = false;\n\n let primary_key_default: #primary_key_type = Default::default();\n\n let _result = match self.#primary_key_ident {\n\n v if self.#primary_key_ident == primary_key_default => {\n\n creating = true;\n\n let query = concat!(\n\n \"INSERT INTO \",\n\n #table_name,\n\n \" (\",\n\n stringify!(#primary_key_ident),\n\n #comma_after_default,\n\n stringify!(#(#fields_plain_names),*),\n\n \") values(DEFAULT\",\n\n #comma_after_default,\n\n #( #fields_plain_numbered ,)*\n", "file_path": "oxidizer-entity-macro/src/entity_builder.rs", "rank": 50, "score": 6.289746903974384 }, { "content": "pub use refinery::include_migration_mods;\n\npub use refinery::*;\n\n\n\n/// Migration abstract layer\n\npub struct Migration {\n\n pub name: String,\n\n\n\n pub raw: RawMigration,\n\n}\n\n\n\nimpl Migration {\n\n /// Creates a new migration\n\n pub fn new(name: &str) -> Self {\n\n Migration {\n\n name: name.to_string(),\n\n\n\n raw: RawMigration::new(),\n\n }\n\n }\n\n\n", "file_path": "oxidizer/src/migration.rs", "rank": 51, "score": 6.274453857436976 }, { "content": " #[derive(Entity)]\n\n #[entity(table_name = \"test_entity\")]\n\n struct TestEntityChanged {\n\n #[primary_key]\n\n id: i32,\n\n name: String,\n\n\n\n #[indexed]\n\n integer: i32,\n\n integer64: i64,\n\n\n\n float: f32,\n\n double: f64,\n\n\n\n boolean: bool,\n\n\n\n datetime: Option<DateTime<Utc>>,\n\n }\n\n\n\n // Hash should match\n", "file_path": "oxidizer/src/tests_macro.rs", "rank": 52, "score": 6.177486621496001 }, { "content": "//!\n\n//! ```\n\n//! use oxidizer::*;\n\n//! #[derive(Entity)]\n\n//! 
#[entity(table_name=\"custom_table_name\")]\n\n//! struct Entity {\n\n//! #[primary_key]\n\n//! id: i32\n\n//! }\n\n//! ```\n\n//!\n\n//! ### #[index]\n\n//! Creates a custom index/constraint on one or more column\n\n//!\n\n//! ```\n\n//! use oxidizer::*;\n\n//! #[derive(Default, Entity)]\n\n//! #[index(name=\"myindex\", columns=\"name, email\", unique)]\n\n//! struct MyEntity {\n\n//! #[primary_key]\n", "file_path": "oxidizer/src/lib.rs", "rank": 53, "score": 5.90917815941099 }, { "content": " \"= $\",\n\n #fields_plain_numbered_next_index\n\n );\n\n db.execute(\n\n query,\n\n &[#( #fields_plain_value_acessors,)* &self.#primary_key_ident],\n\n ).await?\n\n }\n\n };\n\n\n\n Ok(creating)\n\n }\n\n }\n\n }\n\n\n\n fn build_from_row_fn(&self, props: &Props) -> TokenStream2 {\n\n let fields_all_loaders: Vec<TokenStream2> = props\n\n .get_fields_all()\n\n .map(|field| {\n\n let name = &field.ident;\n", "file_path": "oxidizer-entity-macro/src/entity_builder.rs", "rank": 54, "score": 5.875910783487832 }, { "content": "\n\n let ty = field.get_type();\n\n\n\n let mut converter = quote! {};\n\n let mut converter_pos = quote! {};\n\n\n\n if let Some(_) = field.parse_custom_type() {\n\n let custom_ty = &field.ty;\n\n converter = quote! { <#custom_ty>::try_from };\n\n converter_pos = quote! {?};\n\n }\n\n\n\n quote! 
{\n\n #name: #converter(row.get::<&str, #ty>(concat!(stringify!(#name))))#converter_pos,\n\n }\n\n })\n\n .collect();\n\n\n\n let fields_ignored_names: Vec<&Option<syn::Ident>> = props\n\n .get_ignored_fields()\n", "file_path": "oxidizer-entity-macro/src/entity_builder.rs", "rank": 55, "score": 5.840989171845891 }, { "content": "use tokio_postgres::Row;\n\n\n\nuse super::async_trait;\n\nuse super::db::{DBResult, DB};\n\nuse super::db_types::ToSql;\n\nuse super::migration::Migration;\n\n\n\n/// Trait implemented by all derived Entitities\n\n#[async_trait]\n", "file_path": "oxidizer/src/entity.rs", "rank": 56, "score": 5.829897786848244 }, { "content": " MyEnum::Item1 => Ok(0),\n\n MyEnum::Item2 => Ok(1),\n\n }\n\n }\n\n}\n\n\n\nimpl std::convert::TryFrom<i32> for MyEnum {\n\n type Error = ConvertError;\n\n\n\n fn try_from(v: i32) -> Result<Self, Self::Error> {\n\n match v {\n\n 0 => Ok(MyEnum::Item1),\n\n 1 => Ok(MyEnum::Item2),\n\n _ => Err(ConvertError::Error),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Entity, Default)]\n\npub struct TestCustomType {\n", "file_path": "oxidizer/src/tests_macro.rs", "rank": 58, "score": 5.691704584626784 }, { "content": " // eprintln!(\"{:#?}\", input);\n\n // eprintln!(\"{:#?}\", attrs);\n\n\n\n let props = Props::new(input, attrs, indexes, has_many_attrs);\n\n\n\n if let Some(ts) = props.check() {\n\n return ts;\n\n }\n\n\n\n let save_fn = self.build_save_fn(&props);\n\n let delete_fn = self.build_delete_fn(&props);\n\n let is_synced_with_db = self.build_is_synced_with_db_fn(&props);\n\n let from_row_fn = self.build_from_row_fn(&props);\n\n let create_migration_fn = self.build_create_migration_fn(&props);\n\n let find_fn = self.build_find_fn(&props);\n\n let first_fn = self.build_first_fn(&props);\n\n\n\n let name = props.get_name();\n\n let table_name = props.get_table_name();\n\n\n", "file_path": "oxidizer-entity-macro/src/entity_builder.rs", "rank": 60, "score": 5.4351833701637755 }, { "content": "use oxidizer::*;\n\n\n\n// mod 
tmp;\n\n\n\n#[derive(PartialEq, Debug)]\n\npub enum MyEnum {\n\n Item1,\n\n Item2,\n\n}\n\n\n\nimpl Default for MyEnum {\n\n fn default() -> Self {\n\n MyEnum::Item1\n\n }\n\n}\n\n\n\npub enum ConvertError {\n\n Error,\n\n}\n\n\n", "file_path": "oxidizer-tests/src/main.rs", "rank": 61, "score": 5.3415829509478066 }, { "content": "use async_trait::async_trait;\n\nuse mobc::Manager;\n\nuse mobc::Pool;\n\nuse openssl::ssl::{SslConnector, SslMethod};\n\nuse postgres_openssl::MakeTlsConnector;\n\nuse refinery::{Report, Runner};\n\nuse std::str::FromStr;\n\n\n\nuse super::super::migration::Migration;\n\nuse super::error::*;\n\n\n\nuse barrel::backend::Pg;\n\nuse tokio_postgres::{\n\n row::Row,\n\n tls::{MakeTlsConnect, TlsConnect},\n\n types::ToSql,\n\n Client, Config, NoTls, Socket,\n\n};\n\n\n\npub struct ConnectionManager<Tls> {\n", "file_path": "oxidizer/src/db/db.rs", "rank": 62, "score": 5.252524365927645 }, { "content": " db.execute(&query, &[&value]).await.unwrap();\n\n\n\n let result = TestCustomType::first(&db, \"id = $1\", &[&obj.id]).await;\n\n assert_eq!(true, result.is_err());\n\n\n\n let query = format!(\n\n \"update {} set my_enum = $1\",\n\n TestCustomType::get_table_name()\n\n );\n\n let value: i32 = 0;\n\n db.execute(&query, &[&value]).await.unwrap();\n\n\n\n let result = TestCustomType::first(&db, \"id = $1\", &[&obj.id]).await;\n\n assert_eq!(true, result.is_ok());\n\n}\n", "file_path": "oxidizer/src/tests_macro.rs", "rank": 63, "score": 5.158683280134882 }, { "content": "\n\n quote! 
{\n\n fn create_migration() -> oxidizer::db::DBResult<oxidizer::migration::Migration> {\n\n let mut m = oxidizer::migration::Migration::new(#table_name);\n\n m.raw.create_table(#table_name, |t| {\n\n #(t\n\n .add_column(\n\n stringify!(#fields_all_names),\n\n #fields_all_db_types\n\n .nullable(#fields_all_nullable)\n\n .indexed(#fields_all_indexed)\n\n )\n\n ;)*\n\n\n\n #(#indexes)*\n\n });\n\n\n\n Ok(m)\n\n }\n\n }\n", "file_path": "oxidizer-entity-macro/src/entity_builder.rs", "rank": 64, "score": 5.034966452843239 }, { "content": "//! # Oxidizer\n\n//! A simple orm based on [tokio-postgres](https://crates.io/crates/tokio-postgres) and [refinery](https://crates.io/crates/refinery)\n\n//! ```ignore\n\n//! #[async_trait]\n\n//! pub trait Entity: Sized {\n\n//! async fn save(&mut self, db: &DB) -> DBResult<bool>;\n\n//! async fn delete(&mut self, db: &DB) -> DBResult<bool>;\n\n//!\n\n//! fn from_row(row: &Row) -> Self;\n\n//! fn create_migration() -> DBResult<Migration>;\n\n//! fn get_table_name() -> String;\n\n//!\n\n//! async fn find(db: &DB, query: &str, params: &'_ [&'_ (dyn ToSql + Sync)]) -> DBResult<Vec<Self>>;\n\n//! async fn first(db: &DB, query: &str, params: &'_ [&'_ (dyn ToSql + Sync)]) -> DBResult<Option<Self>>;\n\n//! }\n\n//! ```\n\n//! ```\n\n//! use oxidizer::*;\n\n//! use chrono::{DateTime, Utc};\n\n//!\n", "file_path": "oxidizer/src/lib.rs", "rank": 65, "score": 5.000130286050657 }, { "content": " == 1\n\n {\n\n return None;\n\n }\n\n\n\n let last_primary_key = self\n\n .get_fields_all()\n\n .filter(|field| field.is_primary_key())\n\n .last()\n\n .unwrap();\n\n let expanded = quote_spanned! 
{\n\n last_primary_key.ident.as_ref().unwrap().span() => compile_error!(\"Multiple primary keys defined\")\n\n };\n\n Some(TokenStream::from(expanded))\n\n }\n\n\n\n pub fn get_fields_foreign(&self) -> Vec<&Field> {\n\n self.get_fields_all()\n\n .filter(|field| field.parse_relation().is_some())\n\n .collect()\n", "file_path": "oxidizer-entity-macro/src/props.rs", "rank": 66, "score": 4.993891215939234 }, { "content": " /// Builds the raw query from the migration\n\n pub fn make(&self) -> String {\n\n self.raw.make::<Pg>()\n\n }\n\n}\n\n\n\n/// Creates a new migration module\n\n#[macro_export]\n\nmacro_rules! create_migration_module {\n\n ($entity:ident) => {\n\n pub fn migration() -> String {\n\n let m = <$entity>::create_migration().expect(concat!(\n\n \"Could not create migration for \",\n\n stringify!($entity)\n\n ));\n\n m.make()\n\n }\n\n };\n\n}\n", "file_path": "oxidizer/src/migration.rs", "rank": 67, "score": 4.937206010600582 }, { "content": "async fn test_indexes() {\n\n let db = super::db::test_utils::create_test_db(\"test_indexes\").await;\n\n\n\n db.migrate_tables(&[TestCustomIndexes::create_migration().unwrap()])\n\n .await\n\n .unwrap();\n\n\n\n let mut obj = TestCustomIndexes {\n\n id: 0,\n\n name: \"test\".to_string(),\n\n date: \"07/19/2020\".to_string(),\n\n email: \"me@example.com\".to_string(),\n\n };\n\n let creating = obj.save(&db).await.unwrap();\n\n assert_eq!(true, creating);\n\n\n\n let mut obj2 = TestCustomIndexes {\n\n id: 0,\n\n name: \"test\".to_string(),\n\n date: \"07/19/2020\".to_string(),\n", "file_path": "oxidizer/src/tests_macro.rs", "rank": 68, "score": 4.815391363660461 }, { "content": "}\n\n\n\n#[tokio::test]\n\nasync fn test_entity_custom_type_error() {\n\n let db = super::db::test_utils::create_test_db(\"test_entity_custom_type_error\").await;\n\n\n\n db.migrate_tables(&[TestCustomType::create_migration().unwrap()])\n\n .await\n\n .unwrap();\n\n\n\n let mut obj = TestCustomType::default();\n\n obj.my_enum = 
MyEnum::Item2;\n\n let creating = obj.save(&db).await.unwrap();\n\n assert_eq!(creating, true);\n\n\n\n let query = format!(\n\n \"update {} set my_enum = $1\",\n\n TestCustomType::get_table_name()\n\n );\n\n let value: i32 = 33;\n", "file_path": "oxidizer/src/tests_macro.rs", "rank": 69, "score": 4.570670301118912 }, { "content": "}\n\n\n\n#[tokio::test]\n\nasync fn test_entity_field_ignore() {\n\n let db = super::db::test_utils::create_test_db(\"test_entity_field_ignore\").await;\n\n\n\n db.migrate_tables(&[TestIgnoreField::create_migration().unwrap()])\n\n .await\n\n .unwrap();\n\n\n\n let mut obj = TestIgnoreField::default();\n\n obj.name = \"test\".to_string();\n\n let creating = obj.save(&db).await.unwrap();\n\n assert_eq!(creating, true);\n\n\n\n let creating = obj.save(&db).await.unwrap();\n\n assert_eq!(creating, false);\n\n}\n\n\n\n#[tokio::test]\n", "file_path": "oxidizer/src/tests_macro.rs", "rank": 70, "score": 4.53640965798799 }, { "content": "//! #[tokio::test]\n\n//! async fn test_my_entity() {\n\n//! let uri = \"postgres://postgres:alkje2lkaj2e@db/postgres\";\n\n//! let max_open = 50; // mobc\n\n//! let ca_file: Option<&str> = None;\n\n//! let db = DB::connect(&uri, max_open, ca_file).await.unwrap();\n\n//!\n\n//! db.migrate_tables(&[MyEntity::create_migration().unwrap()]).await.unwrap();\n\n//!\n\n//! let mut entity = MyEntity::default();\n\n//! let creating = entity.save(&db).await.unwrap();\n\n//! assert_eq!(creating, true);\n\n//! }\n\n//!\n\n//! ```\n\n//!\n\n//!\n\n//! ## Attributes\n\n//!\n\n//! 
Derive attributes can be used to create indexes, change the default table name and\n", "file_path": "oxidizer/src/lib.rs", "rank": 71, "score": 4.470853619800266 }, { "content": "async fn test_entity_custom_type() {\n\n let db = super::db::test_utils::create_test_db(\"test_entity_custom_type\").await;\n\n\n\n db.migrate_tables(&[TestCustomType::create_migration().unwrap()])\n\n .await\n\n .unwrap();\n\n\n\n let mut obj = TestCustomType::default();\n\n obj.my_enum = MyEnum::Item2;\n\n let creating = obj.save(&db).await.unwrap();\n\n assert_eq!(creating, true);\n\n\n\n let creating = obj.save(&db).await.unwrap();\n\n assert_eq!(creating, false);\n\n\n\n let result = TestCustomType::first(&db, \"id = $1\", &[&obj.id])\n\n .await\n\n .unwrap()\n\n .unwrap();\n\n assert_eq!(result.my_enum, MyEnum::Item2);\n", "file_path": "oxidizer/src/tests_macro.rs", "rank": 72, "score": 4.3089781470030015 }, { "content": "#[has_many(model = \"TestEntity\", field = \"entity_id\", through = \"TestManyToMany\")]\n\npub struct TestReverseRelationTarget {\n\n #[primary_key]\n\n id: i32,\n\n}\n\n\n\n#[derive(Default, Entity)]\n\npub struct TestManyToMany {\n\n #[primary_key]\n\n id: i32,\n\n\n\n #[relation(model = \"TestReverseRelationTarget\", key = \"id\")]\n\n target_id: i32,\n\n\n\n #[relation(model = \"TestEntity\", key = \"id\")]\n\n entity_id: i32,\n\n}\n\n\n\n#[derive(Default)]\n\npub struct TestIgnoredType {\n", "file_path": "oxidizer/src/tests_macro.rs", "rank": 73, "score": 4.259216583234375 }, { "content": "//! 1 => Ok(MyEnum::Item2),\n\n//! _ => Err(ConvertError::Error),\n\n//! }\n\n//! }\n\n//! }\n\n//!\n\n//! #[derive(Entity)]\n\n//! pub struct TestCustomType {\n\n//! #[primary_key]\n\n//! id: i32,\n\n//!\n\n//! #[custom_type(ty = \"i32\")]\n\n//! my_enum: MyEnum,\n\n//! }\n\n//! ```\n\n//! The custom type requires you to explicity implement the related `TryFrom` trait functions to convert between the\n\n//! actual type and the overriden type. 
The error type from the `TryFrom` trait must implement the `std::fmt::Display` trait\n\n//!\n\n//!\n\n//! ## Relations\n", "file_path": "oxidizer/src/lib.rs", "rank": 74, "score": 4.247558066382217 }, { "content": "//! ```ignore\n\n//! #[oxidizer::async_trait]\n\n//! pub trait __AccessorHasManyTargetEntityToEntity {\n\n//! async fn get_all_test_entity(&self, db: &oxidizer::db::DB) -> oxidizer::db::DBResult<Vec<Entity>>;\n\n//! }\n\n//! ```\n\n//!\n\n//! ### With a through table (many-to-many)\n\n//! ```\n\n//! use oxidizer::*;\n\n//!\n\n//! #[derive(Entity)]\n\n//! #[derive(Default)]\n\n//! pub struct Entity {\n\n//! #[primary_key]\n\n//! id: i32,\n\n//! name: String\n\n//! }\n\n//!\n\n//! #[derive(Default, Entity)]\n", "file_path": "oxidizer/src/lib.rs", "rank": 75, "score": 4.246449288972141 }, { "content": " pub async fn create(\n\n &self,\n\n query: &str,\n\n params: &'_ [&'_ (dyn ToSql + Sync)],\n\n ) -> Result<u64, Error> {\n\n self.execute(query, params).await\n\n }\n\n\n\n pub async fn execute(\n\n &self,\n\n query: &str,\n\n params: &'_ [&'_ (dyn ToSql + Sync)],\n\n ) -> Result<u64, Error> {\n\n match &self.pool {\n\n ConnectionPool::TLS(pool) => {\n\n let client = pool.get().await.map_err(|err| Error::MobcError(err))?;\n\n\n\n let insert = client\n\n .prepare(query)\n\n .await\n", "file_path": "oxidizer/src/db/db.rs", "rank": 76, "score": 4.094831975969239 }, { "content": " let table_name = props.get_table_name();\n\n\n\n let fields_plain_value_acessors: Vec<TokenStream2> = props\n\n .get_fields_plain()\n\n .iter()\n\n .map(|field| {\n\n let name = &field.ident;\n\n if let Some(ct) = field.parse_custom_type() {\n\n let ty = ct.ty;\n\n\n\n let ty_ident = format_ident!(\"{}\", ty);\n\n\n\n return quote! { &<#ty_ident>::try_from(&self.#name)? };\n\n }\n\n\n\n quote! 
{ &self.#name }\n\n })\n\n .collect();\n\n\n\n let fields_plain_names = props.get_fields_plain_names();\n", "file_path": "oxidizer-entity-macro/src/entity_builder.rs", "rank": 77, "score": 4.080963128976631 }, { "content": " let foreign_helpers = self.build_foreign_helpers(&props);\n\n\n\n let has_many_helpers = self.build_has_many_helpers(&props);\n\n\n\n let expanded = quote! {\n\n #[oxidizer::async_trait]\n\n impl oxidizer::entity::IEntity for #name {\n\n #save_fn\n\n\n\n #delete_fn\n\n\n\n #is_synced_with_db\n\n\n\n #find_fn\n\n\n\n #first_fn\n\n\n\n #from_row_fn\n\n\n\n #create_migration_fn\n", "file_path": "oxidizer-entity-macro/src/entity_builder.rs", "rank": 78, "score": 3.890737239696257 }, { "content": "\n\n fn build_first_fn(&self, props: &Props) -> TokenStream2 {\n\n let name = props.get_name();\n\n let table_name = props.get_table_name();\n\n quote! {\n\n async fn first(db: &oxidizer::db::DB, condition: &str, params: &'_ [&'_ (dyn oxidizer::db_types::ToSql + Sync)]) -> oxidizer::db::DBResult<std::option::Option<#name>> {\n\n let query_str = format!(\"SELECT * FROM {} WHERE {} LIMIT 1\", #table_name, condition);\n\n let rows = db.query(&query_str, params).await?;\n\n\n\n let mut results: Vec<#name> = Vec::with_capacity(rows.len());\n\n for row in rows.iter() {\n\n results.push(Self::from_row(row)?);\n\n }\n\n\n\n match results.len() {\n\n 0 => Ok(None),\n\n _ => Ok(Some(results.remove(0))),\n\n }\n\n }\n\n }\n", "file_path": "oxidizer-entity-macro/src/entity_builder.rs", "rank": 79, "score": 3.8781027233130714 }, { "content": "//! ```\n\n//!\n\n//! This will implement for `TestRelation` the following generated trait:\n\n//! ```ignore\n\n//! #[oxidizer::async_trait]\n\n//! pub trait __AccessorTestRelationToEntity {\n\n//! async fn get_test_entity(&self, db: &oxidizer::db::DB) -> oxidizer::db::DBResult<Entity>;\n\n//! async fn set_test_entity(&mut self, db: &oxidizer::db::DB, v: &Entity) -> oxidizer::db::DBResult<()>;\n\n//! }\n\n//! ```\n\n//!\n\n//! 
#[has_many]\n\n//! 1-to-many or many-to-many relations can be achieved using the `has_many` attribute\n\n//!\n\n//! ### basic (1-to-many)\n\n//!\n\n//! ```\n\n//! use oxidizer::*;\n\n//!\n\n//! #[derive(Entity)]\n", "file_path": "oxidizer/src/lib.rs", "rank": 80, "score": 3.752297775436644 }, { "content": " fn default() -> Self {\n\n MyEnum::Item1\n\n }\n\n}\n\n\n\npub enum ConvertError {\n\n Error,\n\n}\n\n\n\nimpl std::fmt::Display for ConvertError {\n\n fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n f.write_str(\"Error trying to convert\")\n\n }\n\n}\n\n\n\nimpl std::convert::TryFrom<&MyEnum> for i32 {\n\n type Error = ConvertError;\n\n\n\n fn try_from(v: &MyEnum) -> Result<Self, Self::Error> {\n\n match v {\n", "file_path": "oxidizer/src/tests_macro.rs", "rank": 81, "score": 3.645456376385383 }, { "content": "\n\nuse crate::create_migration_module;\n\nuse crate::entity::IEntity;\n\n\n\nuse crate::tests_macro::TestEntity;\n\n\n\ncreate_migration_module!(TestEntity);", "file_path": "oxidizer/src/migrations/V0001__entity.rs", "rank": 82, "score": 3.544846150395025 }, { "content": "//!\n\n//!\n\n//! - V00001__person.rs\n\n//!\n\n//! ```ignore\n\n//! use oxidizer::create_migration_module;\n\n//! use oxidizer::entity::IEntity;\n\n//!\n\n//! use crate::entities::Person;\n\n//!\n\n//! create_migration_module!(Person);\n\n//! ```\n\n//! - migrations/mod.rs\n\n//!\n\n//! ```ignore\n\n//! use oxidizer::include_migration_mods;\n\n//!\n\n//! include_migration_mods!();\n\n//!\n\n//! 
```\n", "file_path": "oxidizer/src/migration.rs", "rank": 83, "score": 3.4900552080840006 }, { "content": " }\n\n }\n\n\n\n pub async fn query(\n\n &self,\n\n query: &str,\n\n params: &'_ [&'_ (dyn ToSql + Sync)],\n\n ) -> Result<Vec<Row>, Error> {\n\n match &self.pool {\n\n ConnectionPool::TLS(pool) => {\n\n let client = pool.get().await.map_err(|err| Error::MobcError(err))?;\n\n\n\n let insert = client\n\n .prepare(query)\n\n .await\n\n .map_err(|err| Error::PostgresError(err))?;\n\n\n\n client\n\n .query(&insert, params)\n\n .await\n", "file_path": "oxidizer/src/db/db.rs", "rank": 84, "score": 3.44835582565919 }, { "content": " \") RETURNING \",\n\n stringify!(#primary_key_ident),\n\n \";\"\n\n );\n\n let rows = db.query(\n\n query,\n\n &[#( #fields_plain_value_acessors ),*]\n\n ).await?;\n\n let first_row = rows.first().ok_or(oxidizer::db::Error::Other(\"Error while saving entity\".to_string()))?;\n\n self.#primary_key_ident = first_row.get::<&str, #primary_key_type>(stringify!(#primary_key_ident));\n\n 1\n\n },\n\n id => {\n\n let query = concat!(\n\n \"UPDATE \",\n\n #table_name,\n\n \" SET \",\n\n #(stringify!(#fields_plain_names =), #fields_plain_numbered,)*\n\n \" WHERE \",\n\n stringify!(#primary_key_ident),\n", "file_path": "oxidizer-entity-macro/src/entity_builder.rs", "rank": 85, "score": 3.1457207995161847 }, { "content": "use crate::migration::*;\n\n\n\ninclude_migration_mods!();\n", "file_path": "oxidizer/src/migrations/mod.rs", "rank": 86, "score": 3.1089591154051237 }, { "content": " let name = props.get_name();\n\n\n\n props.get_has_many_attrs().iter().map(|attr| {\n\n let model_snake_cased = to_snake_case(&attr.model);\n\n\n\n let get_ident = format_ident!(\"get_all_{}\", model_snake_cased);\n\n\n\n let trait_ident = format_ident!(\"__AccessorHasMany{}To{}\", name, attr.model);\n\n\n\n let model = match attr.through.as_ref() {\n\n Some(m) => format_ident!(\"{}\", m),\n\n None => format_ident!(\"{}\", attr.model),\n\n };\n\n\n\n let field = 
&attr.field;\n\n\n\n let pk = &props.get_primary_key_field().unwrap().ident;\n\n\n\n quote! {\n\n #[oxidizer::async_trait]\n", "file_path": "oxidizer-entity-macro/src/entity_builder.rs", "rank": 87, "score": 3.0548714906770216 }, { "content": "//! }\n\n//! }\n\n//!\n\n//! impl TryFrom<&MyEnum> for i32 {\n\n//! type Error = ConvertError;\n\n//!\n\n//! fn try_from(v: &MyEnum) -> Result<Self, Self::Error> {\n\n//! match v {\n\n//! MyEnum::Item1 => Ok(0),\n\n//! MyEnum::Item2 => Ok(1),\n\n//! }\n\n//! }\n\n//! }\n\n//!\n\n//! impl TryFrom<i32> for MyEnum {\n\n//! type Error = ConvertError;\n\n//!\n\n//! fn try_from(v: i32) -> Result<Self, Self::Error> {\n\n//! match v {\n\n//! 0 => Ok(MyEnum::Item1),\n", "file_path": "oxidizer/src/lib.rs", "rank": 88, "score": 2.831722990866709 }, { "content": " }\n\n\n\n fn build_find_fn(&self, props: &Props) -> TokenStream2 {\n\n let name = props.get_name();\n\n let table_name = props.get_table_name();\n\n quote! {\n\n async fn find(db: &oxidizer::db::DB, condition: &str, params: &'_ [&'_ (dyn oxidizer::db_types::ToSql + Sync)]) -> oxidizer::db::DBResult<Vec<#name>> {\n\n let query_str = format!(\"SELECT * FROM {} WHERE {}\", #table_name, condition);\n\n let rows = db.query(&query_str, params).await?;\n\n\n\n let mut results: Vec<#name> = Vec::with_capacity(rows.len());\n\n\n\n for row in rows.iter() {\n\n results.push(Self::from_row(row)?);\n\n }\n\n\n\n Ok(results)\n\n }\n\n }\n\n }\n", "file_path": "oxidizer-entity-macro/src/entity_builder.rs", "rank": 89, "score": 2.7742662160302505 }, { "content": "//!\n\n//! ### #[relation]\n\n//! Relations can be created using the `relation` attribute as in the example:\n\n//! ```\n\n//! use oxidizer::*;\n\n//! #[derive(Entity)]\n\n//! struct Entity {\n\n//! #[primary_key]\n\n//! id: i32,\n\n//! }\n\n//!\n\n//! #[derive(Entity)]\n\n//! struct TestRelation {\n\n//! #[primary_key]\n\n//! id: i32,\n\n//! device_id: String,\n\n//!\n\n//! #[relation(model=\"Entity\", key=\"id\")]\n\n//! 
entity_id: i32,\n\n//! }\n", "file_path": "oxidizer/src/lib.rs", "rank": 90, "score": 2.7484267347408338 }, { "content": "use super::super::db::*;\n\n\n\npub async fn create_test_db(name: &str) -> DB {\n\n let uri = \"postgres://postgres:alkje2lkaj2e@db/postgres\";\n\n let db = DB::connect(&uri, 50, None).await.unwrap();\n\n\n\n let query_str = format!(\"DROP DATABASE IF EXISTS db_test_{}\", name.to_lowercase());\n\n db.execute(&query_str, &[]).await.unwrap();\n\n let query_str = format!(\"CREATE DATABASE db_test_{}\", name.to_lowercase());\n\n db.execute(&query_str, &[]).await.unwrap();\n\n\n\n drop(db);\n\n\n\n let uri = format!(\"postgres://postgres:alkje2lkaj2e@db/db_test_{}\", name);\n\n let db = DB::connect(&uri, 50, None).await.unwrap();\n\n\n\n db\n\n}\n", "file_path": "oxidizer/src/db/test_utils.rs", "rank": 91, "score": 2.618418500700794 }, { "content": " .unwrap();\n\n\n\n let mut target = TestReverseRelationTarget::default();\n\n let creating = target.save(&db).await.unwrap();\n\n assert_eq!(creating, true);\n\n\n\n let mut entity = TestEntity::default();\n\n let creating = entity.save(&db).await.unwrap();\n\n assert_eq!(creating, true);\n\n\n\n let mut m2m = TestManyToMany::default();\n\n m2m.entity_id = entity.id;\n\n m2m.target_id = target.id;\n\n let creating = m2m.save(&db).await.unwrap();\n\n assert_eq!(creating, true);\n\n\n\n let loaded_entity = target.get_all_test_entity(&db).await.unwrap();\n\n assert_eq!(1, loaded_entity.len());\n\n\n\n assert_eq!(entity.id, loaded_entity[0].entity_id);\n", "file_path": "oxidizer/src/tests_macro.rs", "rank": 92, "score": 2.491513450777774 }, { "content": " email: \"me2@example.com\".to_string(),\n\n };\n\n assert!(obj2.save(&db).await.is_err());\n\n\n\n let mut obj2 = TestCustomIndexes {\n\n id: 0,\n\n name: \"test2\".to_string(),\n\n date: \"07/19/2020\".to_string(),\n\n email: \"me2@example.com\".to_string(),\n\n };\n\n assert!(obj2.save(&db).await.is_ok());\n\n\n\n let mut obj2 = TestCustomIndexes {\n\n 
id: 0,\n\n name: \"test3\".to_string(),\n\n date: \"07/19/2020\".to_string(),\n\n email: \"me2@example.com\".to_string(),\n\n };\n\n assert!(obj2.save(&db).await.is_err());\n\n\n", "file_path": "oxidizer/src/tests_macro.rs", "rank": 93, "score": 2.3888952833223187 }, { "content": "//! #[derive(Default)]\n\n//! #[has_many(model=\"TargetEntity\", field=\"entity_id\")]\n\n//! pub struct Entity {\n\n//! #[primary_key]\n\n//! id: i32,\n\n//! name: String\n\n//! }\n\n//!\n\n//! #[derive(Default, Entity)]\n\n//! pub struct TargetEntity {\n\n//! #[primary_key]\n\n//! id: i32,\n\n\n\n//! #[relation(model=\"Entity\", key=\"id\")]\n\n//! entity_id: i32\n\n//! }\n\n//! ```\n\n//! This will create helper functions to access all the `TargetEntity` that Entity has.\n\n//! This is what the generated trait and implementation looks like (implementaion is also generated).\n\n//!\n", "file_path": "oxidizer/src/lib.rs", "rank": 94, "score": 2.364520752942734 }, { "content": "//!\n\n//! With the correct file struct you can now create a runner and apply migrations with:\n\n//!\n\n//! ```\n\n//! use oxidizer::*;\n\n//! #[tokio::test]\n\n//! async fn test_migrate() {\n\n//! let runner = crate::migrations::runner();\n\n//!\n\n//! let uri = \"postgres://postgres:alkje2lkaj2e@db/postgres\";\n\n//! let max_open = 50; // mobc\n\n//! let ca_file: Option<&str> = None;\n\n//! let db = DB::connect(&uri, max_open, ca_file).await.unwrap();\n\n//! db.migrate(runner).await.unwrap();\n\n//! }\n\n//! 
```\n\n//!\n\n\n\nuse barrel::{backend::Pg, Migration as RawMigration};\n\n\n", "file_path": "oxidizer/src/migration.rs", "rank": 95, "score": 2.3629230766256857 }, { "content": " TestRelation::create_migration().unwrap(),\n\n ])\n\n .await\n\n .unwrap();\n\n\n\n let mut entity = TestEntity::default();\n\n entity.name = \"test\".to_string();\n\n let creating = entity.save(&db).await.unwrap();\n\n assert_eq!(creating, true);\n\n\n\n let mut entity2 = TestEntity::default();\n\n entity2.name = \"test 2\".to_string();\n\n let creating = entity2.save(&db).await.unwrap();\n\n assert_eq!(creating, true);\n\n\n\n let mut obj = TestRelation {\n\n id: 0,\n\n device_id: \"abc12\".to_string(),\n\n entity_id: entity.id,\n\n };\n", "file_path": "oxidizer/src/tests_macro.rs", "rank": 96, "score": 2.356586158948668 }, { "content": "#[derive(Debug)]\n\npub enum Error {\n\n PostgresError(tokio_postgres::Error),\n\n OpensslError(openssl::error::ErrorStack),\n\n MobcError(mobc::Error<tokio_postgres::Error>),\n\n RefineryError(refinery::Error),\n\n DoesNotExist,\n\n ReferencedModelIsNotInDB,\n\n Other(String),\n\n}\n\n\n\npub type DBResult<T> = std::result::Result<T, Error>;\n\n\n\nimpl<R> std::convert::From<R> for Error\n\nwhere\n\n R: std::fmt::Display,\n\n{\n\n fn from(v: R) -> Self {\n\n Error::Other(v.to_string())\n\n }\n\n}\n", "file_path": "oxidizer/src/db/error.rs", "rank": 97, "score": 2.311764602169102 }, { "content": "\n\n quote! 
{\n\n #[oxidizer::async_trait]\n\n pub trait #trait_ident {\n\n async fn #get_ident(&self, db: &oxidizer::db::DB) -> oxidizer::db::DBResult<#model>;\n\n async fn #set_ident(&mut self, db: &oxidizer::db::DB, v: &#model) -> oxidizer::db::DBResult<()>;\n\n }\n\n\n\n #[oxidizer::async_trait]\n\n impl #trait_ident for #name {\n\n async fn #get_ident(&self, db: &oxidizer::db::DB) -> oxidizer::db::DBResult<#model> {\n\n if self.#local_key == <#local_key_type>::default() {\n\n return Err(oxidizer::db::Error::DoesNotExist);\n\n }\n\n\n\n let table_name = <#model>::get_table_name();\n\n let query = format!(\"select * from {} where {} = $1 limit 1\", &table_name, stringify!(#key));\n\n let results = db.query(&query, &[&self.#local_key]).await?;\n\n if results.len() == 0 {\n\n return Err(oxidizer::db::Error::DoesNotExist);\n", "file_path": "oxidizer-entity-macro/src/entity_builder.rs", "rank": 98, "score": 2.2835503481791153 }, { "content": "}\n\n\n\nmod migration_modules {\n\n use super::*;\n\n use crate::create_migration_module;\n\n\n\n create_migration_module!(TestEntity);\n\n}\n\n\n\n#[tokio::test]\n\nasync fn test_entity_macro_save() {\n\n let db = super::db::test_utils::create_test_db(\"test_entity_macro_save\").await;\n\n\n\n db.migrate_tables(&[TestEntity::create_migration().unwrap()])\n\n .await\n\n .unwrap();\n\n\n\n let mut obj = TestEntity::default();\n\n obj.name = \"test\".to_string();\n\n let creating = obj.save(&db).await.unwrap();\n", "file_path": "oxidizer/src/tests_macro.rs", "rank": 99, "score": 2.2788090024770575 } ]
Rust
src/day5.rs
mathstar/adventOfCode2021
19e843ebf1f0e2abbee5c4502b39bfdab5755a1e
use std::cmp::{max, min}; use std::collections::HashMap; use crate::day::Day; pub struct Day5 {} #[derive(Debug)] struct Line { start: (i32, i32), end: (i32, i32) } enum AxialClassification { X, Y, NonAxial } impl Line { fn axial_classification(&self) -> AxialClassification { if self.start.0 == self.end.0 { AxialClassification::X } else if self.start.1 == self.end.1 { AxialClassification::Y } else { AxialClassification::NonAxial } } } fn parse_input(input: &str) -> Vec<Line> { let mut lines = Vec::new(); for line in input.lines() { let mut split = line.split(" -> "); let mut p_split = split.next().unwrap().split(","); let start = (p_split.next().unwrap().parse().unwrap(), p_split.next().unwrap().parse().unwrap()); p_split = split.next().unwrap().split(","); let end = (p_split.next().unwrap().parse().unwrap(), p_split.next().unwrap().parse().unwrap()); lines.push(Line {start, end}); } lines } fn increment_grid_position(grid: &mut HashMap<i32, HashMap<i32, i32>>, overlap: &mut i32, x: i32, y: i32) { match grid.get_mut(&x) { Some(r) => { match r.get_mut(&y) { Some(existing) if *existing == 1 => { *overlap += 1; *existing += 1; } Some(existing) => { *existing += 1; } None => { r.insert(y, 1); } } }, None => { let mut m = HashMap::new(); m.insert(y, 1); grid.insert(x, m); } } } fn compute_overlap(lines: &Vec<Line>, consider_diagonal: bool) -> i32 { let mut grid : HashMap<i32, HashMap<i32, i32>> = HashMap::new(); let mut overlap = 0; for line in lines { match line.axial_classification() { AxialClassification::X => { let y_start = min(line.start.1, line.end.1); let y_end = max(line.start.1, line.end.1) + 1; for y in y_start .. 
y_end { increment_grid_position(&mut grid, &mut overlap, line.start.0, y); } }, AxialClassification::Y => { let x_start = min(line.start.0, line.end.0); let x_end = max(line.start.0, line.end.0) + 1; for x in x_start..x_end { increment_grid_position(&mut grid, &mut overlap, x, line.start.1); } }, AxialClassification::NonAxial => { if consider_diagonal { let x_start = line.start.0; let y_start = line.start.1; let x_sign = if line.end.0 - line.start.0 > 0 {1} else {-1}; let y_sign = if line.end.1 - line.start.1 > 0 {1} else {-1}; let length = (line.start.0 - line.end.0).abs() + 1; for inc in 0..length { let x = x_start + inc * x_sign; let y = y_start + inc * y_sign; increment_grid_position(&mut grid, &mut overlap, x, y); } } } } } overlap } impl Day for Day5 { fn part1(&self, input: &str) -> String { let lines = parse_input(input); compute_overlap(&lines, false).to_string() } fn part2(&self, input: &str) -> String { let lines = parse_input(input); compute_overlap(&lines, true).to_string() } } #[cfg(test)] mod tests { use super::*; #[test] fn part1_test1() { assert_eq!(Day5{}.part1("0,9 -> 5,9 8,0 -> 0,8 9,4 -> 3,4 2,2 -> 2,1 7,0 -> 7,4 6,4 -> 2,0 0,9 -> 2,9 3,4 -> 1,4 0,0 -> 8,8 5,5 -> 8,2"), "5"); } #[test] fn part2_test1() { assert_eq!(Day5{}.part2("0,9 -> 5,9 8,0 -> 0,8 9,4 -> 3,4 2,2 -> 2,1 7,0 -> 7,4 6,4 -> 2,0 0,9 -> 2,9 3,4 -> 1,4 0,0 -> 8,8 5,5 -> 8,2"), "12"); } }
use std::cmp::{max, min}; use std::collections::HashMap; use crate::day::Day; pub struct Day5 {} #[derive(Debug)] struct Line { start: (i32, i32), end: (i32, i32) } enum AxialClassification { X, Y, NonAxial } impl Line {
} fn parse_input(input: &str) -> Vec<Line> { let mut lines = Vec::new(); for line in input.lines() { let mut split = line.split(" -> "); let mut p_split = split.next().unwrap().split(","); let start = (p_split.next().unwrap().parse().unwrap(), p_split.next().unwrap().parse().unwrap()); p_split = split.next().unwrap().split(","); let end = (p_split.next().unwrap().parse().unwrap(), p_split.next().unwrap().parse().unwrap()); lines.push(Line {start, end}); } lines } fn increment_grid_position(grid: &mut HashMap<i32, HashMap<i32, i32>>, overlap: &mut i32, x: i32, y: i32) { match grid.get_mut(&x) { Some(r) => { match r.get_mut(&y) { Some(existing) if *existing == 1 => { *overlap += 1; *existing += 1; } Some(existing) => { *existing += 1; } None => { r.insert(y, 1); } } }, None => { let mut m = HashMap::new(); m.insert(y, 1); grid.insert(x, m); } } } fn compute_overlap(lines: &Vec<Line>, consider_diagonal: bool) -> i32 { let mut grid : HashMap<i32, HashMap<i32, i32>> = HashMap::new(); let mut overlap = 0; for line in lines { match line.axial_classification() { AxialClassification::X => { let y_start = min(line.start.1, line.end.1); let y_end = max(line.start.1, line.end.1) + 1; for y in y_start .. 
y_end { increment_grid_position(&mut grid, &mut overlap, line.start.0, y); } }, AxialClassification::Y => { let x_start = min(line.start.0, line.end.0); let x_end = max(line.start.0, line.end.0) + 1; for x in x_start..x_end { increment_grid_position(&mut grid, &mut overlap, x, line.start.1); } }, AxialClassification::NonAxial => { if consider_diagonal { let x_start = line.start.0; let y_start = line.start.1; let x_sign = if line.end.0 - line.start.0 > 0 {1} else {-1}; let y_sign = if line.end.1 - line.start.1 > 0 {1} else {-1}; let length = (line.start.0 - line.end.0).abs() + 1; for inc in 0..length { let x = x_start + inc * x_sign; let y = y_start + inc * y_sign; increment_grid_position(&mut grid, &mut overlap, x, y); } } } } } overlap } impl Day for Day5 { fn part1(&self, input: &str) -> String { let lines = parse_input(input); compute_overlap(&lines, false).to_string() } fn part2(&self, input: &str) -> String { let lines = parse_input(input); compute_overlap(&lines, true).to_string() } } #[cfg(test)] mod tests { use super::*; #[test] fn part1_test1() { assert_eq!(Day5{}.part1("0,9 -> 5,9 8,0 -> 0,8 9,4 -> 3,4 2,2 -> 2,1 7,0 -> 7,4 6,4 -> 2,0 0,9 -> 2,9 3,4 -> 1,4 0,0 -> 8,8 5,5 -> 8,2"), "5"); } #[test] fn part2_test1() { assert_eq!(Day5{}.part2("0,9 -> 5,9 8,0 -> 0,8 9,4 -> 3,4 2,2 -> 2,1 7,0 -> 7,4 6,4 -> 2,0 0,9 -> 2,9 3,4 -> 1,4 0,0 -> 8,8 5,5 -> 8,2"), "12"); } }
fn axial_classification(&self) -> AxialClassification { if self.start.0 == self.end.0 { AxialClassification::X } else if self.start.1 == self.end.1 { AxialClassification::Y } else { AxialClassification::NonAxial } }
function_block-full_function
[ { "content": "struct BingoBoard {\n\n values: Vec<Vec<i32>>,\n\n marked: Vec<Vec<bool>>\n\n}\n\n\n\nimpl BingoBoard {\n\n fn new(values: Vec<Vec<i32>>) -> BingoBoard {\n\n let mut marked = Vec::new();\n\n for a in &values {\n\n let mut row = Vec::new();\n\n for _ in a {\n\n row.push(false);\n\n }\n\n marked.push(row);\n\n }\n\n\n\n BingoBoard {\n\n values,\n\n marked\n\n }\n", "file_path": "src/day4.rs", "rank": 5, "score": 34418.05205837597 }, { "content": "struct BasicCost {}\n\n\n\nimpl Cost for BasicCost {\n\n fn cost(&mut self, a: i32, b: i32) -> i32 {\n\n (a - b).abs()\n\n }\n\n}\n\n\n", "file_path": "src/day7.rs", "rank": 6, "score": 34418.05205837597 }, { "content": "pub trait Day {\n\n fn part1(&self, input:&str) -> String;\n\n fn part2(&self, input:&str) -> String;\n\n}\n", "file_path": "src/day.rs", "rank": 7, "score": 34396.909695435796 }, { "content": "struct IncreasingCostCache {\n\n cache: Vec<i32>\n\n}\n\n\n\nimpl IncreasingCostCache {\n\n fn new() -> IncreasingCostCache {\n\n IncreasingCostCache {cache: vec![0]}\n\n }\n\n}\n\n\n\nimpl Cost for IncreasingCostCache {\n\n fn cost(&mut self, a: i32, b: i32) -> i32 {\n\n let dist = (a-b).abs() as usize;\n\n while self.cache.len() < dist + 1 {\n\n self.cache.push(self.cache[self.cache.len() - 1] + self.cache.len() as i32);\n\n }\n\n self.cache[dist]\n\n }\n\n}\n\n\n", "file_path": "src/day7.rs", "rank": 8, "score": 33314.37146030989 }, { "content": "fn parse_input(input: &str) -> (Vec<i32>, Vec<BingoBoard>) {\n\n let mut lines = input.lines();\n\n let calls : Vec<i32> = lines.next().unwrap().split(\",\").map(|i| i.parse().unwrap()).collect();\n\n lines.next();\n\n\n\n let mut boards = Vec::new();\n\n let mut board = Vec::new();\n\n while let Some(line) = lines.next() {\n\n if line.trim().is_empty() {\n\n boards.push(BingoBoard::new(board));\n\n board = Vec::new();\n\n } else {\n\n board.push(line.split_ascii_whitespace().map(|i| i.parse().unwrap()).collect());\n\n }\n\n }\n\n if !board.is_empty() {\n\n 
boards.push(BingoBoard::new(board));\n\n }\n\n\n\n (calls, boards)\n", "file_path": "src/day4.rs", "rank": 9, "score": 22930.220488385792 }, { "content": "fn compute_min_cost(input: &str, cost_method: &mut dyn Cost) -> String {\n\n let mut min = i32::MAX;\n\n let mut max = i32::MIN;\n\n let mut counts = HashMap::new();\n\n\n\n for pos in input.lines().next().unwrap().split(\",\").map(|n| n.parse().unwrap()) {\n\n min = cmp::min(min, pos);\n\n max = cmp::max(max, pos);\n\n match counts.get_mut(&pos) {\n\n None => {counts.insert(pos, 1);},\n\n Some(c) => *c += 1\n\n }\n\n }\n\n\n\n let mut best = i32::MAX;\n\n for i in min..max {\n\n let mut cost = 0;\n\n for (pos, count) in counts.iter() {\n\n cost += count * cost_method.cost(i, *pos);\n\n }\n", "file_path": "src/day7.rs", "rank": 14, "score": 12336.593177236653 }, { "content": "use crate::day::Day;\n\n\n\npub struct Day2 {}\n\n\n\nimpl Day for Day2 {\n\n fn part1(&self, input: &str) -> String {\n\n let mut x = 0;\n\n let mut y = 0;\n\n\n\n for line in input.lines() {\n\n let mut split = line.split(\" \");\n\n match split.next() {\n\n Some(\"forward\") => x += split.next().unwrap().parse::<i32>().unwrap(),\n\n Some(\"down\") => y += split.next().unwrap().parse::<i32>().unwrap(),\n\n Some(\"up\") => y -= split.next().unwrap().parse::<i32>().unwrap(),\n\n _ => panic!(\"invalid input\")\n\n }\n\n }\n\n\n\n (x * y).to_string()\n", "file_path": "src/day2.rs", "rank": 15, "score": 6.1722610446275095 }, { "content": "use crate::day::Day;\n\n\n\npub struct Day1 {}\n\n\n\nimpl Day for Day1 {\n\n fn part1(&self, input: &str) -> String {\n\n let mut increases = 0;\n\n let mut prev : Option<i32> = None;\n\n for n in input.lines() {\n\n let n = n.parse().unwrap();\n\n match prev {\n\n None => prev = Some(n),\n\n Some(p) => {\n\n if n > p {\n\n increases += 1;\n\n }\n\n prev = Some(n)\n\n }\n\n }\n\n }\n", "file_path": "src/day1.rs", "rank": 16, "score": 5.961795015341675 }, { "content": "use crate::day::Day;\n\n\n\npub struct 
Day3 {}\n\n\n\nimpl Day for Day3 {\n\n fn part1(&self, input: &str) -> String {\n\n let mut ones : Vec<u32> = Vec::new();\n\n let mut line_count = 0;\n\n\n\n for line in input.lines() {\n\n line_count += 1;\n\n for (i, c) in line.char_indices() {\n\n if c == '1' {\n\n while ones.len() < i + 1 {\n\n ones.push(0);\n\n }\n\n ones[i] += 1;\n\n }\n\n }\n\n }\n", "file_path": "src/day3.rs", "rank": 17, "score": 5.615420926124639 }, { "content": "use crate::day::Day;\n\n\n\npub struct Day4 {}\n\n\n", "file_path": "src/day4.rs", "rank": 18, "score": 4.095249148696858 }, { "content": "use crate::day::Day;\n\n\n\npub struct Day6 {}\n\n\n", "file_path": "src/day6.rs", "rank": 19, "score": 4.095249148696858 }, { "content": "use std::cmp;\n\nuse std::collections::HashMap;\n\nuse crate::day::Day;\n\n\n\npub struct Day7 {}\n\n\n", "file_path": "src/day7.rs", "rank": 20, "score": 4.05380590044519 }, { "content": " best = cmp::min(best, cost);\n\n }\n\n best.to_string()\n\n}\n\n\n\nimpl Day for Day7 {\n\n fn part1(&self, input: &str) -> String {\n\n compute_min_cost(input, &mut BasicCost{})\n\n }\n\n\n\n fn part2(&self, input: &str) -> String {\n\n compute_min_cost(input, &mut IncreasingCostCache::new())\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n", "file_path": "src/day7.rs", "rank": 21, "score": 3.9512836916706977 }, { "content": " }\n\n\n\n fn part2(&self, input: &str) -> String {\n\n let mut x = 0;\n\n let mut y = 0;\n\n let mut aim = 0;\n\n\n\n for line in input.lines() {\n\n let mut split = line.split(\" \");\n\n match split.next() {\n\n Some(\"forward\") => {\n\n let change = split.next().unwrap().parse::<i32>().unwrap();\n\n x += change;\n\n y += aim * change;\n\n },\n\n Some(\"down\") => aim += split.next().unwrap().parse::<i32>().unwrap(),\n\n Some(\"up\") => aim -= split.next().unwrap().parse::<i32>().unwrap(),\n\n _ => panic!(\"invalid input\")\n\n }\n\n }\n", "file_path": "src/day2.rs", "rank": 22, "score": 3.5866499756269965 }, { 
"content": " increases.to_string()\n\n }\n\n\n\n fn part2(&self, input: &str) -> String {\n\n let nums : Vec<i32> = input.lines().map(|l| l.parse().unwrap()).collect();\n\n let mut a = nums[0] + nums[1] + nums[2];\n\n let mut b = nums[1] + nums[2] + nums[3];\n\n\n\n let mut increases = 0;\n\n if b > a {\n\n increases += 1;\n\n }\n\n\n\n for i in 4..nums.len() {\n\n a -= nums[i - 4];\n\n a += nums[i - 1];\n\n b -= nums[i - 3];\n\n b += nums[i];\n\n\n\n if b > a {\n", "file_path": "src/day1.rs", "rank": 23, "score": 2.323998017695402 }, { "content": "// mod day20;\n\n// mod day21;\n\n// mod day22;\n\n// mod day23;\n\n// mod day24;\n\n// mod day25;\n\n\n\nuse std::fs;\n\nuse std::io;\n\nuse std::collections::HashMap;\n\nuse crate::day::Day;\n\n\n", "file_path": "src/main.rs", "rank": 24, "score": 2.2119353228492376 }, { "content": "\n\n let mut gamma = 0;\n\n let mut epsilon = 0;\n\n for n in ones {\n\n gamma <<= 1;\n\n epsilon <<= 1;\n\n\n\n if n > line_count / 2 {\n\n gamma += 1;\n\n } else {\n\n epsilon += 1;\n\n }\n\n }\n\n\n\n (gamma * epsilon).to_string()\n\n }\n\n\n\n fn part2(&self, input: &str) -> String {\n\n let mut oxygen_values : Vec<Vec<char>> = input.lines().map(|l| l.chars().collect()).collect();\n\n let mut evaluation_index = 0;\n", "file_path": "src/day3.rs", "rank": 25, "score": 1.6762930479641873 }, { "content": " }\n\n\n\n fn mark(&mut self, n: i32) {\n\n for (i, v) in self.values.iter().enumerate() {\n\n for (j, a) in v.iter().enumerate() {\n\n if *a == n {\n\n self.marked[i][j] = true;\n\n }\n\n }\n\n }\n\n }\n\n\n\n fn has_won(&self) -> bool {\n\n for v in &self.marked {\n\n if v.iter().all(|b| *b) {\n\n return true;\n\n }\n\n }\n\n for i in 0..self.marked[0].len() {\n\n let mut all_marked = true;\n", "file_path": "src/day4.rs", "rank": 26, "score": 1.4969552857600714 }, { "content": " increases += 1;\n\n }\n\n }\n\n\n\n increases.to_string()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn part1_test1() 
{\n\n assert_eq!(Day1{}.part1(\"199\n\n200\n\n208\n\n210\n\n200\n\n207\n", "file_path": "src/day1.rs", "rank": 27, "score": 1.484264047818144 }, { "content": " return (board.score() * call).to_string();\n\n }\n\n }\n\n }\n\n }\n\n\n\n String::new()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn part1_test1() {\n\n assert_eq!(Day4{}.part1(\"7,4,9,5,11,17,23,2,0,14,21,24,10,16,13,6,15,25,12,22,18,20,8,19,3,26,1\n\n\n\n22 13 17 11 0\n\n 8 2 23 4 24\n", "file_path": "src/day4.rs", "rank": 28, "score": 1.3843597368243206 }, { "content": "\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn part1_test1() {\n\n assert_eq!(Day6{}.part1(\"3,4,3,1,2\"), \"5934\");\n\n }\n\n\n\n #[test]\n\n fn part2_test1() {\n\n assert_eq!(Day6{}.part2(\"3,4,3,1,2\"), \"26984457539\");\n\n }\n\n}\n", "file_path": "src/day6.rs", "rank": 29, "score": 1.353981365004716 }, { "content": "\n\n (x * y).to_string()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn part1_test1() {\n\n assert_eq!(Day2{}.part1(\"forward 5\n\ndown 5\n\nforward 8\n\nup 3\n\ndown 8\n\nforward 2\"), \"150\");\n\n }\n\n\n\n #[test]\n", "file_path": "src/day2.rs", "rank": 30, "score": 1.353981365004716 }, { "content": " for v in &self.marked {\n\n if !v[i] {\n\n all_marked = false;\n\n break;\n\n }\n\n }\n\n if all_marked {\n\n return true;\n\n }\n\n }\n\n\n\n false\n\n }\n\n\n\n fn score(&self) -> i32 {\n\n self.marked.iter()\n\n .enumerate()\n\n .flat_map(|(i, v)| v.iter().enumerate().map(move |(j, val)| (i, j, val)))\n\n .filter(|(_,_,val)| !**val)\n\n .map(|(i,j,_)| self.values[i][j])\n\n .sum()\n\n }\n\n}\n\n\n", "file_path": "src/day4.rs", "rank": 31, "score": 1.3227913853962083 }, { "content": " oxygen_value += 1;\n\n }\n\n }\n\n\n\n let mut co2_value = 0;\n\n for c in &co2_values[0] {\n\n co2_value <<= 1;\n\n if *c == '1' {\n\n co2_value += 1;\n\n }\n\n }\n\n\n\n (oxygen_value * co2_value).to_string()\n\n 
}\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n", "file_path": "src/day3.rs", "rank": 32, "score": 1.2970561968276348 }, { "content": "}\n\n\n\nimpl Day for Day4 {\n\n fn part1(&self, input: &str) -> String {\n\n let (calls, mut boards) = parse_input(input);\n\n\n\n for call in calls {\n\n for board in &mut boards {\n\n board.mark(call);\n\n }\n\n\n\n for board in &boards {\n\n if board.has_won() {\n\n return (board.score() * call).to_string();\n\n }\n\n }\n\n }\n\n\n\n String::new()\n\n }\n", "file_path": "src/day4.rs", "rank": 33, "score": 1.2058762321588858 }, { "content": "mod day;\n\nmod day1;\n\nmod day2;\n\nmod day3;\n\nmod day4;\n\nmod day5;\n\nmod day6;\n\nmod day7;\n\n// mod day8;\n\n// mod day9;\n\n// mod day10;\n\n// mod day11;\n\n// mod day12;\n\n// mod day13;\n\n// mod day14;\n\n// mod day15;\n\n// mod day16;\n\n// mod day17;\n\n// mod day18;\n\n// mod day19;\n", "file_path": "src/main.rs", "rank": 34, "score": 1.0588061261801651 }, { "content": " counts = cycle_counts(counts);\n\n }\n\n\n\n counts.iter().sum::<u64>().to_string()\n\n }\n\n\n\n fn part2(&self, input: &str) -> String {\n\n let mut counts = vec!(0u64,0u64,0u64,0u64,0u64,0u64,0u64,0u64,0u64);\n\n\n\n for i in input.lines().next().unwrap().split(\",\") {\n\n counts[i.parse::<usize>().unwrap()] += 1;\n\n }\n\n\n\n for _ in 0..256 {\n\n counts = cycle_counts(counts);\n\n }\n\n\n\n counts.iter().sum::<u64>().to_string()\n\n }\n\n}\n", "file_path": "src/day6.rs", "rank": 35, "score": 0.956340086530151 }, { "content": " while oxygen_values.len() > 1 {\n\n let mut one_count = 0;\n\n let mut zero_count = 0;\n\n for v in &oxygen_values {\n\n if v[evaluation_index] == '1' {\n\n one_count += 1;\n\n } else {\n\n zero_count += 1;\n\n }\n\n }\n\n let desired_digit = if one_count >= zero_count {'1'} else {'0'};\n\n oxygen_values = oxygen_values.into_iter()\n\n .filter(|c| c[evaluation_index] == desired_digit)\n\n .collect();\n\n evaluation_index += 1;\n\n }\n\n\n\n let mut co2_values : 
Vec<Vec<char>> = input.lines().map(|l| l.chars().collect()).collect();\n\n let mut evaluation_index = 0;\n\n while co2_values.len() > 1 {\n", "file_path": "src/day3.rs", "rank": 36, "score": 0.8241336579925829 } ]
Rust
pongo-rs-derive/src/raw_index_options.rs
simoneromano96/pongo-rs
615e776990e4c0435efc1ff7b87aa0d39e3b9024
use darling::FromMeta; #[derive(Clone, Debug)] pub(crate) struct Document(mongodb::bson::Document); impl FromMeta for Document { fn from_string(value: &str) -> darling::Result<Self> { println!("{value:#?}"); let value = serde_json::from_str(value); match value { Ok(document) => Ok(Self(document)), Err(error) => Err(darling::Error::unsupported_shape(&format!("{error}"))), } } } #[derive(Clone, Debug)] pub(crate) struct Collation(mongodb::options::Collation); impl FromMeta for Collation { fn from_string(value: &str) -> darling::Result<Self> { println!("{value:#?}"); let value = serde_json::from_str(value); match value { Ok(document) => Ok(Self(document)), Err(error) => Err(darling::Error::unsupported_shape(&format!("{error}"))), } } } #[derive(Clone, Debug)] pub(crate) struct IndexOptions(mongodb::options::IndexOptions); impl FromMeta for IndexOptions { fn from_string(value: &str) -> darling::Result<Self> { println!("{value:#?}"); let value = serde_json::from_str(value); match value { Ok(document) => Ok(Self(document)), Err(error) => Err(darling::Error::unsupported_shape(&format!("{error}"))), } } } #[derive(Debug, Default, FromMeta)] pub(crate) struct RawIndexOptions { #[darling(default)] pub(crate) background: Option<bool>, #[darling(default)] pub(crate) expire_after: Option<u64>, #[darling(default)] pub(crate) name: Option<String>, #[darling(default)] pub(crate) sparse: Option<bool>, #[darling(default)] pub(crate) storage_engine: Option<Document>, #[darling(default)] pub(crate) unique: Option<bool>, #[darling(default)] pub(crate) version: Option<u32>, #[darling(default)] pub(crate) default_language: Option<String>, #[darling(default)] pub(crate) language_override: Option<String>, #[darling(default)] pub(crate) text_index_version: Option<u32>, #[darling(default)] pub(crate) weights: Option<Document>, #[darling(default)] pub(crate) sphere_2d_index_version: Option<u32>, #[darling(default)] pub(crate) bits: Option<u32>, #[darling(default)] pub(crate) max: Option<f64>, 
#[darling(default)] pub(crate) min: Option<f64>, #[darling(default)] pub(crate) bucket_size: Option<u32>, #[darling(default)] pub(crate) partial_filter_expression: Option<Document>, #[darling(default)] pub(crate) collation: Option<Collation>, #[darling(default)] pub(crate) wildcard_projection: Option<Document>, #[darling(default)] pub(crate) hidden: Option<bool>, } impl From<&RawIndexOptions> for mongodb::options::IndexOptions { fn from(raw_options: &RawIndexOptions) -> Self { let builder = mongodb::options::IndexOptions::builder(); builder .background(raw_options.background) .expire_after(raw_options.expire_after.map(std::time::Duration::from_secs)) .name(raw_options.name.clone()) .sparse(raw_options.sparse) .storage_engine( raw_options .storage_engine .clone() .map(|storage_engine| storage_engine.0), ) .unique(raw_options.unique) .version(raw_options.version.map(|version| match version { 0 => mongodb::options::IndexVersion::V0, 1 => mongodb::options::IndexVersion::V1, 2 => mongodb::options::IndexVersion::V2, _custom => mongodb::options::IndexVersion::Custom(_custom), })) .default_language(raw_options.default_language.clone()) .language_override(raw_options.language_override.clone()) .text_index_version(raw_options.text_index_version.map(|version| match version { 1 => mongodb::options::TextIndexVersion::V1, 2 => mongodb::options::TextIndexVersion::V2, 3 => mongodb::options::TextIndexVersion::V3, _custom => mongodb::options::TextIndexVersion::Custom(_custom), })) .weights(raw_options.weights.clone().map(|weights| weights.0)) .sphere_2d_index_version(raw_options.sphere_2d_index_version.map( |version| match version { 2 => mongodb::options::Sphere2DIndexVersion::V2, 3 => mongodb::options::Sphere2DIndexVersion::V3, _custom => mongodb::options::Sphere2DIndexVersion::Custom(_custom), }, )) .bits(raw_options.bits) .max(raw_options.max) .min(raw_options.min) .bucket_size(raw_options.bucket_size) .partial_filter_expression( raw_options .partial_filter_expression .clone() 
.map(|partial_filter_expression| partial_filter_expression.0), ) .collation(raw_options.collation.clone().map(|collation| collation.0)) .wildcard_projection( raw_options .wildcard_projection .clone() .map(|wildcard_projection| wildcard_projection.0), ) .hidden(raw_options.hidden) .build() } }
use darling::FromMeta; #[derive(Clone, Debug)] pub(crate) struct Document(mongodb::bson::Document); impl FromMeta for Document { fn from_string(value: &str) -> darling::Result<Self> { println!("{value:#?}"); let value = serde_json::from_str(value); match value { Ok(document) => Ok(Self(document)), Err(error) => Err(darling::Error::unsupported_shape(&format!("{error}"))), } } } #[derive(Clone, Debug)] pub(crate) struct Collation(mongodb::options::Collation); impl FromMeta for Collation { fn from_string(value: &str) -> darling::Result<Self> { println!("{value:#?}"); let value = serde_json::from_str(value);
t)] pub(crate) bits: Option<u32>, #[darling(default)] pub(crate) max: Option<f64>, #[darling(default)] pub(crate) min: Option<f64>, #[darling(default)] pub(crate) bucket_size: Option<u32>, #[darling(default)] pub(crate) partial_filter_expression: Option<Document>, #[darling(default)] pub(crate) collation: Option<Collation>, #[darling(default)] pub(crate) wildcard_projection: Option<Document>, #[darling(default)] pub(crate) hidden: Option<bool>, } impl From<&RawIndexOptions> for mongodb::options::IndexOptions { fn from(raw_options: &RawIndexOptions) -> Self { let builder = mongodb::options::IndexOptions::builder(); builder .background(raw_options.background) .expire_after(raw_options.expire_after.map(std::time::Duration::from_secs)) .name(raw_options.name.clone()) .sparse(raw_options.sparse) .storage_engine( raw_options .storage_engine .clone() .map(|storage_engine| storage_engine.0), ) .unique(raw_options.unique) .version(raw_options.version.map(|version| match version { 0 => mongodb::options::IndexVersion::V0, 1 => mongodb::options::IndexVersion::V1, 2 => mongodb::options::IndexVersion::V2, _custom => mongodb::options::IndexVersion::Custom(_custom), })) .default_language(raw_options.default_language.clone()) .language_override(raw_options.language_override.clone()) .text_index_version(raw_options.text_index_version.map(|version| match version { 1 => mongodb::options::TextIndexVersion::V1, 2 => mongodb::options::TextIndexVersion::V2, 3 => mongodb::options::TextIndexVersion::V3, _custom => mongodb::options::TextIndexVersion::Custom(_custom), })) .weights(raw_options.weights.clone().map(|weights| weights.0)) .sphere_2d_index_version(raw_options.sphere_2d_index_version.map( |version| match version { 2 => mongodb::options::Sphere2DIndexVersion::V2, 3 => mongodb::options::Sphere2DIndexVersion::V3, _custom => mongodb::options::Sphere2DIndexVersion::Custom(_custom), }, )) .bits(raw_options.bits) .max(raw_options.max) .min(raw_options.min) 
.bucket_size(raw_options.bucket_size) .partial_filter_expression( raw_options .partial_filter_expression .clone() .map(|partial_filter_expression| partial_filter_expression.0), ) .collation(raw_options.collation.clone().map(|collation| collation.0)) .wildcard_projection( raw_options .wildcard_projection .clone() .map(|wildcard_projection| wildcard_projection.0), ) .hidden(raw_options.hidden) .build() } }
match value { Ok(document) => Ok(Self(document)), Err(error) => Err(darling::Error::unsupported_shape(&format!("{error}"))), } } } #[derive(Clone, Debug)] pub(crate) struct IndexOptions(mongodb::options::IndexOptions); impl FromMeta for IndexOptions { fn from_string(value: &str) -> darling::Result<Self> { println!("{value:#?}"); let value = serde_json::from_str(value); match value { Ok(document) => Ok(Self(document)), Err(error) => Err(darling::Error::unsupported_shape(&format!("{error}"))), } } } #[derive(Debug, Default, FromMeta)] pub(crate) struct RawIndexOptions { #[darling(default)] pub(crate) background: Option<bool>, #[darling(default)] pub(crate) expire_after: Option<u64>, #[darling(default)] pub(crate) name: Option<String>, #[darling(default)] pub(crate) sparse: Option<bool>, #[darling(default)] pub(crate) storage_engine: Option<Document>, #[darling(default)] pub(crate) unique: Option<bool>, #[darling(default)] pub(crate) version: Option<u32>, #[darling(default)] pub(crate) default_language: Option<String>, #[darling(default)] pub(crate) language_override: Option<String>, #[darling(default)] pub(crate) text_index_version: Option<u32>, #[darling(default)] pub(crate) weights: Option<Document>, #[darling(default)] pub(crate) sphere_2d_index_version: Option<u32>, #[darling(defaul
random
[ { "content": "fn impl_model_derive_macro(ast: &syn::DeriveInput) -> TokenStream {\n\n let parsed: Model = FromDeriveInput::from_derive_input(ast).unwrap();\n\n println!(\"{parsed:#?}\");\n\n let name = &parsed.ident;\n\n\n\n let collection_name = match parsed.collection_options {\n\n Some(collection_options) if collection_options.name.is_some() => {\n\n collection_options.name.unwrap()\n\n }\n\n _ => name.to_string(),\n\n };\n\n let collection_name = format_ident!(\"{}\", collection_name);\n\n let indexes = parsed.indexes;\n\n\n\n let index_models: Vec<mongodb::IndexModel> = indexes.iter().map(|item| item.into()).collect();\n\n let vv: Vec<Vec<u8>> = index_models\n\n .iter()\n\n .map(|i| mongodb::bson::to_vec(&i).unwrap())\n\n .collect();\n\n\n", "file_path": "pongo-rs-derive/src/lib.rs", "rank": 0, "score": 35246.210318787445 }, { "content": "#[derive(Clone, Default, Debug, Serialize, Deserialize, Model)]\n\n#[model(collection_options(name = \"books\"))]\n\n#[model(index(key(title = 1), key(author = -1)))]\n\n#[model(index(key(title = -1), key(author = 1)))]\n\n#[model(index(key(title = 1), key(author = 1), options(background = true)))]\n\nstruct Book {\n\n /// The ID of the model.\n\n #[serde(rename = \"_id\", skip_serializing_if = \"Option::is_none\")]\n\n id: Option<ObjectId>,\n\n title: String,\n\n author: String,\n\n}\n\n\n\nasync fn make_connection() -> Result<Client, mongodb::error::Error> {\n\n // Parse a connection string into an options struct.\n\n let mut client_options = ClientOptions::parse(\"mongodb://root:example@localhost:27017\").await?;\n\n\n\n // Manually set an option.\n\n client_options.app_name = Some(\"My App\".to_string());\n\n\n\n // Get a handle to the deployment.\n\n let client = Client::with_options(client_options)?;\n\n\n\n Ok(client)\n\n}\n", "file_path": "example/src/main.rs", "rank": 1, "score": 29226.03063003064 }, { "content": "#[derive(Debug, FromDeriveInput)]\n\n#[darling(attributes(model), supports(struct_any))]\n\nstruct 
Model {\n\n ident: syn::Ident,\n\n attrs: Vec<syn::Attribute>,\n\n #[darling(default)]\n\n /// All collection options\n\n collection_options: Option<CollectionOptions>,\n\n #[darling(default)]\n\n #[darling(multiple)]\n\n #[darling(rename = \"index\")]\n\n /// Collection indexes\n\n indexes: Vec<RawIndexModel>,\n\n}\n\n\n", "file_path": "pongo-rs-derive/src/lib.rs", "rank": 2, "score": 27535.387951871446 }, { "content": "#[derive(Debug, Serialize, Deserialize)]\n\nstruct Book {\n\n title: String,\n\n author: String,\n\n}\n", "file_path": "pongo-rs/test/book_model.rs", "rank": 3, "score": 27533.20495239251 }, { "content": "#[derive(FromMeta, Debug)]\n\nstruct CollectionOptions {\n\n #[darling(default)]\n\n #[darling(map = \"CollectionOptions::lower_case\")]\n\n /// Collection name\n\n name: Option<String>,\n\n}\n\n\n\nimpl CollectionOptions {\n\n fn lower_case(arg: Option<String>) -> Option<String> {\n\n if let Some(name) = arg {\n\n let new_name = name.chars().enumerate().fold(\n\n String::with_capacity(name.capacity()),\n\n |mut acc, (index, character)| {\n\n if index == 0 {\n\n acc.push(character.to_ascii_lowercase());\n\n } else {\n\n acc.push(character);\n\n }\n\n acc\n\n },\n", "file_path": "pongo-rs-derive/src/lib.rs", "rank": 4, "score": 26785.4651847551 }, { "content": "#[derive(Debug, FromMeta)]\n\nstruct RawIndexModel {\n\n #[darling(default)]\n\n #[darling(multiple)]\n\n #[darling(rename = \"key\")]\n\n keys: Vec<HashMap<String, i32>>,\n\n #[darling(default)]\n\n options: Option<raw_index_options::RawIndexOptions>,\n\n}\n\n\n\nimpl From<&RawIndexModel> for mongodb::IndexModel {\n\n fn from(raw_index_model: &RawIndexModel) -> Self {\n\n let keys =\n\n raw_index_model\n\n .keys\n\n .iter()\n\n .fold(mongodb::bson::Document::new(), |mut acc, index| {\n\n index.iter().for_each(|(key, order)| {\n\n acc.extend([(key.clone(), order.into())]);\n\n });\n\n acc\n", "file_path": "pongo-rs-derive/src/lib.rs", "rank": 5, "score": 26093.807443554775 }, { "content": 
"#[proc_macro_derive(Model, attributes(model))]\n\npub fn model_derive(input: TokenStream) -> TokenStream {\n\n // Construct a representation of Rust code as a syntax tree\n\n // that we can manipulate\n\n let ast = syn::parse(input).unwrap();\n\n\n\n // Build the trait implementation\n\n impl_model_derive_macro(&ast)\n\n}\n\n\n", "file_path": "pongo-rs-derive/src/lib.rs", "rank": 6, "score": 21722.69614261131 }, { "content": "// use futures::stream::{TryStreamExt, StreamExt};\n\n// use mongodb::{bson::doc, options::FindOptions};\n\n// use mongodb::{options::ClientOptions, Client};\n\n// use serde::{Deserialize, Serialize};\n\n\n\n// #[derive(Debug, Serialize, Deserialize)]\n\n// struct Book {\n\n// title: String,\n\n// author: String,\n\n// }\n\nuse async_trait::async_trait;\n\nuse mongodb::{\n\n bson::{doc, oid::ObjectId, Document},\n\n options::FindOptions,\n\n results::InsertOneResult,\n\n Collection, Cursor, Database, IndexModel,\n\n};\n\nuse serde::{de::DeserializeOwned, Serialize};\n\n\n\npub type MongoError = mongodb::error::Error;\n\n\n\n#[async_trait]\n", "file_path": "pongo-rs/src/lib.rs", "rank": 11, "score": 4.52067411177782 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\n#[derive(Debug, Serialize, Deserialize)]\n", "file_path": "pongo-rs/test/book_model.rs", "rank": 12, "score": 4.380055498694053 }, { "content": "mod raw_index_options;\n\n\n\nuse std::collections::HashMap;\n\n\n\nuse darling::{FromDeriveInput, FromMeta};\n\nuse proc_macro::TokenStream;\n\nuse quote::{format_ident, quote};\n\n\n\n/// The raw model used for deriving indices on models.\n\n#[derive(Debug, FromMeta)]\n", "file_path": "pongo-rs-derive/src/lib.rs", "rank": 13, "score": 4.142775408585636 }, { "content": " .map(|bytes| mongodb::bson::from_reader(std::io::Cursor::new(bytes)).unwrap())\n\n .collect()\n\n }\n\n }\n\n };\n\n\n\n gen.into()\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::Model;\n\n use darling::FromDeriveInput;\n\n use syn::parse_str;\n\n\n\n 
#[test]\n\n fn test_specified_collection_name() {\n\n let input = r#\"\n\n #[derive(Model)]\n\n #[model(collection_options(name = \"test\"))]\n", "file_path": "pongo-rs-derive/src/lib.rs", "rank": 14, "score": 3.660462978377583 }, { "content": "use futures::TryStreamExt;\n\nuse mongodb::IndexModel;\n\nuse pongo_rs::prelude::*;\n\nuse pongo_rs_derive::*;\n\nuse serde::{Deserialize, Serialize};\n\n\n\n#[derive(Clone, Default, Debug, Serialize, Deserialize, Model)]\n\n#[model(collection_options(name = \"books\"))]\n\n#[model(index(key(title = 1), key(author = -1)))]\n\n#[model(index(key(title = -1), key(author = 1)))]\n\n#[model(index(key(title = 1), key(author = 1), options(background = true)))]\n", "file_path": "example/src/main.rs", "rank": 15, "score": 3.5907475367172097 }, { "content": "{\n\n collection.create_indexes(indexes, options).await?;\n\n Ok(())\n\n}\n\n\n\npub mod prelude {\n\n pub use super::{Model, MongoError};\n\n pub use async_trait::async_trait;\n\n pub use mongodb::{\n\n bson::{doc, oid::ObjectId, Document},\n\n options::ClientOptions,\n\n results::InsertOneResult,\n\n Client, Collection, Cursor, Database,\n\n };\n\n}\n", "file_path": "pongo-rs/src/lib.rs", "rank": 16, "score": 3.5904856841484847 }, { "content": " let gen = quote! 
{\n\n #[async_trait]\n\n impl Model for #name {\n\n const COLLECTION_NAME: &'static str = stringify!(#collection_name);\n\n\n\n /// Get the ID for this model instance.\n\n fn set_id(&mut self, id: ObjectId) {\n\n self.id = Some(id);\n\n }\n\n\n\n /// Set the ID for this model.\n\n fn get_id(&self) -> Option<ObjectId> {\n\n self.id\n\n }\n\n\n\n /// Get the vector of index models for this model.\n\n fn get_indexes() -> Vec<IndexModel> {\n\n let bytes = vec![#(vec![#(#vv),*]),*];\n\n bytes\n\n .iter()\n", "file_path": "pongo-rs-derive/src/lib.rs", "rank": 19, "score": 2.6510414066567813 }, { "content": " }\n\n\n\n async fn find_by_id(db: &Database, id: &ObjectId) -> Result<Option<Self>, MongoError> {\n\n let typed_collection = Self::get_collection(db);\n\n let filter = doc! { \"_id\": id };\n\n typed_collection.find_one(filter, None).await\n\n }\n\n\n\n /// Find all instances of this model matching the given query.\n\n async fn find<F, O>(db: &Database, filter: F, options: O) -> Result<Cursor<Self>, MongoError>\n\n where\n\n F: Into<Option<Document>> + Send,\n\n O: Into<Option<FindOptions>> + Send,\n\n {\n\n let typed_collection = Self::get_collection(db);\n\n typed_collection.find(filter, options).await\n\n }\n\n\n\n async fn save(&self, db: &Database) -> Result<(), MongoError> {\n\n match self.get_id() {\n", "file_path": "pongo-rs/src/lib.rs", "rank": 20, "score": 2.559606696609248 }, { "content": "\n\n Apache License\n\n Version 2.0, January 2004\n\n http://www.apache.org/licenses/\n\n\n\n TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION\n\n\n\n 1. 
Definitions.\n\n\n\n \"License\" shall mean the terms and conditions for use, reproduction,\n\n and distribution as defined by Sections 1 through 9 of this document.\n\n\n\n \"Licensor\" shall mean the copyright owner or entity authorized by\n\n the copyright owner that is granting the License.\n\n\n\n \"Legal Entity\" shall mean the union of the acting entity and all\n\n other entities that control, are controlled by, or are under common\n\n control with that entity. For the purposes of this definition,\n\n \"control\" means (i) the power, direct or indirect, to cause the\n\n direction or management of such entity, whether by contract or\n\n otherwise, or (ii) ownership of fifty percent (50%) or more of the\n\n outstanding shares, or (iii) beneficial ownership of such entity.\n\n\n\n \"You\" (or \"Your\") shall mean an individual or Legal Entity\n\n exercising permissions granted by this License.\n\n\n\n \"Source\" form shall mean the preferred form for making modifications,\n\n including but not limited to software source code, documentation\n\n source, and configuration files.\n\n\n\n \"Object\" form shall mean any form resulting from mechanical\n\n transformation or translation of a Source form, including but\n\n not limited to compiled object code, generated documentation,\n\n and conversions to other media types.\n\n\n\n \"Work\" shall mean the work of authorship, whether in Source or\n\n Object form, made available under the License, as indicated by a\n\n copyright notice that is included in or attached to the work\n", "file_path": "LICENSE.md", "rank": 21, "score": 2.4402146015659887 }, { "content": "pongo-rs\n\n\n\nAnother Mongo ODM\n\n\n\n\"Pongo\" in italian is a modelling paste, so you use \"Pongo\" to model collections.\n", "file_path": "README.md", "rank": 24, "score": 1.9502729650909345 }, { "content": " struct Book {\n\n title: String,\n\n author: String,\n\n }\n\n \"#;\n\n\n\n let parsed = parse_str(input).unwrap();\n\n let parsed: Model = 
FromDeriveInput::from_derive_input(&parsed).unwrap();\n\n\n\n assert_eq!(parsed.collection_options.unwrap().name.unwrap(), \"test\");\n\n }\n\n\n\n #[test]\n\n fn test_default_collection_name() {\n\n let input = r#\"\n\n #[derive(Model)]\n\n struct Book {\n\n title: String,\n\n author: String,\n\n }\n\n \"#;\n\n\n\n let parsed = parse_str(input).unwrap();\n\n let parsed: Model = FromDeriveInput::from_derive_input(&parsed).unwrap();\n\n\n\n assert!(parsed.collection_options.is_none());\n\n }\n\n}\n", "file_path": "pongo-rs-derive/src/lib.rs", "rank": 25, "score": 1.840280951883255 }, { "content": " stating that You changed the files; and\n\n\n\n (c) You must retain, in the Source form of any Derivative Works\n\n that You distribute, all copyright, patent, trademark, and\n\n attribution notices from the Source form of the Work,\n\n excluding those notices that do not pertain to any part of\n\n the Derivative Works; and\n\n\n\n (d) If the Work includes a \"NOTICE\" text file as part of its\n\n distribution, then any Derivative Works that You distribute must\n\n include a readable copy of the attribution notices contained\n\n within such NOTICE file, excluding those notices that do not\n\n pertain to any part of the Derivative Works, in at least one\n\n of the following places: within a NOTICE text file distributed\n\n as part of the Derivative Works; within the Source form or\n\n documentation, if provided along with the Derivative Works; or,\n\n within a display generated by the Derivative Works, if and\n\n wherever such third-party notices normally appear. The contents\n\n of the NOTICE file are for informational purposes only and\n\n do not modify the License. 
You may add Your own attribution\n\n notices within Derivative Works that You distribute, alongside\n\n or as an addendum to the NOTICE text from the Work, provided\n\n that such additional attribution notices cannot be construed\n\n as modifying the License.\n\n\n\n You may add Your own copyright statement to Your modifications and\n\n may provide additional or different license terms and conditions\n\n for use, reproduction, or distribution of Your modifications, or\n\n for any such Derivative Works as a whole, provided Your use,\n\n reproduction, and distribution of the Work otherwise complies with\n", "file_path": "LICENSE.md", "rank": 26, "score": 1.740525155515941 }, { "content": " });\n\n let index_builder = mongodb::IndexModel::builder();\n\n let index_builder = index_builder.keys(keys);\n\n let index_builder = match &raw_index_model.options {\n\n Some(options) => index_builder.options(Some(options.into())),\n\n _ => index_builder.options(None),\n\n };\n\n \n\n index_builder.build()\n\n }\n\n}\n\n\n", "file_path": "pongo-rs-derive/src/lib.rs", "rank": 27, "score": 1.6145694039069567 }, { "content": " the conditions stated in this License.\n\n\n\n 5. Submission of Contributions. Unless You explicitly state otherwise,\n\n any Contribution intentionally submitted for inclusion in the Work\n\n by You to the Licensor shall be under the terms and conditions of\n\n this License, without any additional terms or conditions.\n\n Notwithstanding the above, nothing herein shall supersede or modify\n\n the terms of any separate license agreement you may have executed\n\n with Licensor regarding such Contributions.\n\n\n\n 6. Trademarks. This License does not grant permission to use the trade\n\n names, trademarks, service marks, or product names of the Licensor,\n\n except as required for reasonable and customary use in describing the\n\n origin of the Work and reproducing the content of the NOTICE file.\n\n\n\n 7. Disclaimer of Warranty. 
Unless required by applicable law or\n\n agreed to in writing, Licensor provides the Work (and each\n\n Contributor provides its Contributions) on an \"AS IS\" BASIS,\n\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or\n\n implied, including, without limitation, any warranties or conditions\n\n of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A\n\n PARTICULAR PURPOSE. You are solely responsible for determining the\n\n appropriateness of using or redistributing the Work and assume any\n", "file_path": "LICENSE.md", "rank": 28, "score": 1.5386058484490486 }, { "content": "\n\n let insert_result = Book::insert_one(&db, &instance).await.unwrap();\n\n\n\n let id = insert_result.inserted_id;\n\n\n\n match id {\n\n mongodb::bson::Bson::ObjectId(id) => {\n\n let book = Book::find_by_id(&db, &id).await;\n\n println!(\"Created book: {book:#?}\");\n\n }\n\n _ => {}\n\n }\n\n\n\n let books: Vec<Book> = Book::find(&db, None, None)\n\n .await\n\n .unwrap()\n\n .try_collect()\n\n .await\n\n .unwrap();\n\n\n", "file_path": "example/src/main.rs", "rank": 30, "score": 1.3499853426308746 }, { "content": " Some(_) => {\n\n let mut document = mongodb::bson::to_document(&self).unwrap();\n\n println!(\"{:#?}\", document);\n\n if let Some(id) = document.remove(\"_id\") {\n\n let update_query = doc! { \"$set\": document };\n\n let typed_collection = Self::get_collection(db);\n\n typed_collection\n\n .update_one(doc! { \"_id\": id }, update_query, None)\n\n .await?;\n\n }\n\n }\n\n None => {\n\n Self::insert_one(db, self).await?;\n\n }\n\n };\n\n Ok(())\n\n }\n\n\n\n /// Get the vector of index models for this model.\n\n fn get_indexes() -> Vec<IndexModel> {\n", "file_path": "pongo-rs/src/lib.rs", "rank": 31, "score": 1.3123769304615722 }, { "content": " risks associated with Your exercise of permissions under this License.\n\n\n\n 8. Limitation of Liability. 
In no event and under no legal theory,\n\n whether in tort (including negligence), contract, or otherwise,\n\n unless required by applicable law (such as deliberate and grossly\n\n negligent acts) or agreed to in writing, shall any Contributor be\n\n liable to You for damages, including any direct, indirect, special,\n\n incidental, or consequential damages of any character arising as a\n\n result of this License or out of the use or inability to use the\n\n Work (including but not limited to damages for loss of goodwill,\n\n work stoppage, computer failure or malfunction, or any and all\n\n other commercial damages or losses), even if such Contributor\n\n has been advised of the possibility of such damages.\n\n\n\n 9. Accepting Warranty or Additional Liability. While redistributing\n\n the Work or Derivative Works thereof, You may choose to offer,\n\n and charge a fee for, acceptance of support, warranty, indemnity,\n\n or other liability obligations and/or rights consistent with this\n\n License. However, in accepting such obligations, You may act only\n\n on Your own behalf and on Your sole responsibility, not on behalf\n\n of any other Contributor, and only if You agree to indemnify,\n\n defend, and hold each Contributor harmless for any liability\n\n incurred by, or claims asserted against, such Contributor by reason\n\n of your accepting any such warranty or additional liability.\n\n\n\n END OF TERMS AND CONDITIONS\n\n\n", "file_path": "LICENSE.md", "rank": 32, "score": 1.1674938347339632 }, { "content": " APPENDIX: How to apply the Apache License to your work.\n\n\n\n To apply the Apache License to your work, attach the following\n\n boilerplate notice, with the fields enclosed by brackets \"{}\"\n\n replaced with your own identifying information. (Don't include\n\n the brackets!) The text should be enclosed in the appropriate\n\n comment syntax for the file format. 
We also recommend that a\n\n file or class name and description of purpose be included on the\n\n same \"printed page\" as the copyright notice for easier\n\n identification within third-party archives.\n\n\n\n Copyright {yyyy} {name of copyright owner}\n\n\n\n Licensed under the Apache License, Version 2.0 (the \"License\");\n\n you may not use this file except in compliance with the License.\n\n You may obtain a copy of the License at\n\n\n\n http://www.apache.org/licenses/LICENSE-2.0\n\n\n\n Unless required by applicable law or agreed to in writing, software\n\n distributed under the License is distributed on an \"AS IS\" BASIS,\n\n WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n See the License for the specific language governing permissions and\n\n limitations under the License.\n", "file_path": "LICENSE.md", "rank": 34, "score": 0.8181355154254897 }, { "content": " subsequently incorporated within the Work.\n\n\n\n 2. Grant of Copyright License. Subject to the terms and conditions of\n\n this License, each Contributor hereby grants to You a perpetual,\n\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n\n copyright license to reproduce, prepare Derivative Works of,\n\n publicly display, publicly perform, sublicense, and distribute the\n\n Work and such Derivative Works in Source or Object form.\n\n\n\n 3. Grant of Patent License. Subject to the terms and conditions of\n\n this License, each Contributor hereby grants to You a perpetual,\n\n worldwide, non-exclusive, no-charge, royalty-free, irrevocable\n\n (except as stated in this section) patent license to make, have made,\n\n use, offer to sell, sell, import, and otherwise transfer the Work,\n\n where such license applies only to those patent claims licensable\n\n by such Contributor that are necessarily infringed by their\n\n Contribution(s) alone or by combination of their Contribution(s)\n\n with the Work to which such Contribution(s) was submitted. 
If You\n\n institute patent litigation against any entity (including a\n\n cross-claim or counterclaim in a lawsuit) alleging that the Work\n\n or a Contribution incorporated within the Work constitutes direct\n\n or contributory patent infringement, then any patent licenses\n\n granted to You under this License for that Work shall terminate\n\n as of the date such litigation is filed.\n\n\n\n 4. Redistribution. You may reproduce and distribute copies of the\n\n Work or Derivative Works thereof in any medium, with or without\n\n modifications, and in Source or Object form, provided that You\n\n meet the following conditions:\n\n\n\n (a) You must give any other recipients of the Work or\n\n Derivative Works a copy of this License; and\n\n\n\n (b) You must cause any modified files to carry prominent notices\n", "file_path": "LICENSE.md", "rank": 35, "score": 0.5808552917701175 } ]
Rust
src/python_module.rs
gchers/fbleau
fe66cc859efaba47526cb864fbe8438bfd6bdc35
use numpy::*; use pyo3::prelude::*; use pyo3::types::PyDict; use crate::estimates::*; use crate::fbleau_estimation::{run_fbleau, Logger}; use crate::Label; #[pymodule(fbleau)] fn pyfbleau(_py: Python, m: &PyModule) -> PyResult<()> { #[pyfn(m, "run_fbleau")] fn run_fbleau_py( py: Python, train_x: &PyArray2<f64>, train_y: &PyArray1<Label>, test_x: &PyArray2<f64>, test_y: &PyArray1<Label>, estimate: &str, knn_strategy: Option<&str>, distance: Option<String>, log_errors: bool, log_individual_errors: bool, delta: Option<f64>, qstop: Option<usize>, absolute: bool, scale: bool, ) -> PyResult<PyObject> { let train_x = unsafe { train_x.as_array().to_owned() }; let train_y = unsafe { train_y.as_array().to_owned() }; let test_x = unsafe { test_x.as_array().to_owned() }; let test_y = unsafe { test_y.as_array().to_owned() }; let estimate = match estimate { "nn" => Estimate::NN, "knn" => Estimate::KNN, "frequentist" => Estimate::Frequentist, "nn-bound" => Estimate::NNBound, _ => { unimplemented!() } }; let knn_strategy = if let Some(strategy) = knn_strategy { match strategy { "ln" => Some(KNNStrategy::Ln), "log10" => Some(KNNStrategy::Log10), _ => { unimplemented!() } } } else { None }; let mut error_logger = if log_errors { Some(Logger::LogVec(vec![])) } else { None }; let mut individual_error_logger = if log_individual_errors { Some(Logger::LogVec(vec![])) } else { None }; let (min_error, last_error, random_guessing) = run_fbleau( train_x, train_y, test_x, test_y, estimate, knn_strategy, distance, &mut error_logger, &mut individual_error_logger, delta, qstop, absolute, scale, ); let res = PyDict::new(py); res.set_item("min-estimate", min_error)?; res.set_item("last-estimate", last_error)?; res.set_item("random-guessing", random_guessing)?; res.set_item( "estimates", if let Some(Logger::LogVec(v)) = error_logger { v } else { vec![] }, )?; res.set_item( "min-individual-errors", if let Some(Logger::LogVec(v)) = individual_error_logger { v } else { vec![] }, )?; Ok(res.into()) } 
Ok(()) }
use numpy::*; use pyo3::prelude::*; use pyo3::types::PyDict; use crate::estimates::*; use crate::fbleau_estimation::{run_fbleau, Logger}; use crate::Label; #[pymodule(fbleau)] fn pyfbleau(_py: Python, m: &PyModule) -> PyResult<()> { #[pyfn(m, "run_fbleau")] fn run_fbleau_py( py: Python, train_x: &PyArray2<f64>, train_y: &PyArray1<Label>, test_x: &PyArray2<f64>, test_y: &PyArray1<Label>, estimate: &str, knn_strategy: Option<&str>, distance: Option<String>, log_errors: bool, log_individual_errors: bool, delta: Option<f64>, qstop: Option<usize>, absolute: bool, scale: bool, ) -> PyResult<PyObject> { let train_x = unsafe { train_x.as_array().to_owned() }; let train_y = unsafe { train_y.as_array().to_owned() }; let test_x = unsafe { test_x.as_array().to_owned() }; let test_y = unsafe { test_y.as_array().to_owned() }; let estimate = match estimate { "nn" => Estimate::NN, "knn" => Estimate::KNN, "frequentist" => Estimate::Frequentist, "nn-bound" => Estimate::NNBound, _ => { unimplemented!() } }; let knn_strategy = if let Some(strategy) = knn_strategy { match strategy { "ln" => Some(KNNStrategy::Ln), "log10" => Some(KNNStrategy::Log10), _ => { unimplemented!() } } } else { None }; let mut error_logger = if log_errors { Some(Logger::LogVec(vec![])) } else { None }; let mut individual_error_logger = if log_individual_errors { Some(Logger::LogVec(vec![])) } else { None }; let (min_error, last_error, random_guessing) = run_fbleau( train_x, train_y, test_x, test_y, estimate, knn_strategy, distance, &mut error_logger, &mut individual_error_logger, delta, qstop, absolute, scale, ); let res = PyDict::new(py); res.set_item("min-estimate", min_error)?; res.set_item("last-estimate", last_error)?; res.set_item("random-guessing", random_guessing)?; res.set_item( "
estimates", if let Some(Logger::LogVec(v)) = error_logger { v } else { vec![] }, )?; res.set_item( "min-individual-errors", if let Some(Logger::LogVec(v)) = individual_error_logger { v } else { vec![] }, )?; Ok(res.into()) } Ok(()) }
function_block-function_prefixed
[ { "content": "pub fn knn_strategy(strategy: KNNStrategy) -> Box<dyn Fn(usize) -> usize> {\n\n match strategy {\n\n KNNStrategy::NN => Box::new(move |_| 1),\n\n KNNStrategy::FixedK(k) => Box::new(move |_| k),\n\n KNNStrategy::Ln => Box::new(move |n| {\n\n next_odd(if n != 0 {\n\n (n as f64).ln().ceil() as usize\n\n } else {\n\n 1\n\n })\n\n }),\n\n KNNStrategy::Log10 => Box::new(move |n| {\n\n next_odd(if n != 0 {\n\n (n as f64).log10().ceil() as usize\n\n } else {\n\n 1\n\n })\n\n }),\n\n KNNStrategy::Custom(custom) => custom,\n\n }\n\n}\n\n\n", "file_path": "src/estimates/knn_utils.rs", "rank": 1, "score": 104196.39092354353 }, { "content": "fn bench_knn_forward(b: &mut Bencher) {\n\n let (train_x, train_y, test_x, test_y) = load_boston();\n\n let n_train = train_x.nrows();\n\n\n\n b.iter(|| {\n\n let mut knn = KNNEstimator::new(\n\n &test_x.view(),\n\n &test_y.view(),\n\n n_train,\n\n euclidean_distance,\n\n KNNStrategy::Ln,\n\n );\n\n for (n, (x, y)) in train_x.outer_iter().zip(train_y.iter()).enumerate() {\n\n let k = if n != 0 {\n\n let k = (n as f64).ln().ceil() as usize;\n\n if k % 2 == 0 {\n\n k + 1\n\n } else {\n\n k\n\n }\n", "file_path": "benches/knn.rs", "rank": 2, "score": 97972.58864728869 }, { "content": "fn bench_knn_init(b: &mut Bencher) {\n\n let (train_x, train_y, test_x, test_y) = load_boston();\n\n let max_n = train_x.nrows();\n\n\n\n b.iter(|| {\n\n let _knn = KNNEstimator::from_data(\n\n &train_x.view(),\n\n &train_y.view(),\n\n &test_x.view(),\n\n &test_y.view(),\n\n max_n,\n\n euclidean_distance,\n\n KNNStrategy::Ln,\n\n );\n\n })\n\n}\n\n\n", "file_path": "benches/knn.rs", "rank": 3, "score": 97972.58864728869 }, { "content": "/// Forward strategy for estimation.\n\n///\n\n/// Estimates security measures with a forward strategy: the estimator\n\n/// is trained with an increasing number of examples, and its estimate\n\n/// is progressively logged.\n\n/// This function returns:\n\n/// - smallest estimate\n\n/// - final estimate (i.e., the 
estimate when all the training data was\n\n/// available).\n\nfn run_forward_strategy<E>(\n\n mut estimator: E,\n\n mut convergence_checker: Option<ForwardChecker>,\n\n error_logger: &mut Option<Logger<f64>>,\n\n individual_error_logger: &mut Option<Logger<bool>>,\n\n train_x: Array2<f64>,\n\n train_y: Array1<Label>,\n\n) -> (f64, f64)\n\nwhere\n\n E: BayesEstimator,\n\n{\n\n // Init logfile, if specified.\n\n if let Some(ref mut logger) = error_logger {\n\n if let Logger::LogFile(file) = logger {\n\n writeln!(file, \"n, error-count, estimate\").expect(\"Could not write to log file\");\n\n }\n\n }\n\n\n\n // We keep track both of the minimum and of the last estimate.\n\n let mut min_error = 1.0;\n", "file_path": "src/fbleau_estimation.rs", "rank": 4, "score": 76572.40479050002 }, { "content": "/// Prepares everything for running F-BLEAU, and runs a forward\n\n/// estimation strategy.\n\npub fn run_fbleau(\n\n train_x: Array2<f64>,\n\n train_y: Array1<Label>,\n\n test_x: Array2<f64>,\n\n test_y: Array1<Label>,\n\n estimate: Estimate,\n\n knn_strategy: Option<KNNStrategy>,\n\n distance: Option<String>,\n\n error_logger: &mut Option<Logger<f64>>,\n\n individual_error_logger: &mut Option<Logger<bool>>,\n\n delta: Option<f64>,\n\n qstop: Option<usize>,\n\n absolute: bool,\n\n scale: bool,\n\n) -> (f64, f64, f64) {\n\n // Check label's indexes, and scale data if required.\n\n let (train_x, train_y, test_x, test_y, nlabels) =\n\n prepare_data(train_x, train_y, test_x, test_y, scale);\n\n\n\n // Convergence with (delta, q)-convergence checker.\n", "file_path": "src/fbleau_estimation.rs", "rank": 5, "score": 75654.75500253041 }, { "content": "/// Computes the NN bound derived from Cover&Hart, given\n\n/// the error and the number of labels.\n\npub fn nn_bound(error: f64, nlabels: usize) -> f64 {\n\n let nl = nlabels as f64;\n\n // Computing: (L-1)/L * (1 - (1 - L/(L-1)*error).sqrt())\n\n // with error = min(error, rg).\n\n let rg = (nl - 1.) 
/ nl;\n\n match error {\n\n e if e < rg => rg * (1. - (1. - nl / (nl - 1.) * error).sqrt()),\n\n _ => rg,\n\n }\n\n}\n\n\n", "file_path": "src/estimates/knn_utils.rs", "rank": 6, "score": 72895.64772376219 }, { "content": "/// Returns relative or absolute change between two measurements.\n\nfn change(a: f64, b: f64, relative: bool) -> f64 {\n\n if relative {\n\n (a - b).abs() / b\n\n } else {\n\n (a - b).abs()\n\n }\n\n}\n\n\n\n/// `ForwardChecker` should be used for checking convergence of\n\n/// estimates in a \"forward\" direction (i.e., when one training example\n\n/// is _added_ each time).\n\n///\n\n/// It allows checking for relative or absolute convergence:\n\n/// we declare convergence if an estimate did not change (in relative\n\n/// or absolute sense) more than some `delta` for at least `q` steps.\n\n/// `ForwardChecker` allows measuring `delta`-convergence for several\n\n/// values of `delta`.\n\npub struct ForwardChecker {\n\n // A double-ended queue keeping track of all the estimates for which\n\n // next_delta-convergence happens.\n", "file_path": "src/estimates/convergence.rs", "rank": 7, "score": 72704.69769344572 }, { "content": "/// Returns `n` if `n` is odd, otherwise `n+1`.\n\nfn next_odd(n: usize) -> usize {\n\n match n % 2 {\n\n 0 => n + 1,\n\n _ => n,\n\n }\n\n}\n", "file_path": "src/estimates/knn_utils.rs", "rank": 8, "score": 67926.30236682115 }, { "content": "/// Scales columns' values in [0,1] with min-max scaling.\n\npub fn scale01(matrix: &mut Array2<f64>) {\n\n let mut max = Array::ones(matrix.ncols()) * -f64::INFINITY;\n\n let mut min = Array::ones(matrix.ncols()) * f64::INFINITY;\n\n\n\n for row in matrix.outer_iter() {\n\n for i in 0..row.len() {\n\n if min[i] > row[i] {\n\n min[i] = row[i];\n\n }\n\n if max[i] < row[i] {\n\n max[i] = row[i];\n\n }\n\n }\n\n }\n\n\n\n for mut row in matrix.outer_iter_mut() {\n\n for i in 0..row.len() {\n\n row[i] = (row[i] - min[i]) / (max[i] - min[i]);\n\n }\n\n }\n\n}\n\n\n", "file_path": 
"src/utils.rs", "rank": 9, "score": 62890.37731768688 }, { "content": "/// Returns the Levenshtein distance between two vectors of f64 values.\n\npub fn levenshtein_distance(v1: &ArrayView1<f64>, v2: &ArrayView1<f64>) -> f64 {\n\n generic_levenshtein(v1, v2) as f64\n\n}\n\n\n\n/// Strategies for selecting `k` for k-NN given the number of\n\n/// training examples `n`.\n\n#[derive(Deserialize)]\n\n#[serde(rename_all = \"lowercase\")]\n\npub enum KNNStrategy {\n\n Ln,\n\n Log10,\n\n // We do not want to parse the following ones from the command line.\n\n // So we ask serde to skip them.\n\n #[serde(skip)]\n\n NN,\n\n #[serde(skip)]\n\n FixedK(usize),\n\n #[serde(skip)]\n\n Custom(Box<dyn Fn(usize) -> usize>),\n\n}\n\n\n", "file_path": "src/estimates/knn_utils.rs", "rank": 10, "score": 62713.374193365 }, { "content": "/// Returns the Euclidean distance between two vectors of f64 values.\n\npub fn euclidean_distance(v1: &ArrayView1<f64>, v2: &ArrayView1<f64>) -> f64 {\n\n v1.iter()\n\n .zip(v2.iter())\n\n .map(|(x, y)| (x - y).powi(2))\n\n .sum::<f64>()\n\n .sqrt()\n\n}\n\n\n", "file_path": "src/estimates/knn_utils.rs", "rank": 11, "score": 62713.374193365 }, { "content": "/// Returns true if all the elements of the array\n\n/// can be converted into integers without loss.\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # extern crate ndarray;\n\n/// # extern crate fbleau;\n\n/// # use ndarray::prelude::*;\n\n/// # use fbleau::utils::has_integer_support;\n\n/// assert!(has_integer_support(&array![[3.], [6.], [0.], [-4.]]));\n\n/// assert!(has_integer_support(&array![3., 6., 0., -4.]));\n\n/// assert!(!has_integer_support(&array![2., 5.5, 3.]));\n\n/// ```\n\npub fn has_integer_support<D: ndarray::Dimension>(v: &Array<f64, D>) -> bool {\n\n for x in v.iter() {\n\n if x.fract() != 0. 
{\n\n return false;\n\n }\n\n }\n\n true\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 12, "score": 50270.24994299132 }, { "content": " }\n\n }\n\n }\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl BayesEstimator for FrequentistEstimator {\n\n /// Adds a new training example.\n\n fn add_example(&mut self, x: &ArrayView1<f64>, y: Label) -> Result<(), ()> {\n\n let x = self.array_to_index.map(*x);\n\n self.train_x.push(x);\n\n self.train_y.push(y);\n\n\n\n let mut old_priors_pred = match self.priors_count.predict() {\n\n Some(pred) => pred,\n\n None => {\n\n self.add_first_example(x, y);\n\n return Ok(());\n", "file_path": "src/estimates/frequentist.rs", "rank": 13, "score": 46977.777598558925 }, { "content": "//! Frequentist Bayes risks estimate for discrete secret and output space.\n\nuse itertools::Itertools;\n\nuse ndarray::*;\n\nuse ordered_float::OrderedFloat;\n\nuse std::collections::HashMap;\n\n\n\nuse crate::estimates::{some_or_error, BayesEstimator};\n\nuse crate::Label;\n\n\n\n// Type of the elements of a feature vector.\n", "file_path": "src/estimates/frequentist.rs", "rank": 14, "score": 46975.22929227398 }, { "content": " assert_eq!(freq.joint_count.get(&1).unwrap().predict().unwrap(), 2);\n\n assert_eq!(freq.joint_count.get(&2).unwrap().predict().unwrap(), 1);\n\n assert!(freq.joint_count.get(&8).unwrap().predict().is_none());\n\n\n\n //// Estimate.\n\n assert_eq!(freq.error_count, 3);\n\n assert_eq!(freq.get_error(), 3. 
/ 7.);\n\n }\n\n\n\n #[test]\n\n fn frequentist_estimate_backward() {\n\n let n_labels = 3;\n\n let train_x = array![0, 0, 0, 1, 1, 2, 2, 2, 2, 2, 2, 6];\n\n let train_y = array![0, 1, 1, 2, 2, 1, 2, 2, 0, 1, 1, 1];\n\n\n\n let test_x = array![0, 0, 1, 2, 2, 8, 8];\n\n let test_y = array![1, 1, 1, 1, 2, 1, 0];\n\n\n\n let mut freq = FrequentistEstimator::from_data(\n\n n_labels,\n", "file_path": "src/estimates/frequentist.rs", "rank": 15, "score": 46974.82175440298 }, { "content": " }\n\n\n\n if self.count[new_pred] == 0 {\n\n // We have no more information.\n\n self.prediction = None;\n\n return true;\n\n }\n\n\n\n if self.prediction != Some(new_pred) {\n\n self.prediction = Some(new_pred);\n\n return true;\n\n }\n\n }\n\n false\n\n }\n\n}\n\n\n\n/// Keeps track of the frequentist estimate, allowing to reduce the\n\n/// size of training data.\n\npub struct FrequentistEstimator {\n", "file_path": "src/estimates/frequentist.rs", "rank": 16, "score": 46974.54428254273 }, { "content": " *next_id - 1\n\n });\n\n *id\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn frequentist_init() {\n\n let n_labels = 3;\n\n let train_x = array![0, 0, 0, 1, 1, 2, 2, 2, 2, 2, 2, 6];\n\n let train_y = array![0, 1, 1, 2, 2, 1, 2, 2, 0, 1, 1, 1];\n\n\n\n let test_x = array![0, 0, 1, 2, 2, 8, 8];\n\n let test_y = array![1, 1, 1, 1, 2, 1, 0];\n\n\n\n let freq = FrequentistEstimator::from_data(\n", "file_path": "src/estimates/frequentist.rs", "rank": 17, "score": 46973.95403611167 }, { "content": " [1.],\n\n [1.],\n\n [2.],\n\n [2.],\n\n [2.],\n\n [2.],\n\n [2.],\n\n [2.],\n\n [6.]\n\n ];\n\n let train_y = array![0, 1, 1, 2, 2, 1, 2, 2, 0, 1, 1, 1];\n\n\n\n let test_x = array![[0.], [0.], [1.], [2.], [2.], [8.], [8.]];\n\n let test_y = array![1, 1, 1, 1, 2, 1, 0];\n\n\n\n let mut freq = FrequentistEstimator::new(n_labels, &test_x.view(), &test_y.view());\n\n\n\n // Estimate.\n\n // 11)\n\n freq.add_example(&train_x.row(0), train_y[0]).unwrap();\n", 
"file_path": "src/estimates/frequentist.rs", "rank": 18, "score": 46973.90775248981 }, { "content": " }\n\n\n\n /// Returns the current errors for each test point.\n\n fn get_individual_errors(&self) -> Vec<bool> {\n\n let mut errors = Vec::with_capacity(self.test_x.len());\n\n\n\n for (xi, &yi) in self.test_x.iter().zip(&self.test_y) {\n\n let pred = if let Some(joint) = self.joint_count.get(&xi) {\n\n joint.predict().unwrap()\n\n } else {\n\n match self.priors_count.predict() {\n\n Some(pred) => pred,\n\n None => panic!(\"Call get_individual_errors() after training\"),\n\n }\n\n };\n\n\n\n errors.push(pred == yi);\n\n }\n\n errors\n\n }\n\n}\n\n\n", "file_path": "src/estimates/frequentist.rs", "rank": 19, "score": 46973.49040786457 }, { "content": " // Keeps the count of each label y associated with each object x.\n\n joint_count: HashMap<ObjectValue, FrequencyCount>,\n\n // Keeps the count of each label.\n\n priors_count: FrequencyCount,\n\n // Bayes risk estimate.\n\n error_count: usize,\n\n // NOTE: at the moment I'm not sure how we could remove training and\n\n // test data from here.\n\n // Of course, it'd be possible to do so with an increasing training set\n\n // strategy, but here we're removing from the training set.\n\n train_x: Vec<ObjectValue>,\n\n train_y: Vec<Label>,\n\n test_x: Vec<ObjectValue>,\n\n test_y: Vec<Label>,\n\n // Mapping from objects (feature vectors) to indexes.\n\n array_to_index: ArrayToIndex,\n\n}\n\n\n\nimpl FrequentistEstimator {\n\n pub fn new(\n", "file_path": "src/estimates/frequentist.rs", "rank": 20, "score": 46972.83102113343 }, { "content": " for (x, &y) in train_x.iter().zip(train_y) {\n\n assert!(y < n_labels, \"labels' values must be < number of labels\");\n\n priors_count.add_example(y);\n\n if let Some(jx) = joint_count.get_mut(x) {\n\n jx.add_example(y);\n\n }\n\n }\n\n\n\n // Compute Bayes risk.\n\n let mut error_count = 0;\n\n\n\n for (x, &y) in test_x.iter().zip(test_y) {\n\n let jx = 
joint_count.get(x).expect(\"shouldn't happen\");\n\n\n\n let pred = match jx.predict() {\n\n Some(pred) => pred,\n\n None => priors_count.predict().expect(\"not enough info for priors\"),\n\n };\n\n\n\n if y != pred {\n", "file_path": "src/estimates/frequentist.rs", "rank": 21, "score": 46971.98111164502 }, { "content": " error_count += 1;\n\n }\n\n }\n\n\n\n FrequentistEstimator {\n\n joint_count,\n\n priors_count,\n\n error_count,\n\n train_x: train_x.to_vec(),\n\n train_y: train_y.to_vec(),\n\n test_x: test_x.to_vec(),\n\n test_y: test_y.to_vec(),\n\n array_to_index: ArrayToIndex::new(),\n\n }\n\n }\n\n\n\n /// Updates the predictions when the very first example is added,\n\n /// and therefore we don't even have any information on priors.\n\n fn add_first_example(&mut self, x: ObjectValue, y: Label) {\n\n self.error_count = 0;\n", "file_path": "src/estimates/frequentist.rs", "rank": 22, "score": 46971.53350244408 }, { "content": " /// the prediction if needed.\n\n fn add_example(&mut self, y: Label) -> bool {\n\n self.count[y] += 1;\n\n let mut updated = false;\n\n\n\n if let Some(pred) = self.prediction {\n\n if y != pred {\n\n // Did the maximum prior change?\n\n if self.count[y] > self.count[pred] {\n\n self.prediction = Some(y);\n\n updated = true;\n\n }\n\n }\n\n } else {\n\n self.prediction = Some(y);\n\n updated = true;\n\n }\n\n updated\n\n }\n\n\n", "file_path": "src/estimates/frequentist.rs", "rank": 23, "score": 46971.44816141227 }, { "content": " train_y: &ArrayView1<Label>,\n\n test_x: &ArrayView1<ObjectValue>,\n\n test_y: &ArrayView1<Label>,\n\n ) -> FrequentistEstimator {\n\n // FIXME: instantiate from new().\n\n // Init counts.\n\n let mut joint_count: HashMap<ObjectValue, FrequencyCount> = HashMap::new();\n\n let mut priors_count = FrequencyCount::new(n_labels);\n\n\n\n // Instantiate points for which we need a prediction.\n\n // We'll only have information for the intersection of\n\n // train_x and test_x; for the others we'll have to guess\n\n 
// according to priors.\n\n for &x in test_x.iter().unique() {\n\n joint_count\n\n .entry(x)\n\n .or_insert_with(|| FrequencyCount::new(n_labels));\n\n }\n\n\n\n // Count frequencies in training data.\n", "file_path": "src/estimates/frequentist.rs", "rank": 24, "score": 46971.20274751579 }, { "content": " n_labels: usize,\n\n test_x: &ArrayView2<f64>,\n\n test_y: &ArrayView1<Label>,\n\n ) -> FrequentistEstimator {\n\n // Init counts.\n\n let priors_count = FrequencyCount::new(n_labels);\n\n let mut joint_count: HashMap<ObjectValue, FrequencyCount> = HashMap::new();\n\n\n\n // Converts objects (feature vectors) into ids.\n\n let mut array_to_index = ArrayToIndex::new();\n\n let test_x = test_x\n\n .outer_iter()\n\n .map(|x| array_to_index.map(x))\n\n .collect::<Vec<_>>();\n\n\n\n // Instantiate points for which we need a prediction.\n\n // We'll only have information for the intersection of\n\n // train_x and test_x; for the others we'll have to guess\n\n // according to priors.\n\n for &x in test_x.iter().unique() {\n", "file_path": "src/estimates/frequentist.rs", "rank": 25, "score": 46971.16157718145 }, { "content": " // because otherwise we'll have issues when updating w.r.t.\n\n // the joint distribution later in this function.\n\n old_priors_pred = new_pred;\n\n }\n\n\n\n // Update joint counts (and error), but only if `x` appears\n\n // in the test set.\n\n if let Some(joint) = self.joint_count.get_mut(&x) {\n\n let old_pred = match joint.predict() {\n\n Some(pred) => pred,\n\n None => old_priors_pred,\n\n };\n\n // Only update prediction if max P(o,s) changed.\n\n let joint_changed = joint.add_example(y);\n\n if joint_changed {\n\n // Predict again.\n\n let new_pred = joint.predict().unwrap();\n\n\n\n for (&xi, &yi) in self.test_x.iter().zip(&self.test_y) {\n\n // Only update predictions for observations with value `x`.\n", "file_path": "src/estimates/frequentist.rs", "rank": 26, "score": 46970.73095044418 }, { "content": " // in the test set.\n\n if let 
Some(joint) = self.joint_count.get_mut(&x) {\n\n let old_joint_pred = joint.predict().expect(\"shouldn't fail here\");\n\n let joint_changed = joint.remove_example(y);\n\n\n\n if joint_changed {\n\n // Predict again.\n\n let new_pred = match self.priors_count.predict() {\n\n Some(pred) => pred,\n\n // This means we don't have any more information on\n\n // P(x, y), and we'll need to predict via priors.\n\n None => some_or_error(self.priors_count.predict())?,\n\n };\n\n\n\n for (&xi, &yi) in self.test_x.iter().zip(&self.test_y) {\n\n if xi == x {\n\n let old_error = if yi != old_joint_pred { 1 } else { 0 };\n\n let new_error = if yi != new_pred { 1 } else { 0 };\n\n\n\n self.error_count = self.error_count + new_error - old_error;\n", "file_path": "src/estimates/frequentist.rs", "rank": 27, "score": 46970.47998866027 }, { "content": " /// Removes one observed label `y`, and changes the prediction\n\n /// accordingly.\n\n ///\n\n /// `true` is returned if the prediction changed, `false`, otherwise.\n\n fn remove_example(&mut self, y: Label) -> bool {\n\n self.count[y] -= 1;\n\n\n\n // Don't need to change prediction if y wasn't the predicted\n\n // label before.\n\n if Some(y) == self.prediction {\n\n // Check if there's a more likely prediction.\n\n let mut new_pred = y;\n\n for (yi, &c) in self.count.iter().enumerate() {\n\n if c > self.count[y] {\n\n new_pred = yi;\n\n // If this count is larger than the count for the\n\n // original prediction, then necessarily it is also\n\n // the largest count right now, so we can stop here.\n\n //break;\n\n }\n", "file_path": "src/estimates/frequentist.rs", "rank": 28, "score": 46970.153507863826 }, { "content": " joint_count\n\n .entry(x)\n\n .or_insert_with(|| FrequencyCount::new(n_labels));\n\n }\n\n\n\n FrequentistEstimator {\n\n joint_count,\n\n priors_count,\n\n error_count: 0,\n\n train_x: vec![],\n\n train_y: vec![],\n\n test_x: test_x.to_vec(),\n\n test_y: test_y.to_vec(),\n\n array_to_index,\n\n }\n\n }\n\n\n\n pub 
fn from_data(\n\n n_labels: usize,\n\n train_x: &ArrayView1<ObjectValue>,\n", "file_path": "src/estimates/frequentist.rs", "rank": 29, "score": 46969.910092450984 }, { "content": " assert_eq!(freq.priors_count.count, vec![1, 1, 0]);\n\n assert_eq!(freq.error_count, 2);\n\n\n\n // 11)\n\n freq.remove_one().unwrap();\n\n assert_eq!(freq.joint_count.get(&0).unwrap().count, vec![1, 0, 0]);\n\n assert_eq!(freq.joint_count.get(&0).unwrap().predict().unwrap(), 0);\n\n assert_eq!(freq.priors_count.count, vec![1, 0, 0]);\n\n assert_eq!(freq.error_count, 6);\n\n\n\n assert!(freq.remove_one().is_err());\n\n }\n\n\n\n #[test]\n\n fn frequentist_estimate_forward() {\n\n let n_labels = 3;\n\n let train_x = array![\n\n [0.],\n\n [0.],\n\n [0.],\n", "file_path": "src/estimates/frequentist.rs", "rank": 30, "score": 46969.092959627866 }, { "content": " }\n\n };\n\n\n\n // If max prior changed, update predictions for those that were\n\n // predicted with priors.\n\n let priors_changed = self.priors_count.add_example(y);\n\n if priors_changed {\n\n let new_pred = self.priors_count.predict().unwrap();\n\n\n\n for (xi, &yi) in self.test_x.iter().zip(&self.test_y) {\n\n // Match points for which we random guess.\n\n let joint = self.joint_count.get(xi).expect(\"shouldn't happen\");\n\n if joint.predict().is_none() {\n\n let old_error = if yi != old_priors_pred { 1 } else { 0 };\n\n let new_error = if yi != new_pred { 1 } else { 0 };\n\n\n\n self.error_count = self.error_count + new_error - old_error;\n\n }\n\n }\n\n // NOTE: we also need to update the value of old_priors_pred,\n", "file_path": "src/estimates/frequentist.rs", "rank": 31, "score": 46968.937485666065 }, { "content": " let old_priors_pred = some_or_error(self.priors_count.predict())?;\n\n let priors_changed = self.priors_count.remove_example(y);\n\n\n\n if priors_changed {\n\n // Predict again those that were predicted with priors.\n\n let new_pred = some_or_error(self.priors_count.predict())?;\n\n\n\n for (xi, &yi) in 
self.test_x.iter().zip(&self.test_y) {\n\n // Match points for which we random guess.\n\n let joint = self.joint_count.get(xi).expect(\"shouldn't happen\");\n\n if joint.predict().is_none() {\n\n let old_error = if yi != old_priors_pred { 1 } else { 0 };\n\n let new_error = if yi != new_pred { 1 } else { 0 };\n\n\n\n self.error_count = self.error_count + new_error - old_error;\n\n }\n\n }\n\n }\n\n\n\n // Update joint counts (and error), but only if `x` appears\n", "file_path": "src/estimates/frequentist.rs", "rank": 32, "score": 46968.937485666065 }, { "content": "\n\n self.priors_count.add_example(y);\n\n let pred = y;\n\n\n\n if let Some(jx) = self.joint_count.get_mut(&x) {\n\n jx.add_example(y);\n\n }\n\n\n\n for yi in &self.test_y {\n\n let error = if *yi != pred { 1 } else { 0 };\n\n self.error_count += error;\n\n }\n\n }\n\n\n\n pub fn remove_one(&mut self) -> Result<(), ()> {\n\n // TODO: better error handling.\n\n let x = some_or_error(self.train_x.pop())?;\n\n let y = some_or_error(self.train_y.pop())?;\n\n\n\n // Update priors and if they changed update the error count.\n", "file_path": "src/estimates/frequentist.rs", "rank": 33, "score": 46968.52101378203 }, { "content": " &train_x.view(),\n\n &train_y.view(),\n\n &test_x.view(),\n\n &test_y.view(),\n\n );\n\n\n\n // Estimate.\n\n // 0)\n\n assert_eq!(freq.error_count, 3);\n\n assert_eq!(freq.priors_count.count, vec![2, 6, 4]);\n\n\n\n // 1)\n\n freq.remove_one().unwrap();\n\n assert_eq!(freq.priors_count.count, vec![2, 5, 4]);\n\n assert_eq!(freq.error_count, 3);\n\n\n\n // 2)\n\n freq.remove_one().unwrap();\n\n assert_eq!(freq.joint_count.get(&2).unwrap().count, vec![1, 2, 2]);\n\n let pred = freq.joint_count.get(&2).unwrap().predict().unwrap();\n", "file_path": "src/estimates/frequentist.rs", "rank": 34, "score": 46967.28000304066 }, { "content": " assert_eq!(freq.error_count, 3);\n\n\n\n // 8)\n\n freq.remove_one().unwrap();\n\n assert_eq!(freq.joint_count.get(&1).unwrap().count, vec![0, 0, 1]);\n\n 
assert_eq!(freq.joint_count.get(&1).unwrap().predict().unwrap(), 2);\n\n assert_eq!(freq.priors_count.count, vec![1, 2, 1]);\n\n assert_eq!(freq.error_count, 3);\n\n\n\n // 9)\n\n freq.remove_one().unwrap();\n\n assert_eq!(freq.joint_count.get(&1).unwrap().count, vec![0, 0, 0]);\n\n assert!(freq.joint_count.get(&1).unwrap().predict().is_none());\n\n assert_eq!(freq.priors_count.count, vec![1, 2, 0]);\n\n assert_eq!(freq.error_count, 2);\n\n\n\n // 10)\n\n freq.remove_one().unwrap();\n\n assert_eq!(freq.joint_count.get(&0).unwrap().count, vec![1, 1, 0]);\n\n //assert_eq!(freq.joint_count.get(&0).unwrap().predict().unwrap(), 0);\n", "file_path": "src/estimates/frequentist.rs", "rank": 35, "score": 46967.00703956894 }, { "content": " assert_eq!(freq.priors_count.count, vec![1, 2, 0]);\n\n assert_eq!(freq.error_count, 2);\n\n\n\n // 8)\n\n freq.add_example(&train_x.row(3), train_y[3]).unwrap();\n\n assert_eq!(freq.joint_count.get(&1).unwrap().count, vec![0, 0, 1]);\n\n assert_eq!(freq.joint_count.get(&1).unwrap().predict().unwrap(), 2);\n\n assert_eq!(freq.priors_count.count, vec![1, 2, 1]);\n\n assert_eq!(freq.error_count, 3);\n\n\n\n // 7)\n\n freq.add_example(&train_x.row(4), train_y[4]).unwrap();\n\n assert_eq!(freq.joint_count.get(&2).unwrap().count, vec![0, 0, 0]);\n\n // Starts predicting with priors also for 2.\n\n assert!(freq.joint_count.get(&2).unwrap().predict().is_none());\n\n assert_eq!(freq.priors_count.count, vec![1, 2, 2]);\n\n assert_eq!(freq.error_count, 3);\n\n\n\n // 6)\n\n freq.add_example(&train_x.row(5), train_y[5]).unwrap();\n", "file_path": "src/estimates/frequentist.rs", "rank": 36, "score": 46966.98867888537 }, { "content": " // 5)\n\n freq.remove_one().unwrap();\n\n assert_eq!(freq.joint_count.get(&2).unwrap().count, vec![0, 1, 1]);\n\n assert_eq!(freq.joint_count.get(&2).unwrap().predict().unwrap(), 2);\n\n assert_eq!(freq.priors_count.count, vec![1, 3, 3]);\n\n assert_eq!(freq.error_count, 4);\n\n\n\n // 6)\n\n 
freq.remove_one().unwrap();\n\n assert_eq!(freq.joint_count.get(&2).unwrap().count, vec![0, 1, 0]);\n\n assert_eq!(freq.joint_count.get(&2).unwrap().predict().unwrap(), 1);\n\n assert_eq!(freq.priors_count.count, vec![1, 3, 2]);\n\n assert_eq!(freq.error_count, 3);\n\n\n\n // 7)\n\n freq.remove_one().unwrap();\n\n assert_eq!(freq.joint_count.get(&2).unwrap().count, vec![0, 0, 0]);\n\n // Starts predicting with priors also for 2.\n\n assert!(freq.joint_count.get(&2).unwrap().predict().is_none());\n\n assert_eq!(freq.priors_count.count, vec![1, 2, 2]);\n", "file_path": "src/estimates/frequentist.rs", "rank": 37, "score": 46966.95341299655 }, { "content": " assert_eq!(freq.joint_count.get(&0).unwrap().count, vec![1, 0, 0]);\n\n assert_eq!(freq.joint_count.get(&0).unwrap().predict().unwrap(), 0);\n\n assert_eq!(freq.priors_count.count, vec![1, 0, 0]);\n\n assert_eq!(freq.error_count, 6);\n\n\n\n // 10)\n\n freq.add_example(&train_x.row(1), train_y[1]).unwrap();\n\n assert_eq!(freq.joint_count.get(&0).unwrap().count, vec![1, 1, 0]);\n\n assert_eq!(freq.priors_count.count, vec![1, 1, 0]);\n\n //assert_eq!(freq.error_count, 2);\n\n // The prediction of priors could be either 0 or 1, and so\n\n // the prediction of joint probability for object 0.\n\n // However, I'll keep the assertion strict.\n\n //assert!(freq.error_count == 2 || freq.error_count == 6);\n\n assert_eq!(freq.error_count, 6);\n\n\n\n // 9)\n\n freq.add_example(&train_x.row(2), train_y[2]).unwrap();\n\n assert_eq!(freq.joint_count.get(&1).unwrap().count, vec![0, 0, 0]);\n\n assert!(freq.joint_count.get(&1).unwrap().predict().is_none());\n", "file_path": "src/estimates/frequentist.rs", "rank": 38, "score": 46966.91186769218 }, { "content": " assert_eq!(pred, 2);\n\n assert_eq!(freq.priors_count.count, vec![2, 4, 4]);\n\n // Should be assert!(freq.error_count == 3 || freq.error_count == 4);\n\n assert_eq!(freq.error_count, 4);\n\n\n\n // 1)\n\n freq.add_example(&train_x.row(10), train_y[10]).unwrap();\n\n 
assert_eq!(freq.priors_count.count, vec![2, 5, 4]);\n\n assert_eq!(freq.error_count, 3);\n\n\n\n // 0)\n\n freq.add_example(&train_x.row(11), train_y[11]).unwrap();\n\n assert_eq!(freq.error_count, 3);\n\n assert_eq!(freq.priors_count.count, vec![2, 6, 4]);\n\n }\n\n}\n", "file_path": "src/estimates/frequentist.rs", "rank": 39, "score": 46965.65405408059 }, { "content": " // 4)\n\n freq.add_example(&train_x.row(7), train_y[7]).unwrap();\n\n assert_eq!(freq.joint_count.get(&2).unwrap().count, vec![0, 1, 2]);\n\n assert_eq!(freq.joint_count.get(&2).unwrap().predict().unwrap(), 2);\n\n assert_eq!(freq.priors_count.count, vec![1, 3, 4]);\n\n assert_eq!(freq.error_count, 4);\n\n\n\n // 3)\n\n freq.add_example(&train_x.row(8), train_y[8]).unwrap();\n\n assert_eq!(freq.joint_count.get(&2).unwrap().count, vec![1, 1, 2]);\n\n assert_eq!(freq.joint_count.get(&2).unwrap().predict().unwrap(), 2);\n\n assert_eq!(freq.priors_count.count, vec![2, 3, 4]);\n\n assert_eq!(freq.priors_count.predict().unwrap(), 2);\n\n assert_eq!(freq.error_count, 4); // Increases because of priors.\n\n\n\n // 2)\n\n freq.add_example(&train_x.row(9), train_y[9]).unwrap();\n\n assert_eq!(freq.joint_count.get(&2).unwrap().count, vec![1, 2, 2]);\n\n let pred = freq.joint_count.get(&2).unwrap().predict().unwrap();\n\n // More properly it should be: assert!(pred == 1 || pred == 2);\n", "file_path": "src/estimates/frequentist.rs", "rank": 40, "score": 46965.65405408059 }, { "content": " if xi == x {\n\n let old_error = if yi != old_pred { 1 } else { 0 };\n\n let new_error = if yi != new_pred { 1 } else { 0 };\n\n\n\n self.error_count = self.error_count + new_error - old_error;\n\n }\n\n }\n\n }\n\n }\n\n Ok(())\n\n }\n\n\n\n /// Returns the current number of errors.\n\n fn get_error_count(&self) -> usize {\n\n self.error_count\n\n }\n\n\n\n /// Returns the current error rate.\n\n fn get_error(&self) -> f64 {\n\n (self.error_count as f64) / (self.test_y.len() as f64)\n", "file_path": 
"src/estimates/frequentist.rs", "rank": 41, "score": 46965.65405408059 }, { "content": " assert_eq!(freq.joint_count.get(&2).unwrap().count, vec![0, 1, 0]);\n\n assert_eq!(freq.joint_count.get(&2).unwrap().predict().unwrap(), 1);\n\n assert_eq!(freq.priors_count.count, vec![1, 3, 2]);\n\n assert_eq!(freq.error_count, 3);\n\n\n\n // 5)\n\n freq.add_example(&train_x.row(6), train_y[6]).unwrap();\n\n assert_eq!(freq.joint_count.get(&2).unwrap().count, vec![0, 1, 1]);\n\n //assert_eq!(freq.joint_count.get(&2).unwrap().predict().unwrap(), 2);\n\n let pred = freq.joint_count.get(&2).unwrap().predict().unwrap();\n\n // NOTE: pred could either be 2 or 1, but I'll keep the condition\n\n // strict so that if anything changes we know.\n\n //assert!(pred == 2 || pred == 1);\n\n assert_eq!(pred, 1);\n\n assert_eq!(freq.priors_count.count, vec![1, 3, 3]);\n\n // NOTE: the following could be 3 or 4, but I'll set the condition\n\n // strict.\n\n //assert!(freq.error_count == 4 | freq.error_count == 3);\n\n assert!(freq.error_count == 3);\n\n\n", "file_path": "src/estimates/frequentist.rs", "rank": 42, "score": 46965.65405408059 }, { "content": " // More properly it should be: assert!(pred == 1 || pred == 2);\n\n assert_eq!(pred, 1);\n\n assert_eq!(freq.priors_count.count, vec![2, 4, 4]);\n\n assert_eq!(freq.error_count, 3);\n\n\n\n // 3)\n\n freq.remove_one().unwrap();\n\n assert_eq!(freq.joint_count.get(&2).unwrap().count, vec![1, 1, 2]);\n\n assert_eq!(freq.joint_count.get(&2).unwrap().predict().unwrap(), 2);\n\n assert_eq!(freq.priors_count.count, vec![2, 3, 4]);\n\n assert_eq!(freq.priors_count.predict().unwrap(), 2);\n\n assert_eq!(freq.error_count, 4); // Increases because of priors.\n\n\n\n // 4)\n\n freq.remove_one().unwrap();\n\n assert_eq!(freq.joint_count.get(&2).unwrap().count, vec![0, 1, 2]);\n\n assert_eq!(freq.joint_count.get(&2).unwrap().predict().unwrap(), 2);\n\n assert_eq!(freq.priors_count.count, vec![1, 3, 4]);\n\n assert_eq!(freq.error_count, 4);\n\n\n", 
"file_path": "src/estimates/frequentist.rs", "rank": 43, "score": 46965.65405408059 }, { "content": " n_labels,\n\n &train_x.view(),\n\n &train_y.view(),\n\n &test_x.view(),\n\n &test_y.view(),\n\n );\n\n\n\n // Only keeps track of (unique) points that are in train_x.\n\n assert_eq!(freq.joint_count.len(), 4);\n\n\n\n // Priors counts.\n\n assert_eq!(freq.priors_count.count, vec![2, 6, 4]);\n\n // Joint probability counts for objects 0, 1, 2, 8.\n\n assert_eq!(freq.joint_count.get(&0).unwrap().count, vec![1, 2, 0]);\n\n assert_eq!(freq.joint_count.get(&1).unwrap().count, vec![0, 0, 2]);\n\n assert_eq!(freq.joint_count.get(&2).unwrap().count, vec![1, 3, 2]);\n\n assert_eq!(freq.joint_count.get(&8).unwrap().count, vec![0; 3]);\n\n\n\n // Individual predictions.\n\n assert_eq!(freq.joint_count.get(&0).unwrap().predict().unwrap(), 1);\n", "file_path": "src/estimates/frequentist.rs", "rank": 44, "score": 46965.65405408059 }, { "content": "fn main() {\n\n // Parse args from command line.\n\n let args: Args = Docopt::new(USAGE)\n\n .and_then(|d| {\n\n d.version(Some(env!(\"CARGO_PKG_VERSION\").to_string()))\n\n .deserialize()\n\n })\n\n .unwrap_or_else(|e| e.exit());\n\n\n\n // Load data.\n\n let (train_x, train_y) =\n\n load_data::<f64>(&args.arg_train).expect(\"[!] failed to load training data\");\n\n let (eval_x, eval_y) =\n\n load_data::<f64>(&args.arg_eval).expect(\"[!] 
failed to load evaluation data\");\n\n\n\n // Logging.\n\n let mut error_logger = match args.flag_logfile {\n\n Some(fname) => Some(Logger::LogFile(\n\n File::create(&fname).expect(\"Couldn't open file for logging\"),\n\n )),\n", "file_path": "src/main.rs", "rank": 45, "score": 46631.4525103952 }, { "content": " // add_example() is n-1.\n\n n: usize,\n\n // Function k(n) used to determine the value of k given n.\n\n // TODO: might be replaced with a closure in the future, for\n\n // better performances.\n\n k_from_n: Box<dyn Fn(usize) -> usize>,\n\n}\n\n\n\nimpl<D> KNNEstimator<D>\n\nwhere\n\n D: Fn(&ArrayView1<f64>, &ArrayView1<f64>) -> f64 + Send + Sync + Copy,\n\n{\n\n /// Create a new k-NN estimator.\n\n pub fn new(\n\n test_x: &ArrayView2<f64>,\n\n test_y: &ArrayView1<Label>,\n\n max_n: usize,\n\n distance: D,\n\n strategy: KNNStrategy,\n\n ) -> KNNEstimator<D> {\n", "file_path": "src/estimates/knn.rs", "rank": 46, "score": 45735.590406398005 }, { "content": " let test_y = array![0, 0, 2, 1, 0, 1, 0];\n\n let max_n = train_x.nrows();\n\n let distance = euclidean_distance;\n\n\n\n // Test for k = 1.\n\n let mut knn = KNNEstimator::new(\n\n &test_x.view(),\n\n &test_y.view(),\n\n max_n,\n\n distance,\n\n KNNStrategy::NN,\n\n );\n\n\n\n // FIXME: I'm not sure why in this case, differently from the\n\n // backward test, I need to include one more error and prediction.\n\n // The rest is exactly identical.\n\n let expected_preds = vec![\n\n [1, 2, 0, 2, 0, 0, 1],\n\n [1, 1, 0, 1, 0, 0, 1],\n\n [1, 1, 0, 1, 0, 0, 1],\n", "file_path": "src/estimates/knn.rs", "rank": 47, "score": 45734.448218548845 }, { "content": " let max_n = 5; // Essential that max_k > k for this test, otherwise\n\n // it's a different check.\n\n\n\n let mut knn = KNNEstimator::new(&test_x.view(), &test_y.view(),\n\n max_n, euclidean_distance,\n\n KNNStrategy::FixedK(5));\n\n knn.k_from_n = knn_strategy(KNNStrategy::NN);\n\n\n\n\n\n // We'll only observe examples with distance 2 from x.\n\n\n\n // 
First all with label 0.\n\n for _ in 0..5 {\n\n knn.add_example(&array![2.].view(), 0).unwrap();\n\n }\n\n\n\n assert_eq!(knn.predictions, vec![0]);\n\n\n\n // Now we change the ties' label distribution to 1.\n\n for _ in 0..6 {\n\n knn.add_example(&array![2.].view(), 1).unwrap();\n\n }\n\n\n\n assert_eq!(knn.predictions, vec![1]);\n\n }\n\n */\n\n}\n", "file_path": "src/estimates/knn.rs", "rank": 48, "score": 45733.99791068495 }, { "content": "use ordered_float::OrderedFloat;\n\nuse std;\n\nuse std::cmp::Ordering;\n\nuse std::collections::HashMap;\n\n\n\nuse crate::estimates::{knn_strategy, BayesEstimator, KNNStrategy};\n\nuse crate::Label;\n\n\n\n/// Nearest neighbors to a test object.\n\n#[derive(Debug)]\n", "file_path": "src/estimates/knn.rs", "rank": 49, "score": 45732.64597667067 }, { "content": "//! let mut knn = KNNEstimator::from_data(&train_x.view(), &train_y.view(),\n\n//! &test_x.view(), &test_y.view(), max_n,\n\n//! euclidean_distance, KNNStrategy::FixedK(k));\n\n//!\n\n//! assert_eq!(knn.get_error(), 0.42857142857142855);\n\n//!\n\n//! knn.add_example(&array![3.].view(), 1);\n\n//! assert_eq!(knn.get_error(), 0.42857142857142855);\n\n//!\n\n//! // Change k.\n\n//! knn.set_k(5);\n\n//! knn.add_example(&array![2.].view(), 1);\n\n//! assert_eq!(knn.get_error(), 0.42857142857142855);\n\n//!\n\n//! knn.add_example(&array![1.].view(), 2);\n\n//! assert_eq!(knn.get_error(), 0.42857142857142855);\n\n//! # }\n\n//! 
```\n\nuse float_cmp::approx_eq;\n\nuse ndarray::*;\n", "file_path": "src/estimates/knn.rs", "rank": 50, "score": 45732.4925203718 }, { "content": " } else {\n\n // TODO: could remove this check if we're sure it is\n\n // updated elsewhere correctly.\n\n assert_eq!(Some(removed.distance), self.extra_ties_dist);\n\n }\n\n let count = self.extra_ties.entry(removed.label).or_insert(0);\n\n *count += 1;\n\n } else {\n\n self.extra_ties.clear();\n\n self.extra_ties_dist = None;\n\n }\n\n }\n\n }\n\n true\n\n }\n\n}\n\n\n\n/// Keeps track of the error of a k-NN classifier, with the possibility\n\n/// of changing k and removing training examples.\n\npub struct KNNEstimator<D>\n", "file_path": "src/estimates/knn.rs", "rank": 51, "score": 45731.50918229006 }, { "content": " Ok(())\n\n }\n\n\n\n /// Changes the k for which k-NN predictions are given.\n\n pub fn set_k(&mut self, k: usize) -> Result<(), ()> {\n\n if k != self.current_k {\n\n self.current_k = k;\n\n self.update_all()?;\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl<D> BayesEstimator for KNNEstimator<D>\n\nwhere\n\n D: Fn(&ArrayView1<f64>, &ArrayView1<f64>) -> f64 + Send + Sync + Copy,\n\n{\n\n /// Adds a new example to the k-NN estimator's training data.\n\n ///\n\n /// This also updates the prediction, if necessary.\n", "file_path": "src/estimates/knn.rs", "rank": 52, "score": 45731.46693566422 }, { "content": "//! Fast k-NN error estimates for discrete and continuous\n\n//! output space.\n\n//!\n\n//! This allows estimating the error of a k-NN classifier (with possibly\n\n//! changing k) on a test set, given training data.\n\n//! This module only exposes one public structure: `KNNEstimator`, which can be\n\n//! used as follows:\n\n//! 1) Init `KNNEstimator` with selected evaluation (test) data;\n\n//! 2) Add a training example with `add_example()`.\n\n//! 3) Get the estimate (i.e., error) on the test data with `get_error()`.\n\n//! 4) Repeat from 2), until training data is finished.\n\n//!\n\n//! 
# Examples\n\n//!\n\n//! ```\n\n//! #[macro_use(array)]\n\n//! extern crate ndarray;\n\n//! extern crate fbleau;\n\n//!\n\n//! # fn main() {\n", "file_path": "src/estimates/knn.rs", "rank": 53, "score": 45730.268795664444 }, { "content": " .iter()\n\n .map(|e| match e {\n\n 0 => false,\n\n 1 => true,\n\n _ => panic!(\"{}\", \"errors must contain values in {0,1}\"),\n\n })\n\n .collect::<Vec<_>>()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::estimates::*;\n\n\n\n #[test]\n\n fn knn_init() {\n\n let train_x = array![[8.], [3.], [1.], [4.], [5.], [7.], [2.], [6.]];\n\n let train_y = array![0, 0, 0, 1, 0, 1, 1, 2];\n\n let x = array![0.];\n", "file_path": "src/estimates/knn.rs", "rank": 54, "score": 45729.70065423505 }, { "content": " knn.add_example(&x, *y).unwrap();\n\n assert_eq!(\n\n knn.predictions,\n\n expected_preds[expected_preds.len() - 1 - i]\n\n );\n\n assert_eq!(\n\n knn.get_error(),\n\n expected_error[expected_error.len() - 1 - i]\n\n );\n\n }\n\n\n\n // Test when changing k.\n\n let max_n = train_x.nrows();\n\n // Custom k_from_n.\n\n let k_from_n = Box::new(|n| match n {\n\n 0..=3 => 1,\n\n 4..=6 => 4,\n\n _ => 5,\n\n });\n\n let mut knn = KNNEstimator::new(\n", "file_path": "src/estimates/knn.rs", "rank": 55, "score": 45729.67870639965 }, { "content": " pub fn from_data(\n\n train_x: &ArrayView2<f64>,\n\n train_y: &ArrayView1<Label>,\n\n test_x: &ArrayView2<f64>,\n\n test_y: &ArrayView1<Label>,\n\n max_n: usize,\n\n distance: D,\n\n strategy: KNNStrategy,\n\n ) -> KNNEstimator<D> {\n\n assert_eq!(train_x.ncols(), test_x.ncols());\n\n assert_eq!(train_x.nrows(), train_y.len());\n\n assert_eq!(test_x.nrows(), test_y.len());\n\n assert!(!train_x.is_empty());\n\n assert!(!test_x.is_empty());\n\n\n\n // How we select k given n.\n\n let k_from_n = knn_strategy(strategy);\n\n // Maximum number of neighbors to store (excluding ties).\n\n let max_k = k_from_n(max_n);\n\n\n", "file_path": "src/estimates/knn.rs", "rank": 56, 
"score": 45728.835005831235 }, { "content": " fn ties_after_max_k() {\n\n let max_k = 5;\n\n let distance = euclidean_distance;\n\n\n\n let train_x = array![[0.], [0.], [0.], [1.], [1.], [1.], [1.], [1.], [0.]];\n\n let train_y = array![0, 1, 1, 1, 0, 0, 0, 0, 1];\n\n\n\n let x = array![0.];\n\n let mut nn = NearestNeighbors::from_data(\n\n &x.view(),\n\n &train_x.view(),\n\n &train_y.view(),\n\n max_k,\n\n distance,\n\n );\n\n let distances_from_x: Vec<_> = nn.neighbors.iter().map(|neigh| neigh.distance).collect();\n\n assert_eq!(distances_from_x, vec![0., 0., 0., 0., 1.]);\n\n assert_eq!(nn.extra_ties_dist, Some(1.));\n\n println!(\"{:?}\", nn.extra_ties);\n\n // NOTE: equally valid would be to have extra_ties[0] == Some(4)\n", "file_path": "src/estimates/knn.rs", "rank": 57, "score": 45728.596970395956 }, { "content": " // Different labels.\n\n let train_y = array![0, 0, 1, 1, 0, 0, 0, 0, 1];\n\n let nn = NearestNeighbors::from_data(\n\n &x.view(),\n\n &train_x.view(),\n\n &train_y.view(),\n\n max_k,\n\n euclidean_distance,\n\n );\n\n assert_eq!(nn.predict(5).unwrap(), 0);\n\n }\n\n\n\n /* FIXME: verify this test is still necessary.\n\n #[test]\n\n /// KNNEstimator's parameter updated_k should take ties into account.\n\n fn test_updated_k() {\n\n let test_x = array![[0.]];\n\n let test_y = array![1];\n\n\n\n let k = 1;\n", "file_path": "src/estimates/knn.rs", "rank": 58, "score": 45728.51969372989 }, { "content": " let mut max_k = 8;\n\n\n\n let knn = NearestNeighbors::from_data(\n\n &x.view(),\n\n &train_x.view(),\n\n &train_y.view(),\n\n max_k,\n\n euclidean_distance,\n\n );\n\n\n\n let distances_from_x: Vec<_> = knn.neighbors.iter().map(|neigh| neigh.distance).collect();\n\n assert_eq!(distances_from_x, vec![1., 2., 3., 4., 5., 6., 7., 8.]);\n\n\n\n // Reduce max_k.\n\n max_k = 5;\n\n let knn = NearestNeighbors::from_data(\n\n &x.view(),\n\n &train_x.view(),\n\n &train_y.view(),\n\n max_k,\n", "file_path": "src/estimates/knn.rs", "rank": 59, "score": 
45728.44352834962 }, { "content": " max_k: usize,\n\n distance: D,\n\n ) -> NearestNeighbors<D> {\n\n assert!(max_k > 0);\n\n\n\n let mut knn = NearestNeighbors::new(x, max_k, distance);\n\n\n\n for (xi, yi) in train_x.outer_iter().zip(train_y) {\n\n // NOTE: we use std::usize::MAX as a bogus label to split ties in\n\n // predict(); if this becomes a problem, predict() needs to be\n\n // fixed as well.\n\n assert!(\n\n *yi != std::usize::MAX,\n\n \"label {} is too large and currently not supported\",\n\n *yi\n\n );\n\n\n\n knn.add_example(&xi, *yi);\n\n }\n\n\n", "file_path": "src/estimates/knn.rs", "rank": 60, "score": 45728.33996703566 }, { "content": " &test_x.view(),\n\n &test_y.view(),\n\n max_n,\n\n distance,\n\n KNNStrategy::Custom(k_from_n),\n\n );\n\n let expected_error = vec![\n\n 0.42857142857142855,\n\n 0.42857142857142855,\n\n 0.42857142857142855,\n\n 0.5714285714285714,\n\n 0.42857142857142855,\n\n 0.42857142857142855,\n\n 0.42857142857142855,\n\n 0.42857142857142855,\n\n ];\n\n let expected_preds = vec![\n\n vec![0; 7],\n\n vec![0; 7],\n\n vec![0; 7],\n", "file_path": "src/estimates/knn.rs", "rank": 61, "score": 45727.73725035119 }, { "content": " .neighbors\n\n .get(i)\n\n .expect(\"first_of_ties() called on wrong index\")\n\n .distance;\n\n\n\n while let Some(neigh) = self.neighbors.get(i - 1) {\n\n if !approx_eq!(f64, neigh.distance, d) {\n\n break;\n\n }\n\n i -= 1;\n\n if i == 0 {\n\n break;\n\n }\n\n }\n\n i\n\n }\n\n\n\n /// Adds a new example.\n\n fn add_example(&mut self, x: &ArrayView1<f64>, y: Label) -> bool {\n\n let d = (self.distance)(x, &self.x.view());\n", "file_path": "src/estimates/knn.rs", "rank": 62, "score": 45727.646174162015 }, { "content": " }\n\n\n\n KNNEstimator {\n\n neighbors,\n\n errors,\n\n predictions,\n\n labels: test_y.to_vec(),\n\n current_k: k,\n\n k_error_count: knn_error,\n\n n,\n\n k_from_n,\n\n }\n\n }\n\n\n\n /// Update all predictions and errors.\n\n ///\n\n /// Called when when k changes.\n\n fn 
update_all(&mut self) -> Result<(), ()> {\n\n for (neigh, y, old_pred, old_error) in izip!(\n\n &self.neighbors,\n", "file_path": "src/estimates/knn.rs", "rank": 63, "score": 45727.24427244257 }, { "content": " fn add_example(&mut self, x: &ArrayView1<f64>, y: Label) -> Result<(), ()> {\n\n // Update k with respect to n.\n\n self.set_k((self.k_from_n)(self.n))?;\n\n // We update n here, before error are (possibly) raised.\n\n self.n += 1;\n\n // We copy because we're using them in the closure below.\n\n let current_k = self.current_k;\n\n\n\n // Update errors and neighbors as appropriate.\n\n for (neigh, true_y, old_pred, old_error) in izip!(\n\n &mut self.neighbors,\n\n &self.labels,\n\n &mut self.predictions,\n\n &mut self.errors\n\n ) {\n\n if neigh.add_example(x, y) {\n\n if neigh.updated_k > current_k {\n\n continue;\n\n }\n\n\n", "file_path": "src/estimates/knn.rs", "rank": 64, "score": 45727.005323716025 }, { "content": " let neighbors = test_x\n\n .outer_iter()\n\n .map(|x| {\n\n NearestNeighbors::from_data(&x, &train_x.view(), &train_y.view(), max_k, distance)\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n let n = train_y.len();\n\n let k = k_from_n(n);\n\n let mut knn_error = 0;\n\n let mut errors = Vec::with_capacity(test_y.len());\n\n let mut predictions = Vec::with_capacity(test_y.len());\n\n\n\n for (neigh, y) in neighbors.iter().zip(test_y) {\n\n let pred = neigh.predict(k).expect(\"unexpected error\");\n\n let error = if pred != *y { 1 } else { 0 };\n\n\n\n predictions.push(pred);\n\n errors.push(error);\n\n knn_error += error;\n", "file_path": "src/estimates/knn.rs", "rank": 65, "score": 45726.901519338055 }, { "content": " // TODO: should we?\n\n let errors = test_y\n\n .iter()\n\n .map(|y| if *y != 0 { 1 } else { 0 })\n\n .collect::<Vec<_>>();\n\n let error_count = errors.iter().sum();\n\n\n\n KNNEstimator {\n\n neighbors,\n\n errors,\n\n predictions: vec![0; test_y.len()],\n\n labels: test_y.to_vec(),\n\n current_k: 1,\n\n k_error_count: 
error_count,\n\n n: 0,\n\n k_from_n,\n\n }\n\n }\n\n\n\n /// Create a k-NN estimator from training and test set.\n", "file_path": "src/estimates/knn.rs", "rank": 66, "score": 45726.76594428244 }, { "content": " knn\n\n }\n\n\n\n /// Predict label according to k-NN rule, for specified k, without\n\n /// accounting for ties.\n\n fn predict_no_ties(&self, k: usize) -> Result<Label, ()> {\n\n if k > self.neighbors.len() {\n\n return Err(());\n\n }\n\n\n\n let mut label_count = HashMap::new();\n\n\n\n let mut y_pred = 0;\n\n let mut y_count = 0;\n\n\n\n for neigh in self.neighbors.iter().take(k) {\n\n let count = label_count.entry(neigh.label).or_insert(0);\n\n *count += 1;\n\n if *count > y_count {\n\n y_pred = neigh.label;\n", "file_path": "src/estimates/knn.rs", "rank": 67, "score": 45726.66044086246 }, { "content": " euclidean_distance,\n\n );\n\n\n\n let distances_from_x: Vec<_> = knn.neighbors.iter().map(|neigh| neigh.distance).collect();\n\n assert_eq!(distances_from_x, vec![1., 2., 3., 4., 5.]);\n\n }\n\n\n\n #[test]\n\n fn knn_predictions_ties() {\n\n let train_x = array![[0.], [1.], [1.], [1.], [1.], [1.], [1.], [2.], [2.]];\n\n let train_y = array![0, 1, 1, 1, 0, 1, 0, 0, 0];\n\n\n\n let x = array![0.];\n\n let max_k = 10;\n\n\n\n let knn = NearestNeighbors::from_data(\n\n &x.view(),\n\n &train_x.view(),\n\n &train_y.view(),\n\n max_k,\n", "file_path": "src/estimates/knn.rs", "rank": 68, "score": 45726.56998505183 }, { "content": " euclidean_distance,\n\n );\n\n\n\n assert_eq!(knn.predict(1), Ok(0));\n\n assert_eq!(knn.predict(3), Ok(1));\n\n assert_eq!(knn.predict(5), Ok(1));\n\n\n\n // The same should happen if examples appear in a different\n\n // order.\n\n let train_x = array![[1.], [1.], [1.], [1.], [1.], [1.], [2.], [2.], [0.]];\n\n let train_y = array![0, 1, 1, 1, 0, 1, 0, 0, 0];\n\n\n\n let knn = NearestNeighbors::from_data(\n\n &x.view(),\n\n &train_x.view(),\n\n &train_y.view(),\n\n max_k,\n\n euclidean_distance,\n\n );\n\n\n", "file_path": 
"src/estimates/knn.rs", "rank": 69, "score": 45726.04881012197 }, { "content": "//! use ndarray::*;\n\n//! use fbleau::estimates::*;\n\n//!\n\n//! let train_x = array![[8.],\n\n//! [7.],\n\n//! [6.],\n\n//! [5.],\n\n//! [4.]];\n\n//! let train_y = array![0, 0, 0, 1, 0];\n\n//! let test_x = array![[3.],\n\n//! [0.],\n\n//! [6.],\n\n//! [1.],\n\n//! [6.],\n\n//! [4.],\n\n//! [5.]];\n\n//! let test_y = array![0, 0, 2, 1, 0, 1, 0];\n\n//! let max_n = train_x.nrows();\n\n//!\n\n//! let k = 3;\n", "file_path": "src/estimates/knn.rs", "rank": 70, "score": 45725.68210518608 }, { "content": "\n\n let max_k = 10;\n\n let distance = euclidean_distance;\n\n\n\n let mut knn1 = NearestNeighbors::from_data(\n\n &x1.view(),\n\n &train_x.view(),\n\n &train_y.view(),\n\n max_k,\n\n distance,\n\n );\n\n let knn2 = NearestNeighbors::from_data(\n\n &x2.view(),\n\n &train_x.view(),\n\n &train_y.view(),\n\n max_k,\n\n distance,\n\n );\n\n\n\n assert_eq!(knn1.predict(1), Ok(2));\n", "file_path": "src/estimates/knn.rs", "rank": 71, "score": 45725.40377117757 }, { "content": " self_d.cmp(&other_d)\n\n }\n\n}\n\n\n\nimpl PartialOrd for Neighbor {\n\n fn partial_cmp(&self, other: &Neighbor) -> Option<Ordering> {\n\n Some(self.cmp(other))\n\n }\n\n}\n\n\n\nimpl PartialEq for Neighbor {\n\n fn eq(&self, other: &Neighbor) -> bool {\n\n self.distance == other.distance\n\n }\n\n}\n\n\n\nimpl Eq for Neighbor {}\n\n\n\n/// Contains the nearest neighbors of some test object x.\n", "file_path": "src/estimates/knn.rs", "rank": 72, "score": 45725.225058275726 }, { "content": " // and extra_ties[1] == None.\n\n assert_eq!(nn.extra_ties.get(&0), Some(&3));\n\n assert_eq!(nn.extra_ties.get(&1), Some(&1));\n\n\n\n assert_eq!(nn.predict(3).unwrap(), 1);\n\n assert_eq!(nn.predict(5).unwrap(), 1);\n\n assert!(nn.predict(6).is_err());\n\n\n\n // Test ties count again.\n\n nn.add_example(&array![1.].view(), 0);\n\n nn.add_example(&array![1.].view(), 1);\n\n nn.add_example(&array![1.].view(), 2);\n\n 
nn.add_example(&array![1.].view(), 2);\n\n nn.add_example(&array![1.].view(), 3);\n\n\n\n assert_eq!(nn.extra_ties.get(&0), Some(&4));\n\n assert_eq!(nn.extra_ties.get(&1), Some(&2));\n\n assert_eq!(nn.extra_ties.get(&2), Some(&2));\n\n assert_eq!(nn.extra_ties.get(&3), Some(&1));\n\n\n", "file_path": "src/estimates/knn.rs", "rank": 73, "score": 45725.11671779065 }, { "content": " // NNs of x.\n\n let mut knn = NearestNeighbors::new(&x.view(), max_k, distance);\n\n\n\n let expected_preds_1 = vec![Ok(0), Ok(0), Ok(0), Ok(1), Ok(0), Ok(1), Ok(1), Ok(2)];\n\n let expected_preds_3 = vec![Err(()), Err(()), Ok(0), Ok(0), Ok(0), Ok(1), Ok(1), Ok(1)];\n\n let expected_preds_5 = vec![\n\n Err(()),\n\n Err(()),\n\n Err(()),\n\n Err(()),\n\n Ok(0),\n\n Ok(0),\n\n Ok(1),\n\n Ok(1),\n\n ];\n\n let expected_preds_7 = vec![\n\n Err(()),\n\n Err(()),\n\n Err(()),\n\n Err(()),\n", "file_path": "src/estimates/knn.rs", "rank": 74, "score": 45725.05527984264 }, { "content": " &self.labels,\n\n &mut self.predictions,\n\n &mut self.errors\n\n ) {\n\n let pred = neigh.predict(self.current_k)?;\n\n if pred == *old_pred {\n\n continue;\n\n }\n\n let error = if pred != *y { 1 } else { 0 };\n\n\n\n match (error, *old_error) {\n\n (1, 0) => self.k_error_count += 1,\n\n (0, 1) => self.k_error_count -= 1,\n\n // No need to update.\n\n _ => {}\n\n };\n\n\n\n *old_pred = pred;\n\n *old_error = error;\n\n }\n", "file_path": "src/estimates/knn.rs", "rank": 75, "score": 45724.93526570164 }, { "content": "where\n\n D: Fn(&ArrayView1<f64>, &ArrayView1<f64>) -> f64,\n\n{\n\n //K: Fn(usize) -> usize {\n\n // max_k nearest neighbors for each test object.\n\n neighbors: Vec<NearestNeighbors<D>>,\n\n // Error for each test object.\n\n // TODO: we could have bit vectors (e.g., Vec<bool> or BitVec)\n\n // for errors. 
This should (very slightly) improve memory performance.\n\n pub errors: Vec<u32>,\n\n // Current prediction for each test label.\n\n pub predictions: Vec<Label>,\n\n // True test labels.\n\n labels: Vec<Label>,\n\n // Last queried k.\n\n current_k: usize,\n\n // k-NN count, for k = current_k.\n\n pub k_error_count: u32,\n\n // Size of training data. The next training example to be removed by\n\n // remove_one() is n-1. The next training example to be added by\n", "file_path": "src/estimates/knn.rs", "rank": 76, "score": 45724.85372150912 }, { "content": " assert_eq!(test_x.nrows(), test_y.len());\n\n assert!(!test_y.is_empty());\n\n\n\n // How we select k given n.\n\n let k_from_n = knn_strategy(strategy);\n\n // max_k specifies the maximum number of neighbors to store\n\n // (excluding ties); a smaller max_k improves performances, but\n\n // its value should be sufficiently large to give correct\n\n // results once we've seen all the training data.\n\n let max_k = k_from_n(max_n);\n\n\n\n let neighbors = test_x\n\n .outer_iter()\n\n .map(|x| NearestNeighbors::new(&x, max_k, distance))\n\n .collect::<Vec<_>>();\n\n // We initially set all predictions to 0. Therefore, we need to\n\n // adjust the error count accordingly. Note that this is updated as\n\n // soon as add_example() is called.\n\n // A more proper way to do this in Rust would be to set\n\n // predictions (and errors, k_error_count, ...) 
to an Option value.\n", "file_path": "src/estimates/knn.rs", "rank": 77, "score": 45724.78686697632 }, { "content": " y_count = *count;\n\n }\n\n }\n\n Ok(y_pred)\n\n }\n\n\n\n /// Predict label according to k-NN rule, for specified k.\n\n fn predict(&self, k: usize) -> Result<Label, ()> {\n\n if k > self.neighbors.len() {\n\n return Err(());\n\n }\n\n\n\n // TODO: remember prediction counts for some k,\n\n // and update them when add_example() is called?\n\n\n\n // If the k-th element does not exist or has distance larger\n\n // than the (k-1)-th element then we don't split ties.\n\n let ties_d = self.neighbors[k - 1].distance;\n\n let no_ties = (self.neighbors.len() <= k) || (self.neighbors[k].distance > ties_d);\n\n // Do self.extra_ties play a part in this prediction?\n", "file_path": "src/estimates/knn.rs", "rank": 78, "score": 45724.56159787458 }, { "content": " if let Some(c) = label_count.get(&ties_y_pred) {\n\n count += c;\n\n }\n\n\n\n if count > y_count {\n\n y_pred = ties_y_pred;\n\n }\n\n }\n\n\n\n Ok(y_pred)\n\n }\n\n\n\n /// Returns the index of the first neighbor with the same distance as\n\n /// self.neighbors[i].\n\n fn first_of_ties(&self, mut i: usize) -> usize {\n\n if i == 0 || self.neighbors.is_empty() {\n\n return 0;\n\n }\n\n\n\n let d = self\n", "file_path": "src/estimates/knn.rs", "rank": 79, "score": 45724.34751402843 }, { "content": " // neighbors' distances are [0, 1, 1, 2, 2, 2, 2], and the last updated\n\n // index was 5 (distance 2), then updated_k = 3 (the first \"2\").\n\n updated_k: usize,\n\n // Maximum number of neighbors (excluding extra_ties).\n\n max_k: usize,\n\n distance: D,\n\n}\n\n\n\nimpl<D> NearestNeighbors<D>\n\nwhere\n\n D: Fn(&ArrayView1<f64>, &ArrayView1<f64>) -> f64 + Copy,\n\n{\n\n /// Init a list of neighbors for a specified test object x.\n\n fn new(x: &ArrayView1<f64>, max_k: usize, distance: D) -> NearestNeighbors<D> {\n\n NearestNeighbors {\n\n x: x.to_owned(),\n\n // Capacity: max_k + 1 for when we insert a 
new element and then\n\n // remove another one from the tail.\n\n neighbors: Vec::with_capacity(max_k + 1),\n\n extra_ties: HashMap::new(),\n", "file_path": "src/estimates/knn.rs", "rank": 80, "score": 45724.217454928745 }, { "content": "\n\n knn1.add_example(&array![2., 1.].view(), 2);\n\n\n\n assert_eq!(knn1.predict(1), Ok(2));\n\n assert_eq!(knn1.predict(3), Ok(2));\n\n assert!(knn1.predict(5) == Ok(2) || knn1.predict(5) == Ok(0));\n\n\n\n assert_eq!(knn2.predict(1), Ok(2));\n\n assert_eq!(knn2.predict(3), Ok(2));\n\n assert!(knn2.predict(5) == Ok(2) || knn2.predict(5) == Ok(1));\n\n }\n\n\n\n #[test]\n\n fn knn_forward_predictions() {\n\n let train_x = array![[8.], [7.], [6.], [5.], [4.], [3.], [2.], [1.]];\n\n let train_y = array![0, 0, 0, 1, 0, 1, 1, 2];\n\n let x = array![0.];\n\n let max_k = 8;\n\n let distance = euclidean_distance;\n\n\n", "file_path": "src/estimates/knn.rs", "rank": 81, "score": 45723.9345820763 }, { "content": " // Update prediction.\n\n let pred = neigh.predict(current_k)?;\n\n if pred == *old_pred {\n\n continue;\n\n }\n\n\n\n // Update error.\n\n let error = if pred != *true_y { 1 } else { 0 };\n\n\n\n *old_pred = pred;\n\n\n\n match (error, *old_error) {\n\n (1, 0) => self.k_error_count += 1,\n\n (0, 1) => self.k_error_count -= 1,\n\n // No need to update. Note that we do not need\n\n // to set *old_error = error either, as they're\n\n // also the same. 
So we can return now.\n\n _ => continue,\n\n };\n\n\n", "file_path": "src/estimates/knn.rs", "rank": 82, "score": 45723.785974660255 }, { "content": " extra_ties_dist: None,\n\n updated_k: 0,\n\n max_k,\n\n distance,\n\n }\n\n }\n\n\n\n /// Init a list of neighbors for a specified test object x, and given\n\n /// training data.\n\n ///\n\n /// # Arguments\n\n /// * `x` - Test object.\n\n /// * `train_x` - Training objects.\n\n /// * `train_y` - Training labels.\n\n /// * `max_k` - Maximum number of neighbors to store for test object x.\n\n ///\n\n fn from_data(\n\n x: &ArrayView1<f64>,\n\n train_x: &ArrayView2<f64>,\n\n train_y: &ArrayView1<Label>,\n", "file_path": "src/estimates/knn.rs", "rank": 83, "score": 45723.65708281904 }, { "content": " assert_eq!(knn.predict(1), Ok(0));\n\n assert_eq!(knn.predict(3), Ok(1));\n\n assert_eq!(knn.predict(5), Ok(1));\n\n }\n\n\n\n #[test]\n\n fn knn_predictions_multivariate() {\n\n let train_x = array![\n\n [1., 3.],\n\n [1., 2.],\n\n [2., 3.],\n\n [2., 2.],\n\n [3., 2.],\n\n [2., 2.],\n\n [2., 2.]\n\n ];\n\n let train_y = array![0, 0, 0, 1, 1, 2, 2];\n\n\n\n let x1 = array![2., 2.];\n\n let x2 = array![2., 2.];\n", "file_path": "src/estimates/knn.rs", "rank": 84, "score": 45723.268585427846 }, { "content": " Err(()),\n\n Err(()),\n\n Ok(0),\n\n Ok(1),\n\n ];\n\n\n\n for (i, (x, y)) in train_x.outer_iter().zip(train_y.iter()).enumerate() {\n\n knn.add_example(&x, *y);\n\n assert_eq!(knn.predict(1), expected_preds_1[i]);\n\n assert_eq!(knn.predict(3), expected_preds_3[i]);\n\n assert_eq!(knn.predict(5), expected_preds_5[i]);\n\n assert_eq!(knn.predict(7), expected_preds_7[i]);\n\n }\n\n }\n\n\n\n #[test]\n\n fn knn_forward_errors() {\n\n let train_x = array![[8.], [7.], [6.], [5.], [4.], [3.], [2.], [1.]];\n\n let train_y = array![0, 0, 0, 1, 0, 1, 1, 2];\n\n let test_x = array![[3.], [0.], [6.], [1.], [6.], [4.], [5.]];\n", "file_path": "src/estimates/knn.rs", "rank": 85, "score": 45723.24810843276 }, { "content": " let mut 
ties_y_count = 0;\n\n // \"Symbolic\" value for ties label.\n\n const TIES_LABEL: usize = std::usize::MAX;\n\n\n\n for (i, neigh) in self.neighbors.iter().enumerate() {\n\n if !approx_eq!(f64, neigh.distance, ties_d) {\n\n if i >= k {\n\n break;\n\n }\n\n let count = label_count.entry(neigh.label).or_insert(0);\n\n *count += 1;\n\n if *count > y_count {\n\n y_pred = neigh.label;\n\n y_count = *count;\n\n }\n\n } else {\n\n // Count labels within ties.\n\n let count = ties_label_count.entry(neigh.label).or_insert(0);\n\n *count += 1;\n\n if *count > ties_y_count {\n", "file_path": "src/estimates/knn.rs", "rank": 86, "score": 45722.99856565351 }, { "content": " //NOTE: the prediction vector right\n\n //below this comment may also be\n\n //vec![1, 1, 1, 1, 0, 1, 0].\n\n vec![1, 1, 0, 1, 0, 1, 1],\n\n vec![0; 7],\n\n vec![1, 1, 0, 1, 0, 1, 0],\n\n vec![1, 1, 0, 1, 0, 1, 0],\n\n vec![1, 1, 0, 1, 0, 1, 0],\n\n ];\n\n let ks = vec![1, 1, 1, 1, 3, 3, 5, 5];\n\n\n\n for (i, (x, y)) in train_x.outer_iter().zip(train_y.iter()).enumerate() {\n\n knn.set_k(ks[i]).unwrap();\n\n knn.add_example(&x, *y).unwrap();\n\n assert_eq!(knn.get_error(), expected_error[i]);\n\n assert_eq!(knn.predictions, expected_preds[i]);\n\n }\n\n }\n\n\n\n #[test]\n", "file_path": "src/estimates/knn.rs", "rank": 87, "score": 45722.961129813964 }, { "content": " let extra_ties_matter = !self.extra_ties.is_empty() && (k == self.neighbors.len());\n\n\n\n if no_ties && !extra_ties_matter {\n\n return self.predict_no_ties(k);\n\n }\n\n\n\n // Behold!\n\n // Follows a couple of hours worth of code craziness which makes\n\n // our ties splitting code run in O(max_k) (in fact, it's usually going\n\n // to run in O(k+k_ties), where k_ties is the number of ties,\n\n // and generally k+k_ties << max_k).\n\n // We keep two hash maps: one accounting for labels' count (with\n\n // a placeholder symbol std::usize::MAX when a label is part of ties),\n\n // and the other one counting labels in ties.\n\n let mut 
label_count = HashMap::new();\n\n let mut ties_label_count = HashMap::new();\n\n\n\n let mut y_pred = 0;\n\n let mut y_count = 0;\n\n let mut ties_y_pred = 0;\n", "file_path": "src/estimates/knn.rs", "rank": 88, "score": 45722.701387853245 }, { "content": " }\n\n\n\n // Update updated_k with ties.\n\n self.updated_k = self.first_of_ties(self.neighbors.len() - 1);\n\n } else {\n\n // Insert sorted.\n\n let new = Neighbor::new(d, y);\n\n let pos = self.neighbors.binary_search(&new).unwrap_or_else(|e| e);\n\n self.neighbors.insert(pos, new);\n\n\n\n // Update updated_k with ties.\n\n self.updated_k = self.first_of_ties(pos);\n\n\n\n if let Some(removed) = self.neighbors.pop() {\n\n let last_neigh = self.neighbors.last().unwrap();\n\n // Either add to ties, or remove all ties.\n\n if approx_eq!(f64, last_neigh.distance, removed.distance) {\n\n //self.ties.push(removed);\n\n if self.extra_ties.is_empty() {\n\n self.extra_ties_dist = Some(removed.distance);\n", "file_path": "src/estimates/knn.rs", "rank": 89, "score": 45722.3830668153 }, { "content": "\n\n if self.neighbors.len() < self.max_k {\n\n // If still filling, insert sorted.\n\n let new = Neighbor::new(d, y);\n\n let pos = self.neighbors.binary_search(&new).unwrap_or_else(|e| e);\n\n self.neighbors.insert(pos, new);\n\n\n\n // Update updated_k with ties.\n\n self.updated_k = self.first_of_ties(pos);\n\n } else if self.neighbors.last().unwrap().distance < d {\n\n return false;\n\n } else if approx_eq!(f64, self.neighbors.last().unwrap().distance, d) {\n\n // Handle ties.\n\n if self.extra_ties.is_empty() {\n\n self.extra_ties_dist = Some(d);\n\n }\n\n // Could do this after.\n\n {\n\n let count = self.extra_ties.entry(y).or_insert(0);\n\n *count += 1;\n", "file_path": "src/estimates/knn.rs", "rank": 90, "score": 45722.05457821873 }, { "content": " *old_error = error;\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n /// Returns the error count for the current k.\n\n fn get_error_count(&self) -> usize {\n\n 
self.k_error_count as usize\n\n }\n\n\n\n /// Returns the error for the current k.\n\n fn get_error(&self) -> f64 {\n\n f64::from(self.k_error_count) / (self.labels.len() as f64)\n\n }\n\n\n\n /// Returns the current errors for each test point.\n\n fn get_individual_errors(&self) -> Vec<bool> {\n\n self.errors\n", "file_path": "src/estimates/knn.rs", "rank": 91, "score": 45721.64630337173 }, { "content": " ties_y_pred = neigh.label;\n\n ties_y_count = *count;\n\n }\n\n // We have a special \"symbol\" for counting\n\n // how many elements with ties exist: ties_y.\n\n // However, we only count them up to k.\n\n let count = label_count.entry(TIES_LABEL).or_insert(0);\n\n if i >= k {\n\n continue;\n\n }\n\n *count += 1;\n\n if *count > y_count {\n\n y_pred = TIES_LABEL;\n\n y_count = *count;\n\n }\n\n }\n\n }\n\n\n\n // Include self.extra_ties, if extra ties have the same distance\n\n // as ties_d.\n", "file_path": "src/estimates/knn.rs", "rank": 92, "score": 45721.50455286433 }, { "content": " if Some(ties_d) == self.extra_ties_dist {\n\n for (y, extra_count) in &self.extra_ties {\n\n let count = ties_label_count.entry(*y).or_insert(0);\n\n *count += extra_count;\n\n if *count > ties_y_count {\n\n ties_y_pred = *y;\n\n ties_y_count = *count;\n\n }\n\n }\n\n }\n\n\n\n // If the predicted label is the same as the TIES_LABEL placeholder,\n\n // we can output the best label among ties. Otherwise, we need to\n\n // count which is the most frequent overall.\n\n if y_pred == TIES_LABEL {\n\n y_pred = ties_y_pred;\n\n } else {\n\n let mut count = *label_count\n\n .get(&TIES_LABEL)\n\n .expect(\"[!] 
unexpected error in splitting ties\");\n", "file_path": "src/estimates/knn.rs", "rank": 93, "score": 45721.12523786033 }, { "content": " [0, 0, 0, 0, 0, 0, 1],\n\n [1, 1, 0, 1, 0, 1, 1],\n\n [0, 0, 0, 0, 0, 0, 0],\n\n [0, 0, 0, 0, 0, 0, 0],\n\n [0, 0, 0, 0, 0, 0, 0],\n\n ];\n\n let expected_error = vec![\n\n 0.8571428571428571,\n\n 0.7142857142857143,\n\n 0.7142857142857143,\n\n 0.5714285714285714,\n\n 0.5714285714285714,\n\n 0.42857142857142855,\n\n 0.42857142857142855,\n\n 0.42857142857142855,\n\n ];\n\n // NOTE: expected_error correspond to the following error counts:\n\n // [6, 5, 5, 4, 4, 3, 3]\n\n\n\n for (i, (x, y)) in train_x.outer_iter().zip(train_y.iter()).enumerate() {\n", "file_path": "src/estimates/knn.rs", "rank": 94, "score": 45719.399266574714 }, { "content": " D: Fn(&ArrayView1<f64>, &ArrayView1<f64>) -> f64 + Send + Sync + Copy,\n\n{\n\n /// Create a new NN bound estimator.\n\n pub fn new(\n\n test_x: &ArrayView2<f64>,\n\n test_y: &ArrayView1<Label>,\n\n distance: D,\n\n nlabels: usize,\n\n ) -> NNBoundEstimator<D> {\n\n // NOTE: the value of max_n here does not matter, as it is\n\n // only used for computing max_k, which is fixed to 1\n\n // for the KNNStrategy:NN.\n\n let max_n = 1;\n\n\n\n NNBoundEstimator {\n\n knn: KNNEstimator::new(test_x, test_y, max_n, distance, KNNStrategy::NN),\n\n nlabels,\n\n }\n\n }\n\n}\n", "file_path": "src/estimates/nn_bound.rs", "rank": 95, "score": 45157.94073048585 }, { "content": "//! 
An estimator returning the bound based on the NN classifier.\n\nuse ndarray::*;\n\n\n\nuse crate::estimates::{nn_bound, BayesEstimator, KNNEstimator, KNNStrategy};\n\nuse crate::Label;\n\n\n\n/// Defines an estimator that returns the NN bound by Cover&Hart.\n\n///\n\n/// This estimate is asymptotically guaranteed to lower bound the\n\n/// true Bayes risk.\n\npub struct NNBoundEstimator<D>\n\nwhere\n\n D: Fn(&ArrayView1<f64>, &ArrayView1<f64>) -> f64 + Send + Sync + Copy,\n\n{\n\n knn: KNNEstimator<D>,\n\n nlabels: usize,\n\n}\n\n\n\nimpl<D> NNBoundEstimator<D>\n\nwhere\n", "file_path": "src/estimates/nn_bound.rs", "rank": 96, "score": 45156.428660254984 }, { "content": "\n\n/// This implementation maps exactly that of KNNEstimator,\n\n/// except for get_error(), which returns the bound.\n\nimpl<D> BayesEstimator for NNBoundEstimator<D>\n\nwhere\n\n D: Fn(&ArrayView1<f64>, &ArrayView1<f64>) -> f64 + Send + Sync + Copy,\n\n{\n\n /// Adds a new example.\n\n fn add_example(&mut self, x: &ArrayView1<f64>, y: Label) -> Result<(), ()> {\n\n self.knn.add_example(x, y)\n\n }\n\n /// Returns the error count.\n\n fn get_error_count(&self) -> usize {\n\n self.knn.get_error_count()\n\n }\n\n\n\n /// Returns the error for the current k.\n\n fn get_error(&self) -> f64 {\n\n let error = self.knn.get_error();\n\n nn_bound(error, self.nlabels)\n\n }\n\n\n\n /// Returns the current errors for each test point.\n\n fn get_individual_errors(&self) -> Vec<bool> {\n\n self.knn.get_individual_errors()\n\n }\n\n}\n", "file_path": "src/estimates/nn_bound.rs", "rank": 97, "score": 45152.00854992695 }, { "content": "use ndarray::prelude::*;\n\nuse strsim::generic_levenshtein;\n\n\n\n/// Computes the NN bound derived from Cover&Hart, given\n\n/// the error and the number of labels.\n", "file_path": "src/estimates/knn_utils.rs", "rank": 98, "score": 42996.55449853342 }, { "content": "#[derive(Debug)]\n\nstruct Neighbor {\n\n // Distance from the object of which this is a neighbor.\n\n distance: 
f64,\n\n // Class of this object.\n\n label: Label,\n\n}\n\n\n\nimpl Neighbor {\n\n /// Constructs a new Neighbor.\n\n fn new(distance: f64, label: Label) -> Neighbor {\n\n Neighbor { distance, label }\n\n }\n\n}\n\n\n\n// Ordering for Neighbor.\n\nimpl Ord for Neighbor {\n\n fn cmp(&self, other: &Neighbor) -> Ordering {\n\n let self_d = OrderedFloat::from(self.distance);\n\n let other_d = OrderedFloat::from(other.distance);\n\n\n", "file_path": "src/estimates/knn.rs", "rank": 99, "score": 42990.08862972183 } ]
Rust
src/emit/flatbin.rs
kubasz/rvasm
f28c2e857bd41c5f511bd5494ae89d8425882535
use crate::arch; use crate::parser::Node; use smallvec::SmallVec; use std::collections::HashMap; #[derive(Clone, Debug)] pub enum EmitError { UnexpectedNodeType(String), InvalidInstruction(String), InvalidArgumentCount(String), InvalidArgumentType(String, usize), InvalidEncoding(String), DuplicateLabel(String), DuplicateConstant(String), } pub fn emit_flat_binary(spec: &arch::RiscVSpec, ast: &Node) -> Result<Vec<u8>, EmitError> { let mut state = BinaryEmitState { out_buf: Vec::new(), out_pos: 0, deferred: Vec::new(), label_set: HashMap::new(), local_label_set: HashMap::new(), const_set: HashMap::new(), }; emit_binary_recurse(spec, &mut state, ast).map(move |_| state.out_buf) } #[derive(Debug)] struct BinaryEmitState { out_buf: Vec<u8>, out_pos: usize, deferred: Vec<(usize, Node)>, label_set: HashMap<String, u64>, local_label_set: HashMap<String, u64>, const_set: HashMap<String, u64>, } impl BinaryEmitState { fn accomodate_bytes(&mut self, byte_count: usize) -> &mut [u8] { let start_pos = self.out_pos; let end_pos = start_pos + byte_count; if self.out_buf.len() < end_pos { self.out_buf.resize(end_pos, 0); } self.out_pos = end_pos; &mut self.out_buf[start_pos..end_pos] } fn find_const(&self, key: &str, spec: &arch::RiscVSpec) -> Option<u64> { self.label_set .get(key) .or_else(|| self.local_label_set.get(key)) .or_else(|| self.const_set.get(key)) .copied() .or_else(|| spec.get_const(key)) } } fn emit_deferred(spec: &arch::RiscVSpec, state: &mut BinaryEmitState) -> Result<(), EmitError> { let mut to_remove = Vec::new(); let mut to_emit = Vec::new(); for (i, (pos, insn)) in state.deferred.iter().enumerate() { let pc = *pos as u64; let simp = insn.emitter_simplify(&|cname| state.find_const(cname, spec), pc); if !simp.1 { continue; } to_emit.push((*pos, simp.0)); to_remove.push(i); } for i in to_remove.iter().rev() { state.deferred.swap_remove(*i); } for (pos, insn) in to_emit.into_iter() { let saved_pos = state.out_pos; state.out_pos = pos; emit_binary_recurse(&spec, 
state, &insn)?; state.out_pos = saved_pos; } Ok(()) } fn emit_binary_recurse( spec: &arch::RiscVSpec, state: &mut BinaryEmitState, node: &Node, ) -> Result<(), EmitError> { use Node::*; let ialign_bytes = (spec.get_const("IALIGN").unwrap_or(32) as usize + 7) / 8; let max_ilen_bytes = (spec.get_const("ILEN").unwrap_or(32) as usize + 7) / 8; match node { Root(nodes) => { for node in nodes.iter() { emit_binary_recurse(spec, state, node)?; } emit_deferred(spec, state)?; if let Some(defnode) = state.deferred.first() { return Err(EmitError::UnexpectedNodeType(format!("{:?}", defnode))); } Ok(()) } Label(lname) => { if lname.starts_with('.') { if state .local_label_set .insert(lname.to_owned(), state.out_pos as u64) .is_some() { return Err(EmitError::DuplicateLabel(lname.to_owned())); } } else { emit_deferred(spec, state)?; state.local_label_set.clear(); if state .label_set .insert(lname.to_owned(), state.out_pos as u64) .is_some() { return Err(EmitError::DuplicateLabel(lname.to_owned())); } } Ok(()) } Instruction(iname, args) => { match iname.as_ref() { ".org" | ".ORG" => { if args.len() != 1 { return Err(EmitError::InvalidArgumentCount(iname.clone())); } if let (Node::Argument(box Node::Integer(adr)), _) = args[0].emitter_simplify( &|cname| state.find_const(cname, spec), state.out_pos as u64, ) { let new_out_pos = adr as usize; if new_out_pos > state.out_buf.len() { state .out_buf .reserve(new_out_pos - state.out_buf.len() + 32 * 32); state.out_buf.resize(new_out_pos, 0); } state.out_pos = new_out_pos; Ok(()) } else { Err(EmitError::InvalidArgumentType(iname.clone(), 0)) } } ".equ" | ".EQU" | ".define" | ".DEFINE" => { if args.len() != 2 { return Err(EmitError::InvalidArgumentCount(iname.clone())); } if let Node::Argument(box Node::Identifier(defname)) = &args[0] { if let (Node::Argument(box Node::Integer(val)), _) = args[1] .emitter_simplify( &|cname| state.find_const(cname, spec), state.out_pos as u64, ) { if state.const_set.insert(defname.to_owned(), val).is_none() { 
Ok(()) } else { Err(EmitError::DuplicateConstant(defname.to_owned())) } } else { Err(EmitError::InvalidArgumentType(iname.clone(), 1)) } } else { Err(EmitError::InvalidArgumentType(iname.clone(), 0)) } } _ => { let specinsn = spec .get_instruction_by_name(iname) .ok_or_else(|| EmitError::InvalidInstruction(iname.clone()))?; let fmt = specinsn.get_format(&spec); if args.len() != specinsn.args.len() { return Err(EmitError::InvalidArgumentCount(iname.clone())); } let ilen_bytes = (fmt.ilen + 7) / 8; if ilen_bytes > max_ilen_bytes { return Err(EmitError::InvalidEncoding(iname.clone())); } let aligned_pos = (state.out_pos + ialign_bytes - 1) / ialign_bytes * ialign_bytes; if state.out_pos != aligned_pos { state.accomodate_bytes(aligned_pos - state.out_pos); } let simpinsn = node.emitter_simplify( &|cname| state.find_const(cname, spec), state.out_pos as u64, ); if !simpinsn.1 { state.deferred.push((state.out_pos, simpinsn.0)); state.accomodate_bytes(ilen_bytes); return Ok(()); } let args; if let Node::Instruction(_, sargs) = simpinsn.0 { args = sargs; } else { panic!("Simplified instruction is now a {:?}", simpinsn.0); } let mut argv: SmallVec<[u64; 4]> = SmallVec::new(); for (i, arg) in args.iter().enumerate() { match fmt.fields[specinsn.args[i]].vtype { arch::FieldType::Value => { if let Node::Argument(box Node::Integer(val)) = arg { argv.push(*val); } else { return Err(EmitError::InvalidArgumentType(iname.clone(), i)); } } arch::FieldType::Register => { if let Node::Argument(box Node::Register(rid)) = arg { argv.push(*rid as u64); } else { return Err(EmitError::InvalidArgumentType(iname.clone(), i)); } } } } assert_eq!(argv.len(), specinsn.args.len()); let bytes = state.accomodate_bytes(ilen_bytes); specinsn .encode_into(bytes, spec, argv.as_slice()) .map_err(|_| EmitError::InvalidEncoding(iname.clone())) } } } _ => Err(EmitError::UnexpectedNodeType(format!("{:?}", node))), } }
use crate::arch; use crate::parser::Node; use smallvec::SmallVec; use std::collections::HashMap; #[derive(Clone, Debug)] pub enum EmitError { UnexpectedNodeType(String), InvalidInstruction(String), InvalidArgumentCount(String), InvalidArgumentType(String, usize), InvalidEncoding(String), DuplicateLabel(String), DuplicateConstant(String), } pub fn emit_flat_binary(spec: &arch::RiscVSpec, ast: &Node) -> Result<Vec<u8>, EmitError> { let mut state = BinaryEmitState { out_buf: Vec::new(), out_pos: 0, deferred: Vec::new(), label_set: HashMap::new(), local_label_set: HashMap::new(), const_set: HashMap::new(), }; emit_binary_recurse(spec, &mut state, ast).map(move |_| state.out_buf) } #[derive(Debug)] struct BinaryEmitState { out_buf: Vec<u8>, out_pos: usize, deferred: Vec<(usize, Node)>, label_set: HashMap<String, u64>, local_label_set: HashMap<String, u64>, const_set: HashMap<String, u64>, } impl BinaryEmitState { fn accomodate_bytes(&mut self, byte_count: usize) -> &mut [u8] { let start_pos = self.out_pos; let end_pos = start_pos + byte_count; if self.out_buf.len() < end_pos { self.out_buf.resize(end_pos, 0); } self.out_pos = end_pos; &mut self.out_buf[start_pos..end_pos] } fn find_const(&self, key: &str, spec: &arch::RiscVSpec) -> Option<u64> { self.label_set .get(key) .or_else(|| self.local_label_set.get(key)) .or_else(|| self.const_set.get(key)) .copied() .or_else(|| spec.get_const(key)) } } fn emit_deferred(spec: &arch::RiscVSpec, state: &mut BinaryEmitState) -> Result<(), EmitError> { let mut to_remove = Vec::new(); let mut to_emit = Vec::new(); for (i, (pos, insn)) in state.deferred.iter().enumerate() { let pc = *pos as u64; let simp = insn.emitter_simplify(&|cname| state.find_const(cname, spec), pc); if !simp.1 { continue; } to_emit.push((*pos, simp.0)); to_remove.push(i); } for i in to_remove.iter().rev() { state.deferred.swap_remove(*i); } for (pos, insn) in to_emit.into_iter() { let saved_pos = state.out_pos; state.out_pos = pos; emit_binary_recurse(&spec, 
state, &insn)?; state.out_pos = saved_pos; } Ok(()) } fn emit_binary_recurse( spec: &arch::RiscVSpec, state: &mut BinaryEmitState, node: &Node, ) -> Result<(), EmitError> { use Node::*; let ialign_bytes = (spec.get_const("IALIGN").unwrap_or(32) as usize + 7) / 8; let max_ilen_bytes = (spec.get_const("ILEN").unwrap_or(32) as usize + 7) / 8; match node { Root(nodes) => { for node in nodes.iter() { emit_binary_recurse(spec, state, node)?; } emit_deferred(spec, state)?; if let Some(defnode) = state.deferred.first() { return Err(EmitError::UnexpectedNodeType(format!("{:?}", defnode))); } Ok(()) } Label(lname) => { if lname.starts_with('.') { if state .local_label_set .insert(lname.to_owned(), state.out_pos as u64) .is_some() { return Err(EmitError::DuplicateLabel(lname.to_owned())); } } else { emit_deferred(spec, state)?; state.local_label_set.clear(); if state .label_set .insert(lname.to_owned(), state.out_pos as u64) .is_some() { return Err(EmitError::DuplicateLabel(lname.to_owned())); } } Ok(()) } Instruction(iname, args) => { match iname.as_ref() { ".org" | ".ORG" => { if args.len() != 1 { return Err(EmitError::InvalidArgumentCount(iname.clone())); } if let (Node::Argument(box Node::Integer(adr)), _) = args[0].emitter_simplify( &|cname| state.find_const(cname, spec), state.out_pos as u64, ) { let new_out_pos = adr as usize; if new_out_pos > state.out_buf.len() { state .out_buf .reserve(new_out_pos - state.out_buf.len() + 32 * 32); state.out_buf.resize(new_out_pos, 0); } state.out_pos = new_out_pos; Ok(()) } else { Err(EmitError::InvalidArgumentType(iname.clone(), 0)) } } ".equ" | ".EQU" | ".define" | ".DEFINE" => { if args.len() != 2 { return Err(EmitError::InvalidArgumentCount(iname.clone())); } if let Node::Argument(box Node::Identifier(defname)) = &args[0] { if let (Node::Argument(box Node::Integer(val)), _) = args[1] .emitter_simplify( &|cname| state.find_const(cname, spec), state.out_pos as u64, ) { if state.const_set.insert(defname.to_owned(), val).is_none() { 
Ok(()) } else { Err(EmitError::DuplicateConstant(defname.to_owned())) } } else { Err(EmitError::InvalidArgumentType(iname.clone(), 1)) } } else { Err(EmitError::InvalidArgumentType(iname.clone(), 0)) } } _ => { let specinsn = spec .get_instruction_by_name(iname) .ok_or_else(|| EmitError::InvalidInstruction(iname.clone()))?; let fmt = specinsn.get_format(&spec); if args.len() != specinsn.args.len() { return Err(EmitError::InvalidArgumentCount(iname.clone())); } let ilen_bytes = (fmt.ilen + 7) / 8; if ilen_bytes > max_ilen_bytes { return Err(EmitError::InvalidEncoding(iname.clone())); } let aligned_pos = (state.out_pos + ialign_bytes - 1) / ialign_bytes * ialign_bytes; if state.out_pos != aligned_pos { state.accomodate_bytes(aligned_pos - state.out_pos); } let simpinsn = node.emitter_simplify( &|cname| state.find_const(cname, spec), state.out_pos as u64, ); if !simpinsn.1 { state.deferred.push((state.out_pos, simpinsn.0)); state.accomodate_bytes(ilen_bytes); return Ok(()); } let args;
let mut argv: SmallVec<[u64; 4]> = SmallVec::new(); for (i, arg) in args.iter().enumerate() { match fmt.fields[specinsn.args[i]].vtype { arch::FieldType::Value => { if let Node::Argument(box Node::Integer(val)) = arg { argv.push(*val); } else { return Err(EmitError::InvalidArgumentType(iname.clone(), i)); } } arch::FieldType::Register => { if let Node::Argument(box Node::Register(rid)) = arg { argv.push(*rid as u64); } else { return Err(EmitError::InvalidArgumentType(iname.clone(), i)); } } } } assert_eq!(argv.len(), specinsn.args.len()); let bytes = state.accomodate_bytes(ilen_bytes); specinsn .encode_into(bytes, spec, argv.as_slice()) .map_err(|_| EmitError::InvalidEncoding(iname.clone())) } } } _ => Err(EmitError::UnexpectedNodeType(format!("{:?}", node))), } }
if let Node::Instruction(_, sargs) = simpinsn.0 { args = sargs; } else { panic!("Simplified instruction is now a {:?}", simpinsn.0); }
if_condition
[ { "content": "pub fn ast_from_str(s: &str, spec: &arch::RiscVSpec) -> Result<Node, ParseError> {\n\n grammar::top_level(s, spec)\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 0, "score": 137818.47554548547 }, { "content": "pub fn ast_from_file(path: &str, spec: &arch::RiscVSpec) -> Result<Node, ParseError> {\n\n use std::fs::File;\n\n use std::io::prelude::*;\n\n use std::io::BufReader;\n\n let f = File::open(path).unwrap_or_else(|_| panic!(\"Could not open source file {}\", path));\n\n let mut rd = BufReader::new(f);\n\n let mut buf = String::new();\n\n rd.read_to_string(&mut buf)\n\n .unwrap_or_else(|_| panic!(\"Could not read from source file {}\", path));\n\n ast_from_str(&buf, spec)\n\n}\n", "file_path": "src/parser.rs", "rank": 2, "score": 128122.6449331164 }, { "content": "struct Opt {\n\n #[structopt(help = \"Input file path\")]\n\n input_file: Option<PathBuf>,\n\n\n\n #[structopt(\n\n short = \"s\",\n\n long = \"string\",\n\n help = \"Input string instead of file, all semicolons are replaced by newlines\"\n\n )]\n\n input_string: Option<String>,\n\n\n\n #[structopt(\n\n short = \"o\",\n\n long = \"output-file\",\n\n help = \"Output (assembled) file path\"\n\n )]\n\n output_file: Option<PathBuf>,\n\n\n\n #[structopt(short = \"v\", long = \"verbose\", help = \"Enable additional output\")]\n\n verbose: bool,\n", "file_path": "src/main.rs", "rank": 5, "score": 36397.842831634545 }, { "content": "#[derive(Debug, Copy, Clone, StructOpt)]\n\nenum OutputFormat {\n\n Flat,\n\n}\n\nimpl std::str::FromStr for OutputFormat {\n\n type Err = &'static str;\n\n\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n match s.to_ascii_lowercase().as_ref() {\n\n \"flat\" => Ok(OutputFormat::Flat),\n\n _ => Err(\"Invalid output format specified\"),\n\n }\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, StructOpt)]\n\n#[structopt(\n\n name = \"rvasm\",\n\n about = \"Usage of the rvasm RISC-V assembler\",\n\n setting = structopt::clap::AppSettings::ColoredHelp\n\n)]\n", 
"file_path": "src/main.rs", "rank": 6, "score": 35641.38554078539 }, { "content": "fn main() {\n\n let opt = Opt::from_args();\n\n if opt.input_string.is_none() && opt.input_file.is_none() {\n\n Opt::clap().print_long_help().unwrap();\n\n eprintln!(\"A source file or string is required\");\n\n return;\n\n }\n\n if opt.input_string.is_some() && opt.input_file.is_some() {\n\n Opt::clap().print_long_help().unwrap();\n\n eprintln!(\"Only one source allowed: either a file or a string\");\n\n return;\n\n }\n\n\n\n let mut std_path = Vec::new();\n\n std_path.push(PathBuf::from(\"./cfg/\"));\n\n\n\n let mut rv = crate::arch::RiscVSpec::new();\n\n if let Err(e) = rv.load_arch_cfg(&std_path, &opt.arch, opt.verbose) {\n\n eprintln!(\"Error loading arch-defined configuration: {:?}\", e);\n\n std::process::exit(1);\n", "file_path": "src/main.rs", "rank": 7, "score": 32204.874634731885 }, { "content": "#[test]\n\nfn test_parser_on_all_inputs() {\n\n use crate::parser::ast_from_file;\n\n use std::path::Path;\n\n\n\n let mut rv32i_str = String::new();\n\n use std::io::prelude::*;\n\n std::fs::File::open(\"./cfg/rv32i.toml\")\n\n .unwrap()\n\n .read_to_string(&mut rv32i_str)\n\n .unwrap();\n\n let mut rv = crate::arch::RiscVSpec::new();\n\n rv.load_single_cfg_string(&rv32i_str).expect(\"Parse error\");\n\n\n\n let dir = Path::new(\"./test/\")\n\n .read_dir()\n\n .expect(\"Can't open ./test folder of sample inputs\");\n\n for entry in dir {\n\n if entry.is_err() {\n\n continue;\n\n }\n", "file_path": "src/test.rs", "rank": 8, "score": 29555.215398403758 }, { "content": " Label(String),\n\n Argument(Box<Node>),\n\n Instruction(String, Vec<Node>),\n\n\n\n Root(Vec<Node>),\n\n}\n\n\n\nimpl Node {\n\n pub fn parse_u64(s: &str, radix: u32) -> Self {\n\n Node::Integer(u64::from_str_radix(&s.replace(\"_\", \"\"), radix).unwrap())\n\n }\n\n\n\n pub fn parse_register(spec: &arch::RiscVSpec, name: &str) -> Result<Self, &'static str> {\n\n spec.get_register_by_name(name)\n\n 
.map_or(Err(\"invalid register\"), |i| Ok(Node::Register(i.index)))\n\n }\n\n\n\n pub fn simplify(self) -> Self {\n\n use Node::*;\n\n match self {\n", "file_path": "src/parser.rs", "rank": 12, "score": 19.865283136325615 }, { "content": " Self {\n\n name,\n\n ..Default::default()\n\n }\n\n }\n\n\n\n pub fn get_format<'spec>(&self, spec: &'spec RiscVSpec) -> &'spec InstructionFormat {\n\n spec.get_instruction_format(self.format_idx).unwrap()\n\n }\n\n\n\n pub fn encode_into(\n\n &self,\n\n bytes: &mut [u8],\n\n spec: &RiscVSpec,\n\n argvals: &[u64],\n\n ) -> Result<(), ()> {\n\n assert_eq!(argvals.len(), self.args.len());\n\n let fmt = self.get_format(spec);\n\n for (fldid, fldval) in self.fields.iter() {\n\n let fld: &InstructionField = &fmt.fields[*fldid];\n", "file_path": "src/arch.rs", "rank": 14, "score": 14.960112315710267 }, { "content": "use crate::arch;\n\nuse crate::grammar;\n\n\n\n#[derive(Debug, Clone)]\n\npub enum Node {\n\n Identifier(String),\n\n Integer(u64),\n\n StringLiteral(Vec<u8>),\n\n Register(i32),\n\n PcValue,\n\n\n\n Negation(Box<Self>),\n\n Plus(Box<Self>, Box<Self>),\n\n Minus(Box<Self>, Box<Self>),\n\n Times(Box<Self>, Box<Self>),\n\n Divide(Box<Self>, Box<Self>),\n\n Shl(Box<Self>, Box<Self>),\n\n Shr(Box<Self>, Box<Self>),\n\n Ashr(Box<Self>, Box<Self>),\n\n\n", "file_path": "src/parser.rs", "rank": 15, "score": 12.681692865487541 }, { "content": " self.fields\n\n .iter()\n\n .map(|e| e.calculate_last_encoded_bit_index())\n\n .max()\n\n .unwrap_or(0)\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Default)]\n\npub struct InstructionDefinition {\n\n pub name: String,\n\n pub format_idx: usize,\n\n /// Indices into InstructionFormat.fields\n\n pub args: Vec<usize>,\n\n /// Indices into InstructionFormat.fields paired with assigned values\n\n pub fields: Vec<(usize, u64)>,\n\n}\n\n\n\nimpl InstructionDefinition {\n\n fn new(name: String) -> Self {\n", "file_path": "src/arch.rs", "rank": 16, "score": 12.081389794360875 }, { "content": " let mut 
sargs = Vec::new();\n\n for arg in args.iter() {\n\n let s = arg.emitter_simplify(const_provider, pc);\n\n sargs.push(s.0);\n\n succ &= s.1;\n\n }\n\n (Instruction(iname.to_owned(), sargs), succ)\n\n }\n\n\n\n Root(nodes) => {\n\n let mut succ = true;\n\n let mut snodes = Vec::new();\n\n for node in nodes.iter() {\n\n let s = node.emitter_simplify(const_provider, pc);\n\n snodes.push(s.0);\n\n succ &= s.1;\n\n }\n\n (Root(snodes), succ)\n\n }\n\n }\n\n }\n\n}\n\n\n\npub type ParseError = peg::error::ParseError<peg::str::LineCol>;\n\n\n", "file_path": "src/parser.rs", "rank": 17, "score": 11.80489102464008 }, { "content": " DuplicateInstruction(String),\n\n BadInstructionFormat(String),\n\n}\n\n\n\n// Creation & Parsing\n\nimpl RiscVSpec {\n\n pub fn new() -> Self {\n\n Self::default()\n\n }\n\n\n\n pub fn load_single_cfg_string(&mut self, content: &str) -> Result<(), LoadError> {\n\n let val = Self::string_to_toml(content)?;\n\n self.load_single_toml(&val)\n\n }\n\n\n\n pub fn load_single_cfg_file(&mut self, path: &std::path::Path) -> Result<(), LoadError> {\n\n let content = std::fs::read_to_string(path).map_err(|_| LoadError::InvalidArchSpec)?;\n\n self.load_single_cfg_string(&content)\n\n }\n\n\n", "file_path": "src/arch.rs", "rank": 18, "score": 11.770830873691956 }, { "content": " pub fn load_arch_cfg(\n\n &mut self,\n\n std_paths: &[PathBuf],\n\n arch_spec: &str,\n\n verbose: bool,\n\n ) -> Result<(), LoadError> {\n\n use petgraph::prelude::*;\n\n use regex::Regex;\n\n\n\n let mut docs = Vec::new();\n\n let re = Regex::new(r\"(RV[0-9]+[A-Za-z])([A-Z][a-z]*)*\").unwrap();\n\n let cap = re.captures(arch_spec);\n\n if cap.is_none() {\n\n return Err(LoadError::InvalidArchSpec);\n\n }\n\n let cap = cap.unwrap();\n\n for fp in cap.iter().skip(1) {\n\n if fp.is_none() {\n\n continue;\n\n }\n", "file_path": "src/arch.rs", "rank": 19, "score": 11.225664538399192 }, { "content": "use smallvec::SmallVec;\n\nuse std::collections::HashMap;\n\nuse 
std::path::PathBuf;\n\nuse toml;\n\n\n\n#[derive(Clone, Debug, Default)]\n\npub struct Register {\n\n pub index: i32,\n\n pub names: Vec<String>,\n\n pub size_in_bits: i32,\n\n}\n\n\n\nimpl Register {\n\n pub fn new(index: i32) -> Self {\n\n Self {\n\n index,\n\n ..Default::default()\n\n }\n\n }\n\n\n", "file_path": "src/arch.rs", "rank": 20, "score": 10.865616005689127 }, { "content": "use crate::arch;\n\nuse crate::parser::Node;\n\n\n\npeg::parser! { grammar asmpeg(spec: &arch::RiscVSpec) for str {\n\nrule comment() = quiet!{\";\" (!['\\n'][_])+}\n\nrule whitechar() = quiet!{[' '|'\\r'|'\\t']} / quiet!{comment()} / \"\\\\\\n\"\n\nrule whitespace() = quiet!{whitechar()+}\n\nrule newline() = quiet!{whitespace()?} \"\\n\"\n\nrule register() -> Node = quiet!{ s:$(['a'..='z'|'A'..='Z'|'.'|'_']['a'..='z'|'A'..='Z'|'0'..='9'|'.'|'_']*) {? Node::parse_register(spec, s) } } / expected!(\"register\")\n\nrule idstr() -> &'input str = quiet!{ !register() sv:$(['a'..='z'|'A'..='Z'|'.'|'_']['a'..='z'|'A'..='Z'|'0'..='9'|'.'|'_']*) { sv } } / expected!(\"identifier\")\n\nrule identifier() -> Node = s:idstr() { Node::Identifier(s.to_owned()) }\n\n\n\nrule integer() -> Node = quiet!{ \"0x\" n:$(['0'..='9'|'a'..='f'|'A'..='F'|'_']+) { Node::parse_u64(n, 16) } }\n\n / quiet!{ \"0o\" n:$(['0'..='7'|'_']+) { Node::parse_u64(n, 8) } }\n\n / quiet!{ \"0b\" n:$(['0'..='1'|'_']+) { Node::parse_u64(n, 2) } }\n\n / quiet!{ \"0d\"? 
n:$(['0'..='9'|'_']+) { Node::parse_u64(n, 10) } }\n\n / expected!(\"integer\")\n\n\n\nrule escape() -> u8 = _:\"\\\\n\" {\"\\n\".as_bytes()[0]} / _:\"\\\\t\" {\"\\t\".as_bytes()[0]}\n\n / _:\"\\\\\\\\\" {\"\\\\\".as_bytes()[0]} / _:\"\\\\r\" {\"\\r\".as_bytes()[0]}\n", "file_path": "src/grammar.rs", "rank": 21, "score": 10.80203964253711 }, { "content": " / \"\\\\x\" n:$(['0'..='9'|'a'..='f'|'A'..='F'|'_']*<2>) { u64::from_str_radix(n, 16).unwrap() as u8 }\n\n\n\nrule str_char<Q>(quote: rule<Q>) -> u8 = escape() / c:$(!quote() [_]) { c.as_bytes()[0] }\n\n\n\nrule char_literal() -> Node = \"'\" s:str_char(<\"'\">) \"'\" { Node::Integer(s as u64) }\n\nrule bytes_literal() -> Node = \"\\\"\" s:str_char(<\"\\\"\">)* \"\\\"\" { Node::StringLiteral(s) }\n\n\n\nrule negation() -> Node = \"-\" e:expression() { Node::Negation(box e) }\n\npub rule expr_atom() -> Node = whitespace()? \"(\" whitespace()? e:expression() whitespace()? \")\" whitespace()? {e.simplify()}\n\n / whitespace()? n:negation() whitespace()? {n.simplify()}\n\n / whitespace()? i:integer() whitespace()? {i}\n\n / whitespace()? i:identifier() whitespace()? {i}\n\n / whitespace()? \"$\" whitespace()? { Node::PcValue }\n\n / whitespace()? c:char_literal() whitespace()? {c}\n\n\n\npub rule expression() -> Node = precedence! 
{\n\n x:(@) \"<<\" y:@ { Node::Shl(box x, box y).simplify() }\n\n x:(@) \">>\" y:@ { Node::Shr(box x, box y).simplify() }\n\n x:(@) \">>>\" y:@ { Node::Ashr(box x, box y).simplify() }\n\n --\n", "file_path": "src/grammar.rs", "rank": 22, "score": 10.546426471352575 }, { "content": " // Consts\n\n consts: HashMap<String, u64>,\n\n // Registers\n\n registers: HashMap<i32, Register>,\n\n register_name_lookup: HashMap<String, i32>,\n\n // Instruction formats\n\n instruction_formats: Vec<InstructionFormat>,\n\n // Instructions\n\n instructions: Vec<InstructionDefinition>,\n\n instruction_name_lookup: HashMap<String, usize>,\n\n}\n\n\n\npub struct AbiFileInfo<'a> {\n\n pub name: &'a str,\n\n pub code: &'a str,\n\n pub spec: &'a str,\n\n}\n\n\n\n// Main functionality\n\nimpl RiscVSpec {\n", "file_path": "src/arch.rs", "rank": 23, "score": 10.541510723722277 }, { "content": "impl BitRangeMap {\n\n pub fn new(value_last: i32, value_first: i32, instruction_first: i32) -> Self {\n\n Self {\n\n value_last,\n\n value_first,\n\n instruction_first,\n\n }\n\n }\n\n\n\n pub fn instruction_last(&self) -> i32 {\n\n self.instruction_first + self.value_last - self.value_first\n\n }\n\n\n\n pub fn value_bitmask(&self) -> u64 {\n\n let value_len = self.value_last - self.value_first + 1;\n\n ((1 << value_len) - 1) << self.value_first\n\n }\n\n\n\n pub fn encode_into(&self, bytes: &mut [u8], value: u64) {\n\n let mut enc_value = (value & self.value_bitmask()) >> self.value_first;\n", "file_path": "src/arch.rs", "rank": 24, "score": 10.455543910481598 }, { "content": " let fmt = &self.instruction_formats[insn.format_idx];\n\n\n\n for argv in iargs.iter() {\n\n let argv = argv.as_str().ok_or_else(|| {\n\n LoadError::BadType(format!(\"instructions.{}.args[] item\", iname))\n\n })?;\n\n insn.args\n\n .push(\n\n fmt.fields\n\n .iter()\n\n .position(|x| x.name == argv)\n\n .ok_or_else(|| {\n\n LoadError::BadInstructionFormat(format!(\n\n \"instructions.{}.args[{}]\",\n\n iname, argv\n\n ))\n\n 
})?,\n\n );\n\n }\n\n\n", "file_path": "src/arch.rs", "rank": 25, "score": 10.434547771933325 }, { "content": " Negation(box Integer(i)) => Integer(i.wrapping_neg()),\n\n Plus(box Integer(a), box Integer(b)) => Integer(a.wrapping_add(b)),\n\n Minus(box Integer(a), box Integer(b)) => Integer(a.wrapping_sub(b)),\n\n Times(box Integer(a), box Integer(b)) => Integer(a.wrapping_mul(b)),\n\n Divide(box Integer(a), box Integer(b)) => Integer(a.wrapping_div(b)),\n\n Shl(box Integer(a), box Integer(b)) => Integer(a << b),\n\n Shr(box Integer(a), box Integer(b)) => Integer(a >> b),\n\n Ashr(box Integer(a), box Integer(b)) => Integer((a as i64 >> b as i64) as u64),\n\n _ => self,\n\n }\n\n }\n\n\n\n /// Returns: the simplified node and whether all the constants were reduced to integers.\n\n pub fn emitter_simplify<F: Fn(&str) -> Option<u64>>(\n\n &self,\n\n const_provider: &F,\n\n pc: u64,\n\n ) -> (Self, bool) {\n\n use Node::*;\n\n let cloned_f = || (self.clone(), false);\n", "file_path": "src/parser.rs", "rank": 26, "score": 10.42734163484751 }, { "content": " fld.encoding\n\n .iter()\n\n .for_each(|e| e.encode_into(bytes, *fldval));\n\n }\n\n for (argid, argval) in self.args.iter().zip(argvals) {\n\n let arg: &InstructionField = &fmt.fields[*argid];\n\n arg.encoding\n\n .iter()\n\n .for_each(|e| e.encode_into(bytes, *argval));\n\n }\n\n Ok(())\n\n }\n\n}\n\n\n\n#[derive(Debug, Default)]\n\npub struct RiscVSpec {\n\n // Meta\n\n loaded_names: Vec<String>,\n\n loaded_codes: Vec<String>,\n\n loaded_specs: Vec<String>,\n", "file_path": "src/arch.rs", "rank": 27, "score": 10.370491614507948 }, { "content": " .unwrap_or(0)\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Default)]\n\npub struct InstructionFormat {\n\n pub name: String,\n\n pub fields: SmallVec<[InstructionField; 8]>,\n\n pub ilen: usize,\n\n}\n\n\n\nimpl InstructionFormat {\n\n fn new(name: String) -> Self {\n\n Self {\n\n name,\n\n ..Default::default()\n\n }\n\n }\n\n\n\n fn calculate_last_encoded_bit_index(&self) -> 
i32 {\n", "file_path": "src/arch.rs", "rank": 28, "score": 10.00054513439368 }, { "content": " pub fn get_loaded_abis(&self) -> Vec<AbiFileInfo> {\n\n let mut v = Vec::new();\n\n assert_eq!(self.loaded_names.len(), self.loaded_codes.len());\n\n assert_eq!(self.loaded_names.len(), self.loaded_specs.len());\n\n for ((name, code), spec) in self\n\n .loaded_names\n\n .iter()\n\n .zip(self.loaded_codes.iter())\n\n .zip(self.loaded_specs.iter())\n\n {\n\n v.push(AbiFileInfo { name, code, spec });\n\n }\n\n v\n\n }\n\n\n\n // Consts\n\n\n\n pub fn get_const(&self, name: &str) -> Option<u64> {\n\n self.consts.get(name).copied()\n\n }\n", "file_path": "src/arch.rs", "rank": 29, "score": 9.282931648249589 }, { "content": " self.loaded_codes.push(\n\n meta.get(\"code\")\n\n .ok_or_else(|| MissingNode(\"meta.code\"))?\n\n .as_str()\n\n .ok_or_else(|| BadType(\"meta.code\"))?\n\n .to_owned(),\n\n );\n\n self.loaded_specs.push(\n\n meta.get(\"spec\")\n\n .ok_or_else(|| MissingNode(\"meta.spec\"))?\n\n .as_str()\n\n .ok_or_else(|| BadType(\"meta.spec\"))?\n\n .to_owned(),\n\n );\n\n\n\n // validate requirements\n\n let requires = meta.get(\"requires\");\n\n if let Some(requires) = requires {\n\n let list = requires\n\n .as_array()\n", "file_path": "src/arch.rs", "rank": 32, "score": 9.010748349216026 }, { "content": " .map_err(|_| LoadError::MalformedTOML)\n\n }\n\n\n\n /// Load integer or search in consts if a string\n\n fn toml_int(\n\n consts: &HashMap<String, u64>,\n\n key: String,\n\n v: &toml::Value,\n\n ) -> Result<i64, LoadError> {\n\n if let Some(i) = v.as_integer() {\n\n Ok(i)\n\n } else if let Some(s) = v.as_str() {\n\n consts\n\n .get(s)\n\n .ok_or_else(|| LoadError::ConstNotFound(s.to_owned()))\n\n .map(|x| *x as i64)\n\n } else {\n\n Err(LoadError::BadType(key))\n\n }\n\n }\n", "file_path": "src/arch.rs", "rank": 33, "score": 8.713963968885768 }, { "content": " let mut enc_mask = self.value_bitmask() >> self.value_first;\n\n let mut instr_byte = 
self.instruction_first as usize / 8;\n\n enc_value <<= self.instruction_first as usize % 8;\n\n enc_mask <<= self.instruction_first as usize % 8;\n\n while enc_mask != 0 {\n\n let bmask = (enc_mask & 0xff) as u8;\n\n let bval = (enc_value & 0xff) as u8;\n\n // zero out the bits to encode\n\n bytes[instr_byte] &= !bmask;\n\n // encode bits\n\n bytes[instr_byte] |= bval;\n\n // move on to next 8 bits\n\n enc_mask >>= 8;\n\n enc_value >>= 8;\n\n instr_byte += 1;\n\n }\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone, Debug)]\n", "file_path": "src/arch.rs", "rank": 34, "score": 8.610561443850596 }, { "content": "\n\n // figure out dependency graph\n\n let mut depgraph: StableGraph<usize, ()> =\n\n StableGraph::with_capacity(docs.len(), docs.len());\n\n let mut codes = HashMap::new();\n\n let mut nodes = Vec::new();\n\n for (i, doc) in docs.iter().enumerate() {\n\n let meta = doc\n\n .get(\"meta\")\n\n .ok_or_else(|| LoadError::MissingNode(\"meta\".to_owned()))?;\n\n let code = meta\n\n .get(\"code\")\n\n .ok_or_else(|| LoadError::MissingNode(\"meta.code\".to_owned()))?\n\n .as_str()\n\n .ok_or_else(|| LoadError::BadType(\"meta.code\".to_owned()))?;\n\n let nidx = depgraph.add_node(i);\n\n codes.insert(code.to_owned(), nidx);\n\n nodes.push(nidx);\n\n }\n\n for (i, doc) in docs.iter().enumerate() {\n", "file_path": "src/arch.rs", "rank": 35, "score": 8.54978299581617 }, { "content": "\n\n fn load_single_toml(&mut self, doc: &toml::Value) -> Result<(), LoadError> {\n\n #[allow(non_snake_case)]\n\n let MissingNode = |s: &'static str| LoadError::MissingNode(s.to_owned());\n\n #[allow(non_snake_case)]\n\n let BadType = |s: &'static str| LoadError::BadType(s.to_owned());\n\n\n\n let meta = doc.get(\"meta\").ok_or_else(|| MissingNode(\"meta\"))?;\n\n let consts = doc.get(\"consts\");\n\n let registers = doc.get(\"registers\");\n\n let instruction_formats = doc.get(\"instruction_formats\");\n\n let instructions = doc.get(\"instructions\");\n\n\n\n self.loaded_names.push(\n\n 
meta.get(\"name\")\n\n .ok_or_else(|| MissingNode(\"meta.name\"))?\n\n .as_str()\n\n .ok_or_else(|| BadType(\"meta.name\"))?\n\n .to_owned(),\n\n );\n", "file_path": "src/arch.rs", "rank": 37, "score": 8.364117402058088 }, { "content": "#![feature(box_syntax)]\n\n#![feature(box_patterns)]\n\n#![warn(clippy::all)]\n\n#![allow(dead_code)]\n\nmod arch;\n\nmod emit;\n\nmod grammar;\n\nmod parser;\n\nmod test;\n\n\n\nuse emit::flatbin;\n\nuse std::io::prelude::*;\n\nuse std::path::PathBuf;\n\nuse structopt::StructOpt;\n\n\n\n#[derive(Debug, Copy, Clone, StructOpt)]\n", "file_path": "src/main.rs", "rank": 39, "score": 8.20561694088268 }, { "content": " let fp = fp.unwrap();\n\n let mut loaded = false;\n\n for path in std_paths.iter() {\n\n let mut p: PathBuf = path.clone();\n\n p.push(&fp.as_str().to_ascii_lowercase());\n\n p.set_extension(\"toml\");\n\n if let Ok(strdata) = std::fs::read_to_string(&p) {\n\n docs.push(Self::string_to_toml(&strdata)?);\n\n if verbose {\n\n let pstr = p.as_os_str().to_string_lossy();\n\n eprintln!(\"Found {} spec in {}\", fp.as_str(), pstr);\n\n }\n\n loaded = true;\n\n break;\n\n }\n\n }\n\n if !loaded {\n\n return Err(LoadError::RequirementNotFound(fp.as_str().to_owned()));\n\n }\n\n }\n", "file_path": "src/arch.rs", "rank": 40, "score": 7.9197562854279315 }, { "content": " for (fname, fv) in ifields.iter() {\n\n let fv = Self::toml_int(\n\n &self.consts,\n\n format!(\"instructions.{}.fields[{}]\", iname, fname),\n\n fv,\n\n )?;\n\n let fi = fmt\n\n .fields\n\n .iter()\n\n .position(|x| x.name == fname.as_ref())\n\n .ok_or_else(|| {\n\n LoadError::BadInstructionFormat(format!(\n\n \"instructions.{}.fields[{}]\",\n\n iname, fname\n\n ))\n\n })?;\n\n insn.fields.push((fi, fv as u64));\n\n }\n\n\n\n if self\n", "file_path": "src/arch.rs", "rank": 41, "score": 7.909469144839537 }, { "content": " use std::convert::TryInto;\n\n let bin: Vec<u8>;\n\n\n\n match opt.output_format {\n\n OutputFormat::Flat => {\n\n let ebin = 
flatbin::emit_flat_binary(&rv, &ast);\n\n if let Err(e) = ebin {\n\n eprintln!(\"Binary emission error: {:?}\", e);\n\n std::process::exit(1);\n\n } else {\n\n bin = ebin.unwrap();\n\n }\n\n }\n\n }\n\n\n\n if opt.print_binary {\n\n println!(\"Binary assembly:\");\n\n let mut cnt = 0;\n\n for word in bin.chunks(4) {\n\n let word: [u8; 4] = word.try_into().unwrap();\n", "file_path": "src/main.rs", "rank": 42, "score": 7.8792035890279 }, { "content": " .as_array()\n\n .ok_or_else(|| LoadError::BadType(format!(\"instructions.{}.args\", iname)))?;\n\n\n\n let ifields = itable\n\n .get(\"fields\")\n\n .ok_or_else(|| {\n\n LoadError::MissingNode(format!(\"instructions.{}.fields\", iname))\n\n })?\n\n .as_table()\n\n .ok_or_else(|| LoadError::BadType(format!(\"instructions.{}.fields\", iname)))?;\n\n\n\n let mut insn = InstructionDefinition::new(iname.clone());\n\n\n\n insn.format_idx = self\n\n .instruction_formats\n\n .iter()\n\n .position(|x| x.name == iformat)\n\n .ok_or_else(|| {\n\n LoadError::BadInstructionFormat(format!(\"instructions.{}.format\", iname))\n\n })?;\n", "file_path": "src/arch.rs", "rank": 43, "score": 7.832747544867474 }, { "content": " }\n\n }\n\n\n\n // resolve graph\n\n let res = petgraph::algo::toposort(&depgraph, None);\n\n if res.is_err() {\n\n return Err(LoadError::DependencyCycle);\n\n }\n\n let res = res.unwrap();\n\n for node in res.iter().rev() {\n\n self.load_single_toml(&docs[depgraph[*node]])?;\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n fn string_to_toml(content: &str) -> Result<toml::Value, LoadError> {\n\n use toml::Value;\n\n content\n\n .parse::<Value>()\n", "file_path": "src/arch.rs", "rank": 44, "score": 7.816475923426443 }, { "content": " pub fn get_instruction_by_name(&self, name: &str) -> Option<&InstructionDefinition> {\n\n self.instruction_name_lookup\n\n .get(&name.to_ascii_lowercase())\n\n .and_then(|i| self.get_instruction(*i))\n\n }\n\n\n\n pub fn get_all_instructions(&self) -> &[InstructionDefinition] {\n\n 
&self.instructions\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub enum LoadError {\n\n MalformedTOML,\n\n InvalidArchSpec,\n\n DependencyCycle,\n\n RequirementNotFound(String),\n\n ConstNotFound(String),\n\n MissingNode(String),\n\n BadType(String),\n", "file_path": "src/arch.rs", "rank": 45, "score": 7.744357013490189 }, { "content": " pub fn get_main_name(&self) -> Option<&str> {\n\n self.names.get(0).map(|x| x.as_ref())\n\n }\n\n\n\n pub fn get_abi_name(&self) -> Option<&str> {\n\n self.names\n\n .get(1)\n\n .or_else(|| self.names.get(0))\n\n .map(|x| x.as_ref())\n\n }\n\n}\n\n\n\n// Values' [last:first] bits map onto instructions' [first+vlast-vfirst:first] bits.\n\n#[derive(Copy, Clone, Debug, Default)]\n\npub struct BitRangeMap {\n\n pub value_last: i32,\n\n pub value_first: i32,\n\n pub instruction_first: i32,\n\n}\n\n\n", "file_path": "src/arch.rs", "rank": 46, "score": 7.3124599597423146 }, { "content": " let instructions = instructions\n\n .as_table()\n\n .ok_or_else(|| BadType(\"instructions\"))?;\n\n for (iname, itable) in instructions.iter() {\n\n let iname = iname.to_ascii_lowercase();\n\n let itable = itable\n\n .as_table()\n\n .ok_or_else(|| LoadError::BadType(format!(\"instructions.{}\", iname)))?;\n\n\n\n let iformat = itable\n\n .get(\"format\")\n\n .ok_or_else(|| {\n\n LoadError::MissingNode(format!(\"instructions.{}.format\", iname))\n\n })?\n\n .as_str()\n\n .ok_or_else(|| LoadError::BadType(format!(\"instructions.{}.format\", iname)))?;\n\n\n\n let iargs = itable\n\n .get(\"args\")\n\n .ok_or_else(|| LoadError::MissingNode(format!(\"instructions.{}.args\", iname)))?\n", "file_path": "src/arch.rs", "rank": 47, "score": 7.116535283809716 }, { "content": " let entry = entry.unwrap();\n\n if !entry.file_type().unwrap().is_file() {\n\n continue;\n\n }\n\n let epath = entry.path();\n\n let path = epath.to_str().unwrap();\n\n if path.ends_with(\".s\") {\n\n eprint!(\" * parsing {} ...\", path);\n\n ast_from_file(path, &rv).expect(\"Testcase 
parsing failed\");\n\n eprintln!(\"ok\");\n\n }\n\n }\n\n}\n", "file_path": "src/test.rs", "rank": 48, "score": 7.015630997853608 }, { "content": " let sb = b.emitter_simplify(const_provider, pc);\n\n (Shl(box sa.0, box sb.0).simplify(), sa.1 && sb.1)\n\n }\n\n Shr(box a, box b) => {\n\n let sa = a.emitter_simplify(const_provider, pc);\n\n let sb = b.emitter_simplify(const_provider, pc);\n\n (Shr(box sa.0, box sb.0).simplify(), sa.1 && sb.1)\n\n }\n\n Ashr(box a, box b) => {\n\n let sa = a.emitter_simplify(const_provider, pc);\n\n let sb = b.emitter_simplify(const_provider, pc);\n\n (Ashr(box sa.0, box sb.0).simplify(), sa.1 && sb.1)\n\n }\n\n\n\n Argument(box node) => {\n\n let s = node.emitter_simplify(const_provider, pc);\n\n (Argument(box s.0), s.1)\n\n }\n\n Instruction(iname, args) => {\n\n let mut succ = true;\n", "file_path": "src/parser.rs", "rank": 49, "score": 6.763457394144869 }, { "content": " x:(@) \"+\" y:@ { Node::Plus(box x, box y).simplify() }\n\n x:(@) \"-\" y:@ { Node::Minus(box x, box y).simplify() }\n\n --\n\n x:(@) \"*\" y:@ { Node::Times(box x, box y).simplify() }\n\n x:(@) \"/\" y:@ { Node::Divide(box x, box y).simplify() }\n\n --\n\n a:expr_atom() {a}\n\n}\n\n\n\npub rule label() -> Node = whitespace()? i:idstr() whitespace()? \":\" { Node::Label(i.to_owned()) } / expected!(\"label\")\n\npub rule argument() -> Node = whitespace()? e:(register() / expression()) whitespace()? {Node::Argument(box e)}\n\nrule instruction0() -> Node = whitespace()? nm:idstr() whitespace()? { Node::Instruction(nm.to_owned(), vec![]) }\n\nrule instruction1() -> Node = whitespace()? nm:idstr() whitespace() a0:argument() whitespace()? { Node::Instruction(nm.to_owned(), vec![a0]) }\n\nrule instructionN() -> Node = whitespace()? 
nm:idstr() whitespace() a0:argument() aN:( \",\" an:argument() {an} )+ {\n\n let mut v = aN;\n\n v.insert(0, a0);\n\n Node::Instruction(nm.to_owned(), v)\n\n}\n\npub rule instruction() -> Node = instructionN() / instruction1() / instruction0() / expected!(\"instruction\")\n\n\n\npub rule top_element() -> Node = (whitespace() / newline())* n:(label() / instruction()) {n}\n\npub rule top_level() -> Node = n:(top_element()*) (whitespace() / newline())* { Node::Root(n) }\n\n\n\n}}\n\n\n\n//include!{\"../expanded.rs\"}\n\n\n\npub use asmpeg::top_level;\n", "file_path": "src/grammar.rs", "rank": 51, "score": 6.687088358162496 }, { "content": " let mut fld = InstructionField {\n\n name: fldname.to_owned(),\n\n vtype: FieldType::Value,\n\n length: 0,\n\n encoding: Default::default(),\n\n };\n\n let fldtype = fldtable\n\n .get(\"type\")\n\n .ok_or_else(|| {\n\n LoadError::MissingNode(format!(\n\n \"instruction_formats.{}.{}.type\",\n\n fmtname, fldname\n\n ))\n\n })?\n\n .as_str()\n\n .ok_or_else(|| {\n\n LoadError::BadType(format!(\n\n \"instruction_formats.{}.{}.type\",\n\n fmtname, fldname\n\n ))\n", "file_path": "src/arch.rs", "rank": 53, "score": 6.449080920943529 }, { "content": " .ok_or_else(|| BadType(\"meta.requires\"))?;\n\n for rq in list.iter() {\n\n let code = rq.as_str().ok_or_else(|| BadType(\"meta.requires item\"))?;\n\n if !self.loaded_codes.iter().any(|s| s == code) {\n\n return Err(LoadError::RequirementNotFound(code.to_owned()));\n\n }\n\n }\n\n }\n\n\n\n // parse consts\n\n if let Some(consts) = consts {\n\n let consts = consts.as_table().ok_or_else(|| BadType(\"consts\"))?;\n\n for (k, v) in consts.iter() {\n\n let intvalue = Self::toml_int(&self.consts, format!(\"consts.{}\", k), v)? 
as u64;\n\n self.consts.insert(k.to_owned(), intvalue);\n\n }\n\n }\n\n\n\n // parse registers\n\n if let Some(registers) = registers {\n", "file_path": "src/arch.rs", "rank": 54, "score": 6.299830425423104 }, { "content": "pub enum FieldType {\n\n Register,\n\n Value,\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct InstructionField {\n\n pub name: String,\n\n pub vtype: FieldType,\n\n /// Total length of the value in bits\n\n pub length: i32,\n\n pub encoding: SmallVec<[BitRangeMap; 2]>,\n\n}\n\n\n\nimpl InstructionField {\n\n fn calculate_last_encoded_bit_index(&self) -> i32 {\n\n self.encoding\n\n .iter()\n\n .map(|e| e.instruction_last())\n\n .max()\n", "file_path": "src/arch.rs", "rank": 55, "score": 5.855783961157906 }, { "content": " })?;\n\n match fldtype {\n\n \"value\" => {\n\n fld.vtype = FieldType::Value;\n\n }\n\n \"register\" => {\n\n fld.vtype = FieldType::Register;\n\n }\n\n _ => {\n\n return Err(LoadError::BadType(format!(\n\n \"instruction_formats.{}.{}.type\",\n\n fmtname, fldname\n\n )));\n\n }\n\n }\n\n fld.length = Self::toml_int(\n\n &self.consts,\n\n format!(\"instruction_formats.{}.{}.length\", fmtname, fldname),\n\n fldtable.get(\"length\").ok_or_else(|| {\n\n LoadError::MissingNode(format!(\n", "file_path": "src/arch.rs", "rank": 56, "score": 5.698895157821562 }, { "content": "\n\n let ast;\n\n if let Some(ref istr) = opt.input_string {\n\n ast = parser::ast_from_str(&istr.replace(\";\", \"\\n\"), &rv);\n\n } else {\n\n ast = parser::ast_from_file(\n\n opt.input_file\n\n .as_ref()\n\n .unwrap()\n\n .to_str()\n\n .expect(\"Invalid Unicode in specified file path\"),\n\n &rv,\n\n );\n\n }\n\n if let Err(e) = ast {\n\n eprintln!(\"Parse error: {:?}\", e);\n\n std::process::exit(1);\n\n }\n\n let ast = ast.unwrap();\n\n\n", "file_path": "src/main.rs", "rank": 57, "score": 5.569181223283449 }, { "content": "\n\n // Registers\n\n\n\n pub fn get_register(&self, rnum: i32) -> Option<&Register> {\n\n self.registers.get(&rnum)\n\n }\n\n\n\n pub fn 
get_register_by_name(&self, rname: &str) -> Option<&Register> {\n\n self.register_name_lookup\n\n .get(rname)\n\n .and_then(|i| self.get_register(*i))\n\n }\n\n\n\n pub fn get_all_registers(&self) -> &HashMap<i32, Register> {\n\n &self.registers\n\n }\n\n\n\n // Instruction Formats\n\n\n\n pub fn get_instruction_format(&self, index: usize) -> Option<&InstructionFormat> {\n", "file_path": "src/arch.rs", "rank": 58, "score": 5.523389967615663 }, { "content": " format!(\"instruction_formats.{}.{}.encoding[][]\", fmtname, fldname),\n\n &subenc[1],\n\n )? as i32;\n\n let ibegin = Self::toml_int(\n\n &self.consts,\n\n format!(\"instruction_formats.{}.{}.encoding[][]\", fmtname, fldname),\n\n &subenc[2],\n\n )? as i32;\n\n\n\n fld.encoding.push(BitRangeMap::new(vend, vbegin, ibegin));\n\n }\n\n fmt.fields.push(fld);\n\n }\n\n fmt.ilen = fmt.calculate_last_encoded_bit_index() as usize + 1;\n\n self.instruction_formats.push(fmt);\n\n }\n\n }\n\n\n\n // parse instructions\n\n if let Some(instructions) = instructions {\n", "file_path": "src/arch.rs", "rank": 59, "score": 5.5129078172824855 }, { "content": " self.instruction_formats.get(index)\n\n }\n\n\n\n pub fn get_instruction_format_by_name(&self, name: &str) -> Option<&InstructionFormat> {\n\n self.instruction_name_lookup\n\n .get(name)\n\n .and_then(|i| self.get_instruction_format(*i))\n\n }\n\n\n\n pub fn get_all_instruction_formats(&self) -> &[InstructionFormat] {\n\n &self.instruction_formats\n\n }\n\n\n\n // Instructions\n\n\n\n pub fn get_instruction(&self, index: usize) -> Option<&InstructionDefinition> {\n\n self.instructions.get(index)\n\n }\n\n\n\n /// Automatically converts name to lowercase\n", "file_path": "src/arch.rs", "rank": 60, "score": 5.224618661549011 }, { "content": " let registers = registers.as_table().ok_or_else(|| BadType(\"registers\"))?;\n\n if let Some(register_names) = registers.get(\"names\") {\n\n let register_names = register_names\n\n .as_table()\n\n .ok_or_else(|| 
BadType(\"registers.names\"))?;\n\n for (number, names) in register_names.iter() {\n\n let number: i32 = number.parse().map_err(|_| {\n\n LoadError::BadType(format!(\"registers.names.{} key\", number))\n\n })?;\n\n let names = names.as_array().ok_or_else(|| {\n\n LoadError::BadType(format!(\"registers.names.{} value\", number))\n\n })?;\n\n self.registers\n\n .entry(number)\n\n .or_insert_with(|| Register::new(number));\n\n let mut newnames = Vec::new();\n\n for name in names.iter() {\n\n let name = name.as_str().ok_or_else(|| {\n\n LoadError::BadType(format!(\"registers.names.{} element\", number))\n\n })?;\n", "file_path": "src/arch.rs", "rank": 61, "score": 5.029952191298472 }, { "content": " let meta = doc\n\n .get(\"meta\")\n\n .ok_or_else(|| LoadError::MissingNode(\"meta\".to_owned()))?;\n\n let requires = meta.get(\"requires\");\n\n if let Some(requires) = requires {\n\n let requires = requires\n\n .as_array()\n\n .ok_or_else(|| LoadError::BadType(\"meta.requires\".to_owned()))?;\n\n for rq in requires.iter() {\n\n let code = rq\n\n .as_str()\n\n .ok_or_else(|| LoadError::BadType(\"meta.requires item\".to_owned()))?;\n\n let other = codes.get(code);\n\n if other.is_none() {\n\n return Err(LoadError::RequirementNotFound(code.to_owned()));\n\n }\n\n let nidx = nodes[i];\n\n let oidx = *other.unwrap();\n\n depgraph.add_edge(nidx, oidx, ());\n\n }\n", "file_path": "src/arch.rs", "rank": 62, "score": 4.92051776901935 }, { "content": " newnames.push(name.to_owned());\n\n }\n\n self.registers.get_mut(&number).unwrap().names = newnames;\n\n }\n\n }\n\n if let Some(register_lengths) = registers.get(\"lengths\") {\n\n let register_lengths = register_lengths\n\n .as_table()\n\n .ok_or_else(|| BadType(\"registers.lengths\"))?;\n\n for (number, length) in register_lengths.iter() {\n\n let number: i32 = number.parse().map_err(|_| {\n\n LoadError::BadType(format!(\"registers.lengths.{} key\", number))\n\n })?;\n\n let length = Self::toml_int(\n\n &self.consts,\n\n 
format!(\"registers.lengths.{} value\", number),\n\n length,\n\n )? as i32;\n\n self.registers\n\n .entry(number)\n", "file_path": "src/arch.rs", "rank": 63, "score": 4.844990893725074 }, { "content": " .or_insert_with(|| Register::new(number));\n\n self.registers.get_mut(&number).unwrap().size_in_bits = length;\n\n }\n\n }\n\n }\n\n\n\n // parse instruction_formats\n\n if let Some(instruction_formats) = instruction_formats {\n\n let instruction_formats = instruction_formats\n\n .as_table()\n\n .ok_or_else(|| BadType(\"instruction_formats\"))?;\n\n for (fmtname, fmttable) in instruction_formats.iter() {\n\n let fmttable = fmttable.as_table().ok_or_else(|| {\n\n LoadError::BadType(format!(\"instruction_formats.{}\", fmtname))\n\n })?;\n\n let mut fmt = InstructionFormat::new(fmtname.to_owned());\n\n for (fldname, fldtable) in fmttable.iter() {\n\n let fldtable = fldtable.as_table().ok_or_else(|| {\n\n LoadError::BadType(format!(\"instruction_formats.{}.{}\", fmtname, fldname))\n\n })?;\n", "file_path": "src/arch.rs", "rank": 64, "score": 4.805807506577706 }, { "content": " let cloned_t = || (self.clone(), true);\n\n match self {\n\n Identifier(ident) => const_provider(ident)\n\n .map(|v| (Integer(v), true))\n\n .unwrap_or_else(cloned_f),\n\n Label(lname) => const_provider(lname)\n\n .map(|v| (Integer(v), true))\n\n .unwrap_or_else(cloned_f),\n\n\n\n Integer(v) => (Integer(*v), true),\n\n StringLiteral(_) => cloned_t(),\n\n Register(_) => cloned_t(),\n\n PcValue => (Integer(pc), true),\n\n\n\n Negation(box a) => {\n\n let sa = a.emitter_simplify(const_provider, pc);\n\n (Negation(box sa.0).simplify(), sa.1)\n\n }\n\n Plus(box a, box b) => {\n\n let sa = a.emitter_simplify(const_provider, pc);\n", "file_path": "src/parser.rs", "rank": 65, "score": 4.018793466350109 }, { "content": " \"instruction_formats.{}.{}.length\",\n\n fmtname, fldname\n\n ))\n\n })?,\n\n )? 
as i32;\n\n let fldencoding = fldtable\n\n .get(\"encoding\")\n\n .ok_or_else(|| {\n\n LoadError::MissingNode(format!(\n\n \"instruction_formats.{}.{}.encoding\",\n\n fmtname, fldname\n\n ))\n\n })?\n\n .as_array()\n\n .ok_or_else(|| {\n\n LoadError::BadType(format!(\n\n \"instruction_formats.{}.{}.encoding\",\n\n fmtname, fldname\n\n ))\n\n })?;\n", "file_path": "src/arch.rs", "rank": 66, "score": 3.763416920021792 }, { "content": " }\n\n for cfg in opt.cfg {\n\n if let Err(e) = rv.load_single_cfg_file(&cfg) {\n\n let pstr = cfg.as_os_str().to_string_lossy();\n\n eprintln!(\n\n \"Error loading additional configuration from {}: {:?}\",\n\n pstr, e\n\n );\n\n std::process::exit(1);\n\n }\n\n }\n\n\n\n if opt.verbose {\n\n for abi in rv.get_loaded_abis() {\n\n println!(\n\n \"Loaded ABI: {} - '{}' based on spec '{}'\",\n\n abi.code, abi.name, abi.spec\n\n );\n\n }\n\n }\n", "file_path": "src/main.rs", "rank": 67, "score": 3.5221243588410283 }, { "content": " for val in fldencoding.iter() {\n\n let subenc = val.as_array().ok_or_else(|| {\n\n LoadError::BadType(format!(\n\n \"instruction_formats.{}.{}.encoding[] element\",\n\n fmtname, fldname\n\n ))\n\n })?;\n\n if subenc.len() != 3 {\n\n return Err(LoadError::BadType(format!(\n\n \"instruction_formats.{}.{}.encoding[][] length (must be 3)\",\n\n fmtname, fldname\n\n )));\n\n }\n\n let vend = Self::toml_int(\n\n &self.consts,\n\n format!(\"instruction_formats.{}.{}.encoding[][]\", fmtname, fldname),\n\n &subenc[0],\n\n )? 
as i32;\n\n let vbegin = Self::toml_int(\n\n &self.consts,\n", "file_path": "src/arch.rs", "rank": 68, "score": 3.3260249484799895 }, { "content": " let sb = b.emitter_simplify(const_provider, pc);\n\n (Plus(box sa.0, box sb.0).simplify(), sa.1 && sb.1)\n\n }\n\n Minus(box a, box b) => {\n\n let sa = a.emitter_simplify(const_provider, pc);\n\n let sb = b.emitter_simplify(const_provider, pc);\n\n (Minus(box sa.0, box sb.0).simplify(), sa.1 && sb.1)\n\n }\n\n Times(box a, box b) => {\n\n let sa = a.emitter_simplify(const_provider, pc);\n\n let sb = b.emitter_simplify(const_provider, pc);\n\n (Times(box sa.0, box sb.0).simplify(), sa.1 && sb.1)\n\n }\n\n Divide(box a, box b) => {\n\n let sa = a.emitter_simplify(const_provider, pc);\n\n let sb = b.emitter_simplify(const_provider, pc);\n\n (Divide(box sa.0, box sb.0).simplify(), sa.1 && sb.1)\n\n }\n\n Shl(box a, box b) => {\n\n let sa = a.emitter_simplify(const_provider, pc);\n", "file_path": "src/parser.rs", "rank": 69, "score": 3.0123467743296493 }, { "content": " .instruction_name_lookup\n\n .insert(iname.clone(), self.instructions.len())\n\n .is_some()\n\n {\n\n return Err(LoadError::DuplicateInstruction(iname.clone()));\n\n }\n\n self.instructions.push(insn);\n\n }\n\n }\n\n\n\n // update register name mapping\n\n self.register_name_lookup.clear();\n\n for (num, reg) in self.registers.iter() {\n\n for name in reg.names.iter() {\n\n self.register_name_lookup.insert(name.to_owned(), *num);\n\n }\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/arch.rs", "rank": 70, "score": 2.9885770918439274 }, { "content": "pub mod flatbin;\n", "file_path": "src/emit/mod.rs", "rank": 71, "score": 2.769157264281418 } ]
Rust
anvil/src/eth/backend/executor.rs
Rjected/dapptools-rs
b11b776934cce2a0e70ce4879e7a05c9a34ac008
use crate::eth::{ backend::{db::Db, validate::TransactionValidator}, error::InvalidTransactionError, pool::transactions::PoolTransaction, }; use anvil_core::eth::{ block::{Block, BlockInfo, Header, PartialHeader}, receipt::{EIP1559Receipt, EIP2930Receipt, EIP658Receipt, Log, TypedReceipt}, transaction::{PendingTransaction, TransactionInfo, TypedTransaction}, trie, }; use ethers::{ abi::ethereum_types::BloomInput, types::{Bloom, H256, U256}, utils::rlp, }; use foundry_evm::{ executor::inspector::Tracer, revm, revm::{BlockEnv, CfgEnv, Env, Return, TransactOut}, trace::node::CallTraceNode, }; use std::sync::Arc; use tracing::{trace, warn}; pub struct ExecutedTransaction { transaction: Arc<PoolTransaction>, exit: Return, out: TransactOut, gas: u64, logs: Vec<Log>, traces: Vec<CallTraceNode>, } impl ExecutedTransaction { fn create_receipt(&self) -> TypedReceipt { let used_gas: U256 = self.gas.into(); let mut bloom = Bloom::default(); logs_bloom(self.logs.clone(), &mut bloom); let logs = self.logs.clone(); let status_code: u8 = if self.exit as u8 <= Return::SelfDestruct as u8 { 1 } else { 0 }; match &self.transaction.pending_transaction.transaction { TypedTransaction::Legacy(_) => TypedReceipt::Legacy(EIP658Receipt { status_code, gas_used: used_gas, logs_bloom: bloom, logs, }), TypedTransaction::EIP2930(_) => TypedReceipt::EIP2930(EIP2930Receipt { status_code, gas_used: used_gas, logs_bloom: bloom, logs, }), TypedTransaction::EIP1559(_) => TypedReceipt::EIP1559(EIP1559Receipt { status_code, gas_used: used_gas, logs_bloom: bloom, logs, }), } } } #[derive(Debug, Clone)] pub struct ExecutedTransactions { pub block: BlockInfo, pub included: Vec<Arc<PoolTransaction>>, pub invalid: Vec<Arc<PoolTransaction>>, } pub struct TransactionExecutor<'a, Db: ?Sized, Validator: TransactionValidator> { pub db: &'a mut Db, pub validator: Validator, pub pending: std::vec::IntoIter<Arc<PoolTransaction>>, pub block_env: BlockEnv, pub cfg_env: CfgEnv, pub parent_hash: H256, pub gas_used: U256, 
} impl<'a, DB: Db + ?Sized, Validator: TransactionValidator> TransactionExecutor<'a, DB, Validator> { pub fn execute(mut self, timestamp: u64) -> ExecutedTransactions { let mut transactions = Vec::new(); let mut transaction_infos = Vec::new(); let mut receipts = Vec::new(); let mut bloom = Bloom::default(); let mut cumulative_gas_used = U256::zero(); let mut invalid = Vec::new(); let mut included = Vec::new(); let gas_limit = self.block_env.gas_limit; let parent_hash = self.parent_hash; let block_number = self.block_env.number; let difficulty = self.block_env.difficulty; let beneficiary = self.block_env.coinbase; for (idx, tx) in self.enumerate() { let tx = match tx { TransactionExecutionOutcome::Executed(tx) => { included.push(tx.transaction.clone()); tx } TransactionExecutionOutcome::Exhausted(_) => continue, TransactionExecutionOutcome::Invalid(tx, _) => { invalid.push(tx); continue } }; let receipt = tx.create_receipt(); cumulative_gas_used = cumulative_gas_used.saturating_add(receipt.gas_used()); let ExecutedTransaction { transaction, logs, out, traces, .. 
} = tx; logs_bloom(logs.clone(), &mut bloom); let contract_address = if let TransactOut::Create(_, contract_address) = out { trace!(target: "backend", "New contract deployed: at {:?}", contract_address); contract_address } else { None }; let info = TransactionInfo { transaction_hash: *transaction.hash(), transaction_index: idx as u32, from: *transaction.pending_transaction.sender(), to: transaction.pending_transaction.transaction.to().copied(), contract_address, logs, logs_bloom: *receipt.logs_bloom(), traces, }; transaction_infos.push(info); receipts.push(receipt); transactions.push(transaction.pending_transaction.transaction.clone()); } let ommers: Vec<Header> = Vec::new(); let receipts_root = trie::ordered_trie_root(receipts.iter().map(rlp::encode)); let partial_header = PartialHeader { parent_hash, beneficiary, state_root: self.db.maybe_state_root().unwrap_or_default(), receipts_root, logs_bloom: bloom, difficulty, number: block_number, gas_limit, gas_used: cumulative_gas_used, timestamp, extra_data: Default::default(), mix_hash: Default::default(), nonce: Default::default(), }; let block = Block::new(partial_header, transactions.clone(), ommers); let block = BlockInfo { block, transactions: transaction_infos, receipts }; ExecutedTransactions { block, included, invalid } } fn env_for(&self, tx: &PendingTransaction) -> Env { Env { cfg: self.cfg_env.clone(), block: self.block_env.clone(), tx: tx.to_revm_tx_env() } } } pub enum TransactionExecutionOutcome { Executed(ExecutedTransaction), Invalid(Arc<PoolTransaction>, InvalidTransactionError), Exhausted(Arc<PoolTransaction>), } impl<'a, 'b, DB: Db + ?Sized, Validator: TransactionValidator> Iterator for &'b mut TransactionExecutor<'a, DB, Validator> { type Item = TransactionExecutionOutcome; fn next(&mut self) -> Option<Self::Item> { let transaction = self.pending.next()?; let account = self.db.basic(*transaction.pending_transaction.sender()); let env = self.env_for(&transaction.pending_transaction); let max_gas = 
self.gas_used.saturating_add(U256::from(env.tx.gas_limit)); if max_gas > env.block.gas_limit { return Some(TransactionExecutionOutcome::Exhausted(transaction)) } if let Err(err) = self.validator.validate_pool_transaction_for( &transaction.pending_transaction, &account, &env, ) { warn!(target: "backend", "Skipping invalid tx execution [{:?}] {}", transaction.hash(), err); return Some(TransactionExecutionOutcome::Invalid(transaction, err)) } let mut evm = revm::EVM::new(); evm.env = env; evm.database(&mut self.db); let mut tracer = Tracer::default(); trace!(target: "backend", "[{:?}] executing", transaction.hash()); let (exit, out, gas, logs) = evm.inspect_commit(&mut tracer); if exit == Return::OutOfGas { warn!(target: "backend", "[{:?}] executed with out of gas", transaction.hash()) } trace!(target: "backend", "[{:?}] executed with out={:?}, gas ={}", transaction.hash(), out, gas); self.gas_used.saturating_add(U256::from(gas)); trace!(target: "backend::executor", "transacted [{:?}], result: {:?} gas {}", transaction.hash(), exit, gas); let tx = ExecutedTransaction { transaction, exit, out, gas, logs: logs.into_iter().map(Into::into).collect(), traces: tracer.traces.arena, }; Some(TransactionExecutionOutcome::Executed(tx)) } } fn logs_bloom(logs: Vec<Log>, bloom: &mut Bloom) { for log in logs { bloom.accrue(BloomInput::Raw(&log.address[..])); for topic in log.topics { bloom.accrue(BloomInput::Raw(&topic[..])); } } }
use crate::eth::{ backend::{db::Db, validate::TransactionValidator}, error::InvalidTransactionError, pool::transactions::PoolTransaction, }; use anvil_core::eth::{ block::{Block, BlockInfo, Header, PartialHeader}, receipt::{EIP1559Receipt, EIP2930Receipt, EIP658Receipt, Log, TypedReceipt}, transaction::{PendingTransaction, TransactionInfo, TypedTransaction}, trie, }; use ethers::{ abi::ethereum_types::BloomInput, types::{Bloom, H256, U256}, utils::rlp, }; use foundry_evm::{ executor::inspector::Tracer, revm, revm::{BlockEnv, CfgEnv, Env, Return, TransactOut}, trace::node::CallTraceNode, }; use std::sync::Arc; use tracing::{trace, warn}; pub struct ExecutedTransaction { transaction: Arc<PoolTransaction>, exit: Return, out: TransactOut, gas: u64, logs: Vec<Log>, traces: Vec<CallTraceNode>, } impl ExecutedTransaction { fn create_receipt(&self) -> TypedReceipt { let used_gas: U256 = self.gas.into(); let mut bloom = Bloom::default(); logs_bloom(self.logs.clone(), &mut bloom); let logs = self.logs.clone(); let status_code: u8 = if self.exit as u8 <= Return::SelfDestruct as u8 { 1 } else { 0 }; match &self.transaction.pending_transaction.transaction { TypedTransaction::Legacy(_) => TypedReceipt::Legacy(EIP658Receipt { status_code, gas_used: used_gas, logs_bloom: bloom, logs, }), TypedTransaction::EIP2930(_) => TypedReceipt::EIP2930(EIP2930Receipt { status_code, gas_used: used_gas, logs_bloom: bloom, logs, }), TypedTransaction::EIP1559(_) => TypedReceipt::EIP1559(EIP1559Receipt { status_code, gas_used: used_gas, logs_bloom: bloom, logs, }), } } } #[derive(Debug, Clone)] pub struct ExecutedTransactions { pub block: BlockInfo, pub included: Vec<Arc<PoolTransaction>>, pub invalid: Vec<Arc<PoolTransaction>>, } pub struct TransactionExecutor<'a, Db: ?Sized, Validator: TransactionValidator> { pub db: &'a mut Db, pub validator: Validator, pub pending: std::vec::IntoIter<Arc<PoolTransaction>>, pub block_env: BlockEnv, pub cfg_env: CfgEnv, pub parent_hash: H256, pub gas_used: U256, 
} impl<'a, DB: Db + ?Sized, Validator: TransactionValidator> TransactionExecutor<'a, DB, Validator> { pub fn execute(mut self, timestamp: u64) -> ExecutedTransactions { let mut transactions = Vec::new(); let mut transaction_infos = Vec::new(); let mut receipts = Vec::new(); let mut bloom = Bloom::default(); let mut cumulative_gas_used = U256::zero(); let mut invalid = Vec::new(); let mut included = Vec::new(); let gas_limit = self.block_env.gas_limit; let parent_hash = self.parent_hash; let block_number = self.block_env.number; let difficulty = self.block_env.difficulty; let beneficiary = self.block_env.coinbase; for (idx, tx) in self.enumerate() { let tx = match tx { TransactionExecutionOutcome::Executed(tx) => { included.push(tx.transaction.clone()); tx } TransactionExecutionOutcome::Exhausted(_) => continue, TransactionExecutionOutcome::Invalid(tx, _) => { invalid.push(tx); continue } }; let receipt = tx.create_receipt(); cumulative_gas_used = cumulative_gas_used.saturating_add(receipt.gas_used()); let ExecutedTransaction { transaction, logs, out, traces, .. 
} = tx; logs_bloom(logs.clone(), &mut bloom); let contract_address = if let TransactOut::Create(_, contract_address) = out { trace!(target: "backend", "New contract deployed: at {:?}", contract_address); contract_address } else { None }; let info = TransactionInfo { transaction_hash: *transaction.hash(), transaction_index: idx as u32, from: *transaction.pending_transaction.sender(), to: transaction.pending_transaction.transaction.to().copied(), contract_address, logs, logs_bloom: *receipt.logs_bloom(), traces, }; transaction_infos.push(info); receipts.push(receipt); transactions.push(transaction.pending_transaction.transaction.clone()); } let ommers: Vec<Header> = Vec::new(); let receipts_root = trie::ordered_trie_root(receipts.iter().map(rlp::encode)); let partial_header = PartialHeader { parent_hash, beneficiary, state_root: self.db.maybe_state_root().unwrap_or_default(), receipts_root, logs_bloom: bloom, difficulty, number: block_number, gas_limit, gas_used: cumulative_gas_used, timestamp, extra_data: Default::default(), mix_hash: Default::default(), nonce: Default::default(), }; let block = Block::new(partial_header, transactions.clone(), ommers); let block = BlockInfo { block, transactions: transaction_infos, receipts }; ExecutedTransactions { block, included, invalid } } fn env_for(&self, tx: &PendingTransaction) -> Env { Env { cfg: self.cfg_env.clone(), block: self.block_env.clone(), tx: tx.to_revm_tx_env() } } } pub enum TransactionExecutionOutcome { Executed(ExecutedTransaction), Invalid(Arc<PoolTransaction>, InvalidTransactionError), Exhausted(Arc<PoolTransaction>), } impl<'a, 'b, DB: Db + ?Sized, Validator: TransactionValidator> Iterator for &'b mut TransactionExecutor<'a, DB, Validator> { type Item = TransactionExecutionOutcome; fn next(&mut self) -> Option<Self::Item> { let transaction = self.pending.next()?; let account = self.db.basic(*transaction.pending_transaction.sender()); let env = self.env_for(&transaction.pending_transaction); let max_gas = 
self.gas_used.saturating_add(U256::from(env.tx.gas_limit)); if max_gas > env.block.gas_limit { return Some(TransactionExecutionOutcome::Exhausted(transaction)) } if let Err(err) = self.validator.validate_pool_transaction_for( &transaction.pending_transaction, &account, &env, ) { warn!(target: "backend", "Skipping invalid tx execution [{:?}] {}", transaction.hash(), err); return Some(TransactionExecutionOutcome::Invalid(transaction, err)) } let mut evm = revm::EVM::new(); evm.env = env; evm.database(&mut self.d
on { transaction, exit, out, gas, logs: logs.into_iter().map(Into::into).collect(), traces: tracer.traces.arena, }; Some(TransactionExecutionOutcome::Executed(tx)) } } fn logs_bloom(logs: Vec<Log>, bloom: &mut Bloom) { for log in logs { bloom.accrue(BloomInput::Raw(&log.address[..])); for topic in log.topics { bloom.accrue(BloomInput::Raw(&topic[..])); } } }
b); let mut tracer = Tracer::default(); trace!(target: "backend", "[{:?}] executing", transaction.hash()); let (exit, out, gas, logs) = evm.inspect_commit(&mut tracer); if exit == Return::OutOfGas { warn!(target: "backend", "[{:?}] executed with out of gas", transaction.hash()) } trace!(target: "backend", "[{:?}] executed with out={:?}, gas ={}", transaction.hash(), out, gas); self.gas_used.saturating_add(U256::from(gas)); trace!(target: "backend::executor", "transacted [{:?}], result: {:?} gas {}", transaction.hash(), exit, gas); let tx = ExecutedTransacti
function_block-random_span
[ { "content": "/// Returns the RLP for this account.\n\npub fn trie_account_rlp(info: &AccountInfo, storage: Map<U256, U256>) -> Bytes {\n\n let mut stream = RlpStream::new_list(4);\n\n stream.append(&info.nonce);\n\n stream.append(&info.balance);\n\n stream.append(&{\n\n sec_trie_root(storage.into_iter().filter(|(_k, v)| v != &U256::zero()).map(|(k, v)| {\n\n let mut temp: [u8; 32] = [0; 32];\n\n k.to_big_endian(&mut temp);\n\n (H256::from(temp), rlp::encode(&v))\n\n }))\n\n });\n\n stream.append(&info.code_hash.as_bytes());\n\n stream.out().freeze()\n\n}\n", "file_path": "anvil/src/eth/backend/mem/state.rs", "rank": 0, "score": 374092.2502033564 }, { "content": "/// Small helper function to convert [U256] into [H256].\n\npub fn u256_to_h256_be(u: U256) -> H256 {\n\n let mut h = H256::default();\n\n u.to_big_endian(h.as_mut());\n\n h\n\n}\n\n\n", "file_path": "evm/src/utils.rs", "rank": 2, "score": 348828.47210917633 }, { "content": "/// Small helper function to convert [H256] into [U256].\n\npub fn h256_to_u256_be(storage: H256) -> U256 {\n\n U256::from_big_endian(storage.as_bytes())\n\n}\n\n\n", "file_path": "evm/src/utils.rs", "rank": 3, "score": 343356.3596153263 }, { "content": "/// Small helper function to convert [U256] into [H256].\n\npub fn u256_to_h256_le(u: U256) -> H256 {\n\n let mut h = H256::default();\n\n u.to_little_endian(h.as_mut());\n\n h\n\n}\n\n\n", "file_path": "evm/src/utils.rs", "rank": 4, "score": 343356.3596153263 }, { "content": "/// Small helper function to convert [H256] into [U256].\n\npub fn h256_to_u256_le(storage: H256) -> U256 {\n\n U256::from_little_endian(storage.as_bytes())\n\n}\n", "file_path": "evm/src/utils.rs", "rank": 5, "score": 338140.8694579301 }, { "content": "type BlockHashFuture<Err> = Pin<Box<dyn Future<Output = (Result<H256, Err>, u64)> + Send>>;\n\n\n", "file_path": "evm/src/executor/fork/backend.rs", "rank": 6, "score": 336261.55272649316 }, { "content": "/// Returns the log hash for all `logs`\n\n///\n\n/// The 
log hash is `keccak(rlp(logs[]))`, <https://github.com/ethereum/go-ethereum/blob/356bbe343a30789e77bb38f25983c8f2f2bfbb47/cmd/evm/internal/t8ntool/execution.go#L255>\n\npub fn log_rlp_hash(logs: Vec<Log>) -> H256 {\n\n let mut stream = RlpStream::new();\n\n stream.begin_unbounded_list();\n\n for log in logs {\n\n stream.begin_list(3);\n\n stream.append(&log.address);\n\n stream.append_list(&log.topics);\n\n stream.append(&log.data);\n\n }\n\n stream.finalize_unbounded_list();\n\n let out = stream.out().freeze();\n\n\n\n let out = ethers::utils::keccak256(out);\n\n H256::from_slice(out.as_slice())\n\n}\n\n\n", "file_path": "anvil/src/eth/backend/mem/state.rs", "rank": 7, "score": 307818.7296666168 }, { "content": "/// Pretty print a slice of tokens.\n\npub fn format_tokens(tokens: &[Token]) -> impl Iterator<Item = String> + '_ {\n\n tokens.iter().map(format_token)\n\n}\n\n\n", "file_path": "utils/src/lib.rs", "rank": 8, "score": 307334.6602234176 }, { "content": "/// creates an unique identifier for aan (`nonce` + `Address`) combo\n\npub fn to_marker(nonce: u64, from: Address) -> TxMarker {\n\n let mut data = [0u8; 28];\n\n data[..8].copy_from_slice(&nonce.to_le_bytes()[..]);\n\n data[8..].copy_from_slice(&from.0[..]);\n\n data.to_vec()\n\n}\n\n\n\n/// Internal Transaction type\n\n#[derive(Clone, PartialEq)]\n\npub struct PoolTransaction {\n\n /// the pending eth transaction\n\n pub pending_transaction: PendingTransaction,\n\n /// Markers required by the transaction\n\n pub requires: Vec<TxMarker>,\n\n /// Markers that this transaction provides\n\n pub provides: Vec<TxMarker>,\n\n}\n\n\n\n// == impl PoolTransaction ==\n\n\n", "file_path": "anvil/src/eth/pool/transactions.rs", "rank": 9, "score": 303837.602385316 }, { "content": "/// If the input starts with a known `hardhat/console.log` `uint` selector, then this will replace\n\n/// it with the selector `abigen!` bindings expect.\n\npub fn patch_hardhat_console_selector(mut input: Vec<u8>) -> Vec<u8> {\n\n if 
input.len() < 4 {\n\n return input\n\n }\n\n\n\n let selector = Selector::try_from(&input[..4]).unwrap();\n\n if let Some(abigen_selector) = HARDHAT_CONSOLE_SELECTOR_PATCHES.get(&selector) {\n\n input.splice(..4, *abigen_selector);\n\n }\n\n input\n\n}\n\n\n\n/// This contains a map with all the `hardhat/console.log` log selectors that use `uint` or `int`\n\n/// as key and the selector of the call with `uint256`,\n\n///\n\n/// This is a bit terrible but a workaround for the differing selectors used by hardhat and the call\n\n/// bindings which `abigen!` creates. `hardhat/console.log` logs its events in functions that accept\n\n/// `uint` manually as `abi.encodeWithSignature(\"log(int)\", p0)`, but `abigen!` uses `uint256` for\n\n/// its call bindings (`HardhatConsoleCalls`) as generated by solc.\n\npub static HARDHAT_CONSOLE_SELECTOR_PATCHES: Lazy<HashMap<Selector, Selector>> = Lazy::new(|| {\n", "file_path": "evm/src/executor/abi.rs", "rank": 10, "score": 296037.7429772162 }, { "content": "pub fn apply<DB: Database>(\n\n state: &mut Cheatcodes,\n\n data: &mut EVMData<'_, DB>,\n\n caller: Address,\n\n call: &HEVMCalls,\n\n) -> Option<Result<Bytes, Bytes>> {\n\n Some(match call {\n\n HEVMCalls::Warp(inner) => {\n\n data.env.block.timestamp = inner.0;\n\n Ok(Bytes::new())\n\n }\n\n HEVMCalls::Roll(inner) => {\n\n data.env.block.number = inner.0;\n\n Ok(Bytes::new())\n\n }\n\n HEVMCalls::Fee(inner) => {\n\n data.env.block.basefee = inner.0;\n\n Ok(Bytes::new())\n\n }\n\n HEVMCalls::Coinbase(inner) => {\n", "file_path": "evm/src/executor/inspector/cheatcodes/env.rs", "rank": 11, "score": 288044.77477261075 }, { "content": "type AccountFuture<Err> =\n\n Pin<Box<dyn Future<Output = (Result<(U256, U256, Bytes), Err>, Address)> + Send>>;\n", "file_path": "evm/src/executor/fork/backend.rs", "rank": 12, "score": 284422.6361237576 }, { "content": "fn precompile<I, O>(number: u8, name: impl ToString, inputs: I, outputs: O) -> (Address, Function)\n\nwhere\n\n I: 
IntoIterator<Item = ParamType>,\n\n O: IntoIterator<Item = ParamType>,\n\n{\n\n (\n\n Address::from_slice(&[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, number]),\n\n #[allow(deprecated)]\n\n Function {\n\n name: name.to_string(),\n\n inputs: inputs\n\n .into_iter()\n\n .map(|kind| Param { name: \"\".to_string(), kind, internal_type: None })\n\n .collect(),\n\n outputs: outputs\n\n .into_iter()\n\n .map(|kind| Param { name: \"\".to_string(), kind, internal_type: None })\n\n .collect(),\n\n constant: None,\n\n state_mutability: ethers::abi::StateMutability::Pure,\n\n },\n\n )\n\n}\n\n\n", "file_path": "evm/src/trace/decoder.rs", "rank": 13, "score": 284407.9077767572 }, { "content": "pub fn get_pretty_block_attr<TX>(block: Block<TX>, attr: String) -> Option<String> {\n\n return match attr.as_str() {\n\n \"baseFeePerGas\" | \"base_fee_per_gas\" => Some(block.base_fee_per_gas.pretty()),\n\n \"difficulty\" => Some(block.difficulty.pretty()),\n\n \"extraData\" | \"extra_data\" => Some(block.extra_data.pretty()),\n\n \"gasLimit\" | \"gas_limit\" => Some(block.gas_limit.pretty()),\n\n \"gasUsed\" | \"gas_used\" => Some(block.gas_used.pretty()),\n\n \"hash\" => Some(block.hash.pretty()),\n\n \"logsBloom\" | \"logs_bloom\" => Some(block.logs_bloom.pretty()),\n\n \"miner\" | \"author\" => Some(block.author.pretty()),\n\n \"mixHash\" | \"mix_hash\" => Some(block.mix_hash.pretty()),\n\n \"nonce\" => Some(block.nonce.pretty()),\n\n \"number\" => Some(block.number.pretty()),\n\n \"parentHash\" | \"parent_hash\" => Some(block.parent_hash.pretty()),\n\n \"receiptsRoot\" | \"receipts_root\" => Some(block.receipts_root.pretty()),\n\n \"sealFields\" | \"seal_fields\" => Some(block.seal_fields.pretty()),\n\n \"sha3Uncles\" | \"sha_3_uncles\" => Some(block.uncles_hash.pretty()),\n\n \"size\" => Some(block.size.pretty()),\n\n \"stateRoot\" | \"state_root\" => Some(block.state_root.pretty()),\n\n \"timestamp\" => Some(block.timestamp.pretty()),\n\n \"totalDifficulty\" | 
\"total_difficult\" => Some(block.total_difficulty.pretty()),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "cast/src/print_utils.rs", "rank": 14, "score": 276965.1328446644 }, { "content": "/// Get the gas used, accounting for refunds\n\npub fn gas_used(spec: SpecId, spent: u64, refunded: u64) -> u64 {\n\n let refund_quotient = if SpecId::enabled(spec, SpecId::LONDON) { 5 } else { 2 };\n\n spent - (refunded).min(spent / refund_quotient)\n\n}\n", "file_path": "evm/src/executor/inspector/utils.rs", "rank": 15, "score": 275985.7178768294 }, { "content": "pub fn handle_expect_emit(state: &mut Cheatcodes, log: RawLog, address: &Address) {\n\n // Fill or check the expected emits\n\n if let Some(next_expect_to_fill) =\n\n state.expected_emits.iter_mut().find(|expect| expect.log.is_none())\n\n {\n\n // We have unfilled expects, so we fill the first one\n\n next_expect_to_fill.log = Some(log);\n\n } else if let Some(next_expect) = state.expected_emits.iter_mut().find(|expect| !expect.found) {\n\n // We do not have unfilled expects, so we try to match this log with the first unfound\n\n // log that we expect\n\n let expected =\n\n next_expect.log.as_ref().expect(\"we should have a log to compare against here\");\n\n if expected.topics[0] == log.topics[0] {\n\n // Topic 0 can match, but the amount of topics can differ.\n\n if expected.topics.len() != log.topics.len() {\n\n next_expect.found = false;\n\n } else {\n\n // Match topics\n\n next_expect.found = log\n\n .topics\n", "file_path": "evm/src/executor/inspector/cheatcodes/expect.rs", "rank": 16, "score": 270151.54502675397 }, { "content": "type StorageFuture<Err> = Pin<Box<dyn Future<Output = (Result<U256, Err>, Address, U256)> + Send>>;\n", "file_path": "evm/src/executor/fork/backend.rs", "rank": 17, "score": 270107.3160522339 }, { "content": "/// Very simple fuzzy matching of contract bytecode.\n\n///\n\n/// Will fail for small contracts that are essentially all immutable variables.\n\nfn diff_score(a: &[u8], b: 
&[u8]) -> f64 {\n\n let cutoff_len = usize::min(a.len(), b.len());\n\n if cutoff_len == 0 {\n\n return 1.0\n\n }\n\n\n\n let a = &a[..cutoff_len];\n\n let b = &b[..cutoff_len];\n\n let mut diff_chars = 0;\n\n for i in 0..cutoff_len {\n\n if a[i] != b[i] {\n\n diff_chars += 1;\n\n }\n\n }\n\n diff_chars as f64 / cutoff_len as f64\n\n}\n", "file_path": "evm/src/trace/identifier/local.rs", "rank": 18, "score": 263378.7792745942 }, { "content": "/// Returns `true` if the signature of the `transaction` is the `BYPASS_SIGNATURE`\n\npub fn is_bypassed(transaction: &TypedTransaction) -> bool {\n\n transaction.signature() == BYPASS_SIGNATURE\n\n}\n\n\n\n/// Manages user modifications that may affect the node's behavior\n\n///\n\n/// Contains the state of executed, non-eth standard cheat code RPC\n\n#[derive(Debug, Clone, Default)]\n\npub struct CheatsManager {\n\n /// shareable state\n\n state: Arc<RwLock<CheatsState>>,\n\n}\n\n\n\n// === impl CheatsManager ===\n\n\n\nimpl CheatsManager {\n\n /// Sets the account to impersonate and returns the account that was previously impersonated if\n\n /// any\n\n pub fn impersonate(&self, account: Address) -> Option<Address> {\n\n trace!(target: \"cheats\", \"Start impersonating {:?}\", account);\n", "file_path": "anvil/src/eth/backend/cheats.rs", "rank": 19, "score": 261385.56723030953 }, { "content": "/// Parses string input as Token against the expected ParamType\n\npub fn parse_tokens<'a, I: IntoIterator<Item = (&'a ParamType, &'a str)>>(\n\n params: I,\n\n lenient: bool,\n\n) -> Result<Vec<Token>> {\n\n params\n\n .into_iter()\n\n .map(|(param, value)| {\n\n let mut token = if lenient {\n\n LenientTokenizer::tokenize(param, value)\n\n } else {\n\n StrictTokenizer::tokenize(param, value)\n\n };\n\n\n\n if token.is_err() && value.starts_with(\"0x\") {\n\n if let ParamType::Uint(_) = param {\n\n // try again if value is hex\n\n if let Ok(value) = U256::from_str(value).map(|v| v.to_string()) {\n\n token = if lenient {\n\n 
LenientTokenizer::tokenize(param, &value)\n\n } else {\n", "file_path": "utils/src/lib.rs", "rank": 20, "score": 260992.41661611927 }, { "content": "pub fn get_pretty_tx_receipt_attr(receipt: TransactionReceipt, attr: String) -> Option<String> {\n\n return match attr.as_str() {\n\n \"blockHash\" | \"block_hash\" => Some(receipt.block_hash.pretty()),\n\n \"blockNumber\" | \"block_number\" => Some(receipt.block_number.pretty()),\n\n \"contractAddress\" | \"contract_address\" => Some(receipt.contract_address.pretty()),\n\n \"cumulativeGasUsed\" | \"cumulative_gas_used\" => Some(receipt.cumulative_gas_used.pretty()),\n\n \"effectiveGasPrice\" | \"effective_gas_price\" => Some(receipt.effective_gas_price.pretty()),\n\n \"gasUsed\" | \"gas_used\" => Some(receipt.gas_used.pretty()),\n\n \"logs\" => Some(receipt.logs.pretty()),\n\n \"logsBloom\" | \"logs_bloom\" => Some(receipt.logs_bloom.pretty()),\n\n \"root\" => Some(receipt.root.pretty()),\n\n \"status\" => Some(receipt.status.pretty()),\n\n \"transactionHash\" | \"transaction_hash\" => Some(receipt.transaction_hash.pretty()),\n\n \"transactionIndex\" | \"transaction_index\" => Some(receipt.transaction_index.pretty()),\n\n \"type\" | \"transaction_type\" => Some(receipt.transaction_type.pretty()),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "cast/src/print_utils.rs", "rank": 21, "score": 260654.05956334452 }, { "content": "/// Builds the initial [EvmFuzzState] from a database.\n\npub fn build_initial_state<DB: DatabaseRef>(db: &CacheDB<DB>) -> EvmFuzzState {\n\n let mut state: HashSet<[u8; 32]> = HashSet::new();\n\n for (address, storage) in db.storage() {\n\n let info = db.basic(*address);\n\n\n\n // Insert basic account information\n\n state.insert(H256::from(*address).into());\n\n state.insert(utils::u256_to_h256_le(info.balance).into());\n\n state.insert(utils::u256_to_h256_le(U256::from(info.nonce)).into());\n\n\n\n // Insert storage\n\n for (slot, value) in storage {\n\n 
state.insert(utils::u256_to_h256_le(*slot).into());\n\n state.insert(utils::u256_to_h256_le(*value).into());\n\n }\n\n }\n\n\n\n // need at least some state data if db is empty otherwise we can't select random data for state\n\n // fuzzing\n\n if state.is_empty() {\n\n // prefill with a random addresses\n\n state.insert(H256::from(Address::random()).into());\n\n }\n\n\n\n Rc::new(RefCell::new(state))\n\n}\n\n\n", "file_path": "evm/src/fuzz/strategies/state.rs", "rank": 22, "score": 253017.6226073432 }, { "content": "/// Get the address of a contract creation\n\npub fn get_create_address(call: &CreateInputs, nonce: u64) -> Address {\n\n match call.scheme {\n\n CreateScheme::Create => get_contract_address(call.caller, nonce),\n\n CreateScheme::Create2 { salt } => {\n\n let mut buffer: [u8; 4 * 8] = [0; 4 * 8];\n\n salt.to_big_endian(&mut buffer);\n\n get_create2_address(call.caller, buffer, call.init_code.clone())\n\n }\n\n }\n\n}\n\n\n", "file_path": "evm/src/executor/inspector/utils.rs", "rank": 23, "score": 251189.8249863895 }, { "content": "/// Decode a single log.\n\n///\n\n/// This function returns [None] if it is not a DSTest log or the result of a Hardhat\n\n/// `console.log`.\n\npub fn decode_console_log(log: &RawLog) -> Option<String> {\n\n let decoded = match ConsoleEvents::decode_log(log).ok()? 
{\n\n LogsFilter(inner) => format!(\"{}\", inner.0),\n\n LogBytesFilter(inner) => format!(\"{}\", inner.0),\n\n LogNamedAddressFilter(inner) => format!(\"{}: {:?}\", inner.key, inner.val),\n\n LogNamedBytes32Filter(inner) => {\n\n format!(\"{}: 0x{}\", inner.key, hex::encode(inner.val))\n\n }\n\n LogNamedDecimalIntFilter(inner) => {\n\n let (sign, val) = inner.val.into_sign_and_abs();\n\n format!(\n\n \"{}: {}{}\",\n\n inner.key,\n\n sign,\n\n ethers::utils::format_units(val, inner.decimals.as_u32()).unwrap()\n\n )\n\n }\n\n LogNamedDecimalUintFilter(inner) => {\n\n format!(\n\n \"{}: {}\",\n", "file_path": "evm/src/decode.rs", "rank": 24, "score": 249689.47752143058 }, { "content": "/// Given a parameter type, returns a strategy for generating values for that type.\n\n///\n\n/// Works with ABI Encoder v2 tuples.\n\npub fn fuzz_param(param: &ParamType) -> impl Strategy<Value = Token> {\n\n match param {\n\n ParamType::Address => {\n\n // The key to making this work is the `boxed()` call which type erases everything\n\n // https://altsysrq.github.io/proptest-book/proptest/tutorial/transforming-strategies.html\n\n any::<[u8; 20]>().prop_map(|x| Address::from_slice(&x).into_token()).boxed()\n\n }\n\n ParamType::Bytes => any::<Vec<u8>>().prop_map(|x| Bytes::from(x).into_token()).boxed(),\n\n // For ints and uints we sample from a U256, then wrap it to the correct size with a\n\n // modulo operation. Note that this introduces modulo bias, but it can be removed with\n\n // rejection sampling if it's determined the bias is too severe. 
Rejection sampling may\n\n // slow down tests as it resamples bad values, so may want to benchmark the performance\n\n // hit and weigh that against the current bias before implementing\n\n ParamType::Int(n) => match n / 8 {\n\n 32 => any::<[u8; 32]>()\n\n .prop_map(move |x| I256::from_raw(U256::from(&x)).into_token())\n\n .boxed(),\n\n y @ 1..=31 => any::<[u8; 32]>()\n\n .prop_map(move |x| {\n\n // Generate a uintN in the correct range, then shift it to the range of intN\n", "file_path": "evm/src/fuzz/strategies/param.rs", "rank": 25, "score": 247589.50994443055 }, { "content": "pub fn apply<DB: Database>(\n\n state: &mut Cheatcodes,\n\n data: &mut EVMData<'_, DB>,\n\n call: &HEVMCalls,\n\n) -> Option<Result<Bytes, Bytes>> {\n\n Some(match call {\n\n HEVMCalls::Addr(inner) => addr(inner.0),\n\n HEVMCalls::Sign(inner) => sign(inner.0, inner.1.into(), data.env.cfg.chain_id),\n\n HEVMCalls::Label(inner) => {\n\n state.labels.insert(inner.0, inner.1.clone());\n\n Ok(Bytes::new())\n\n }\n\n _ => return None,\n\n })\n\n}\n", "file_path": "evm/src/executor/inspector/cheatcodes/util.rs", "rank": 26, "score": 247456.2261038156 }, { "content": "pub fn apply<DB: Database>(\n\n state: &mut Cheatcodes,\n\n data: &mut EVMData<'_, DB>,\n\n call: &HEVMCalls,\n\n) -> Option<Result<Bytes, Bytes>> {\n\n Some(match call {\n\n HEVMCalls::ExpectRevert0(_) => expect_revert(state, Bytes::new(), data.subroutine.depth()),\n\n HEVMCalls::ExpectRevert1(inner) => {\n\n expect_revert(state, inner.0.to_vec().into(), data.subroutine.depth())\n\n }\n\n HEVMCalls::ExpectRevert2(inner) => {\n\n expect_revert(state, inner.0.to_vec().into(), data.subroutine.depth())\n\n }\n\n HEVMCalls::ExpectEmit0(inner) => {\n\n state.expected_emits.push(ExpectedEmit {\n\n depth: data.subroutine.depth() - 1,\n\n checks: [inner.0, inner.1, inner.2, inner.3],\n\n ..Default::default()\n\n });\n\n Ok(Bytes::new())\n", "file_path": "evm/src/executor/inspector/cheatcodes/expect.rs", "rank": 27, "score": 247456.2261038156 
}, { "content": "pub fn apply<DB: Database>(\n\n _: &mut EVMData<'_, DB>,\n\n call: &HEVMCalls,\n\n) -> Option<Result<Bytes, Bytes>> {\n\n if let HEVMCalls::Assume(inner) = call {\n\n Some(if inner.0 { Ok(Bytes::new()) } else { Err(ASSUME_MAGIC_RETURN_CODE.into()) })\n\n } else {\n\n None\n\n }\n\n}\n", "file_path": "evm/src/executor/inspector/cheatcodes/fuzz.rs", "rank": 28, "score": 247456.2261038156 }, { "content": "/// Request variants that are executed by the provider\n\nenum ProviderRequest<Err> {\n\n Account(AccountFuture<Err>),\n\n Storage(StorageFuture<Err>),\n\n BlockHash(BlockHashFuture<Err>),\n\n}\n\n\n\n/// The Request type the Backend listens for\n", "file_path": "evm/src/executor/fork/backend.rs", "rank": 29, "score": 241550.67221615277 }, { "content": "/// Generates a trie root hash for a vector of values\n\npub fn ordered_trie_root<I, V>(input: I) -> H256\n\nwhere\n\n I: IntoIterator<Item = V>,\n\n V: AsRef<[u8]>,\n\n{\n\n triehash::ordered_trie_root::<Keccak256Hasher, I>(input)\n\n}\n", "file_path": "anvil/core/src/eth/trie.rs", "rank": 30, "score": 239974.86800561514 }, { "content": "/// Calculates the initial gas stipend for a transaction\n\nfn stipend(calldata: &[u8], spec: SpecId) -> u64 {\n\n let non_zero_data_cost = if SpecId::enabled(spec, SpecId::ISTANBUL) { 16 } else { 68 };\n\n calldata.iter().fold(21000, |sum, byte| sum + if *byte == 0 { 4 } else { non_zero_data_cost })\n\n}\n", "file_path": "evm/src/executor/mod.rs", "rank": 31, "score": 236701.61203832613 }, { "content": "/// Generates a trie root hash for a vector of key-value tuples\n\npub fn trie_root<I, K, V>(input: I) -> H256\n\nwhere\n\n I: IntoIterator<Item = (K, V)>,\n\n K: AsRef<[u8]> + Ord,\n\n V: AsRef<[u8]>,\n\n{\n\n triehash::trie_root::<Keccak256Hasher, _, _, _>(input)\n\n}\n\n\n", "file_path": "anvil/core/src/eth/trie.rs", "rank": 32, "score": 236449.33918943437 }, { "content": "/// converts the `request` into a [`TypedTransactionRequest`] with the given 
signature\n\n///\n\n/// # Errors\n\n///\n\n/// This will fail if the `signature` contains an erroneous recovery id.\n\npub fn build_typed_transaction(\n\n request: TypedTransactionRequest,\n\n signature: Signature,\n\n) -> Result<TypedTransaction, BlockchainError> {\n\n let tx = match request {\n\n TypedTransactionRequest::Legacy(tx) => {\n\n let LegacyTransactionRequest {\n\n nonce, gas_price, gas_limit, kind, value, input, ..\n\n } = tx;\n\n TypedTransaction::Legacy(LegacyTransaction {\n\n nonce,\n\n gas_price,\n\n gas_limit,\n\n kind,\n\n value,\n\n input,\n\n signature,\n\n })\n\n }\n\n TypedTransactionRequest::EIP2930(tx) => {\n", "file_path": "anvil/src/eth/sign.rs", "rank": 33, "score": 233292.03095135774 }, { "content": "/// Generates a key-hashed (secure) trie root hash for a vector of key-value tuples.\n\npub fn sec_trie_root<I, K, V>(input: I) -> H256\n\nwhere\n\n I: IntoIterator<Item = (K, V)>,\n\n K: AsRef<[u8]>,\n\n V: AsRef<[u8]>,\n\n{\n\n triehash::sec_trie_root::<Keccak256Hasher, _, _, _>(input)\n\n}\n\n\n", "file_path": "anvil/core/src/eth/trie.rs", "rank": 34, "score": 232740.96759628085 }, { "content": "#[allow(unused)]\n\n#[doc(hidden)]\n\npub fn init_tracing() -> LoggingManager {\n\n use tracing_subscriber::prelude::*;\n\n\n\n let manager = LoggingManager::default();\n\n // check whether `RUST_LOG` is explicitly set\n\n if std::env::var(\"RUST_LOG\").is_ok() {\n\n tracing_subscriber::Registry::default()\n\n .with(tracing_subscriber::EnvFilter::from_default_env())\n\n .with(tracing_subscriber::fmt::layer())\n\n .init();\n\n } else {\n\n tracing_subscriber::Registry::default()\n\n .with(NodeLogLayer::new(manager.clone()))\n\n .with(\n\n tracing_subscriber::fmt::layer()\n\n .without_time()\n\n .with_target(false)\n\n .with_level(false),\n\n )\n\n .init();\n\n }\n\n\n\n manager\n\n}\n", "file_path": "anvil/src/lib.rs", "rank": 35, "score": 229437.07259015477 }, { "content": "/// Creates a `Transaction` as it's expected for the `eth` RPC api from 
storage data\n\npub fn transaction_build(\n\n eth_transaction: TypedTransaction,\n\n block: Option<&Block>,\n\n info: Option<TransactionInfo>,\n\n is_eip1559: bool,\n\n base_fee: Option<U256>,\n\n) -> Transaction {\n\n let mut transaction: Transaction = eth_transaction.clone().into();\n\n\n\n if let TypedTransaction::EIP1559(_) = eth_transaction {\n\n if block.is_none() && info.is_none() {\n\n // transaction is not mined yet, gas price is considered just `max_fee_per_gas`\n\n transaction.gas_price = transaction.max_fee_per_gas;\n\n } else {\n\n // if transaction is already mined, gas price is considered base fee + priority fee: the\n\n // effective gas price.\n\n let base_fee = base_fee.unwrap_or(U256::zero());\n\n let max_priority_fee_per_gas =\n\n transaction.max_priority_fee_per_gas.unwrap_or(U256::zero());\n\n transaction.gas_price = Some(\n", "file_path": "anvil/src/eth/backend/mem/mod.rs", "rank": 36, "score": 228226.43642626866 }, { "content": "/// Parses an ether value from a string.\n\n///\n\n/// The amount can be tagged with a unit, e.g. \"1ether\".\n\n///\n\n/// If the string represents an untagged amount (e.g. 
\"100\") then\n\n/// it is interpreted as wei.\n\npub fn parse_ether_value(value: &str) -> eyre::Result<U256> {\n\n Ok(if value.starts_with(\"0x\") {\n\n U256::from_str(value)?\n\n } else {\n\n U256::from(LenientTokenizer::tokenize_uint(value)?)\n\n })\n\n}\n\n\n", "file_path": "cli/src/utils.rs", "rank": 37, "score": 226922.15828304834 }, { "content": "/// Returns the `Utc` datetime for the given seconds since unix epoch\n\npub fn utc_from_secs(secs: u64) -> DateTime<Utc> {\n\n DateTime::<Utc>::from_utc(NaiveDateTime::from_timestamp(secs as i64, 0), Utc)\n\n}\n\n\n\n/// Manages block time\n\n#[derive(Debug, Clone, Default)]\n\npub struct TimeManager {\n\n /// tracks the overall applied timestamp offset\n\n offset: Arc<RwLock<i128>>,\n\n /// Contains the next timestamp to use\n\n /// if this is set then the next time `[TimeManager::current_timestamp()]` is called this value\n\n /// will be taken and returned. After which the `offset` will be updated accordingly\n\n next_exact_timestamp: Arc<RwLock<Option<u64>>>,\n\n}\n\n\n\n// === impl TimeManager ===\n\n\n\nimpl TimeManager {\n\n fn offset(&self) -> i128 {\n\n *self.offset.read()\n", "file_path": "anvil/src/eth/backend/time.rs", "rank": 38, "score": 226745.3907821236 }, { "content": "/// Given a function and a vector of string arguments, it proceeds to convert the args to ethabi\n\n/// Tokens and then ABI encode them.\n\npub fn encode_args(func: &Function, args: &[impl AsRef<str>]) -> Result<Vec<u8>> {\n\n let params = func\n\n .inputs\n\n .iter()\n\n .zip(args)\n\n .map(|(input, arg)| (&input.kind, arg.as_ref()))\n\n .collect::<Vec<_>>();\n\n let tokens = parse_tokens(params, true)?;\n\n Ok(func.encode_input(&tokens)?)\n\n}\n\n\n\n/// Fetches a function signature given the selector using 4byte.directory\n\npub async fn fourbyte(selector: &str) -> Result<Vec<(String, i32)>> {\n\n #[derive(Deserialize)]\n\n struct Decoded {\n\n text_signature: String,\n\n id: i32,\n\n }\n\n\n\n #[derive(Deserialize)]\n", 
"file_path": "utils/src/lib.rs", "rank": 39, "score": 225913.36177180015 }, { "content": "pub fn enveloped<T: Encodable>(id: u8, v: &T, s: &mut RlpStream) {\n\n let encoded = rlp::encode(v);\n\n let mut out = vec![0; 1 + encoded.len()];\n\n out[0] = id;\n\n out[1..].copy_from_slice(&encoded);\n\n out.rlp_append(s)\n\n}\n\n\n", "file_path": "anvil/core/src/eth/utils.rs", "rank": 40, "score": 225858.1516781862 }, { "content": "/// This function takes a contract [`Abi`] and a name and proceeds to generate a Solidity\n\n/// `interface` from that ABI. If the provided name is empty, then it defaults to `interface\n\n/// Interface`.\n\n///\n\n/// This is done by iterating over the functions and their ABI inputs/outputs, and generating\n\n/// function signatures/inputs/outputs according to the ABI.\n\n///\n\n/// Notes:\n\n/// * ABI Encoder V2 is not supported yet\n\n/// * Kudos to [maxme/abi2solidity](https://github.com/maxme/abi2solidity) for the algorithm\n\npub fn abi_to_solidity(contract_abi: &Abi, mut contract_name: &str) -> Result<String> {\n\n let functions_iterator = contract_abi.functions();\n\n let events_iterator = contract_abi.events();\n\n if contract_name.trim().is_empty() {\n\n contract_name = \"Interface\";\n\n };\n\n\n\n // instantiate an array of all ABI Encoder v2 structs\n\n let mut structs = HashSet::new();\n\n\n\n let events = events_iterator\n\n .map(|event| {\n\n let inputs = event\n\n .inputs\n\n .iter()\n\n .map(|param| format_event_params(param, &mut structs))\n\n .collect::<Vec<String>>()\n\n .join(\", \");\n\n\n\n let event_final = format!(\"event {}({})\", event.name, inputs);\n", "file_path": "utils/src/lib.rs", "rank": 41, "score": 222542.24853163993 }, { "content": "fn sign(private_key: U256, digest: H256, chain_id: U256) -> Result<Bytes, Bytes> {\n\n if private_key.is_zero() {\n\n return Err(\"Private key cannot be 0.\".to_string().encode().into())\n\n }\n\n\n\n let mut bytes: [u8; 32] = [0; 32];\n\n private_key.to_big_endian(&mut 
bytes);\n\n\n\n let key = SigningKey::from_bytes(&bytes).map_err(|err| err.to_string().encode())?;\n\n let wallet = LocalWallet::from(key).with_chain_id(chain_id.as_u64());\n\n\n\n // The `ecrecover` precompile does not use EIP-155\n\n let sig = wallet.sign_hash(digest);\n\n let recovered = sig.recover(digest).map_err(|err| err.to_string().encode())?;\n\n\n\n assert_eq!(recovered, wallet.address());\n\n\n\n let mut r_bytes = [0u8; 32];\n\n let mut s_bytes = [0u8; 32];\n\n sig.r.to_big_endian(&mut r_bytes);\n\n sig.s.to_big_endian(&mut s_bytes);\n\n\n\n Ok((sig.v, r_bytes, s_bytes).encode().into())\n\n}\n\n\n", "file_path": "evm/src/executor/inspector/cheatcodes/util.rs", "rank": 42, "score": 221628.91231264884 }, { "content": "/// Decode a set of logs, only returning logs from DSTest logging events and Hardhat's `console.log`\n\npub fn decode_console_logs(logs: &[RawLog]) -> Vec<String> {\n\n logs.iter().filter_map(decode_console_log).collect()\n\n}\n\n\n", "file_path": "evm/src/decode.rs", "rank": 43, "score": 221576.9195511722 }, { "content": "fn required_marker(provided_nonce: U256, on_chain_nonce: U256, from: Address) -> Vec<TxMarker> {\n\n if provided_nonce == on_chain_nonce {\n\n return Vec::new()\n\n }\n\n let prev_nonce = provided_nonce.saturating_sub(U256::one());\n\n if on_chain_nonce <= prev_nonce {\n\n vec![to_marker(prev_nonce.as_u64(), from)]\n\n } else {\n\n Vec::new()\n\n }\n\n}\n", "file_path": "anvil/src/eth/api.rs", "rank": 44, "score": 220576.74786525857 }, { "content": "pub fn state_merkle_trie_root(\n\n accounts: &Map<Address, AccountInfo>,\n\n storage: &Map<Address, Map<U256, U256>>,\n\n) -> H256 {\n\n let vec = accounts\n\n .iter()\n\n .map(|(address, info)| {\n\n let storage = storage.get(address).cloned().unwrap_or_default();\n\n let storage_root = trie_account_rlp(info, storage);\n\n (*address, storage_root)\n\n })\n\n .collect::<Vec<_>>();\n\n\n\n trie_root(vec)\n\n}\n\n\n", "file_path": "anvil/src/eth/backend/mem/state.rs", "rank": 45, 
"score": 219824.27037789614 }, { "content": "pub fn evm_spec(evm: &EvmVersion) -> SpecId {\n\n match evm {\n\n EvmVersion::Istanbul => SpecId::ISTANBUL,\n\n EvmVersion::Berlin => SpecId::BERLIN,\n\n EvmVersion::London => SpecId::LONDON,\n\n _ => panic!(\"Unsupported EVM version\"),\n\n }\n\n}\n\n\n\n/// Securely reads a secret from stdin, or proceeds to return a fallback value\n\n/// which was provided in cleartext via CLI or env var\n", "file_path": "cli/src/utils.rs", "rank": 46, "score": 217859.3518594469 }, { "content": "#[auto_impl::auto_impl(&, Box)]\n\npub trait TransactionValidator {\n\n /// Validates the transaction's validity when it comes to nonce, payment\n\n ///\n\n /// This is intended to be checked before the transaction makes it into the pool and whether it\n\n /// should rather be outright rejected if the sender has insufficient funds.\n\n fn validate_pool_transaction(\n\n &self,\n\n tx: &PendingTransaction,\n\n ) -> Result<(), InvalidTransactionError>;\n\n\n\n /// Validates the transaction against a specific account before entering the pool\n\n fn validate_pool_transaction_for(\n\n &self,\n\n tx: &PendingTransaction,\n\n account: &AccountInfo,\n\n env: &Env,\n\n ) -> Result<(), InvalidTransactionError>;\n\n\n\n /// Validates the transaction against a specific account\n\n ///\n\n /// This should succeed if the transaction is ready to be executed\n\n fn validate_for(\n\n &self,\n\n tx: &PendingTransaction,\n\n account: &AccountInfo,\n\n env: &Env,\n\n ) -> Result<(), InvalidTransactionError>;\n\n}\n", "file_path": "anvil/src/eth/backend/validate.rs", "rank": 47, "score": 215630.58140204294 }, { "content": "fn start_record(state: &mut Cheatcodes) {\n\n state.accesses = Some(Default::default());\n\n}\n\n\n", "file_path": "evm/src/executor/inspector/cheatcodes/env.rs", "rank": 48, "score": 214665.46126611254 }, { "content": "fn verify_on_chain(info: Option<EnvExternalities>, prj: TestProject, mut cmd: TestCommand) {\n\n // only execute if keys 
present\n\n if let Some(info) = info {\n\n add_unique(&prj);\n\n\n\n prj.inner()\n\n .add_source(\n\n \"Verify.sol\",\n\n r#\"\n\n // SPDX-License-Identifier: UNLICENSED\n\n pragma solidity =0.8.10;\n\n import {Unique} from \"./unique.sol\";\n\n contract Verify is Unique {\n", "file_path": "cli/tests/it/verify.rs", "rank": 49, "score": 208376.939240458 }, { "content": "/// parse a hex str or decimal str as U256\n\npub fn parse_u256(s: &str) -> eyre::Result<U256> {\n\n Ok(if s.starts_with(\"0x\") { U256::from_str(s)? } else { U256::from_dec_str(s)? })\n\n}\n\n\n", "file_path": "cli/src/utils.rs", "rank": 50, "score": 207279.97257212194 }, { "content": "pub fn to_bytes(uint: U256) -> Bytes {\n\n let mut buffer: [u8; 4 * 8] = [0; 4 * 8];\n\n uint.to_big_endian(&mut buffer);\n\n Bytes::from(buffer)\n\n}\n\n\n", "file_path": "cast/src/print_utils.rs", "rank": 51, "score": 206093.51030354996 }, { "content": "pub fn to_access_list(list: Vec<AccessListItem>) -> Vec<(Address, Vec<U256>)> {\n\n list.into_iter()\n\n .map(|item| (item.address, item.storage_keys.into_iter().map(h256_to_u256_be).collect()))\n\n .collect()\n\n}\n", "file_path": "anvil/core/src/eth/utils.rs", "rank": 52, "score": 202907.50382899895 }, { "content": "/// Chooses the color of the trace depending on the destination address and status of the call.\n\nfn trace_color(trace: &CallTrace) -> Color {\n\n if trace.address == CHEATCODE_ADDRESS {\n\n Color::Blue\n\n } else if trace.success {\n\n Color::Green\n\n } else {\n\n Color::Red\n\n }\n\n}\n", "file_path": "evm/src/trace/mod.rs", "rank": 53, "score": 201033.43414560048 }, { "content": "/// If the output medium is terminal, this calls `f` within the [`SpinnerReporter`] that displays a\n\n/// spinning cursor to display solc progress.\n\n///\n\n/// If no terminal is available this falls back to common `println!` in [`BasicStdoutReporter`].\n\npub fn with_spinner_reporter<T>(f: impl FnOnce() -> T) -> T {\n\n let reporter = if TERM_SETTINGS.indicate_progress 
{\n\n ethers::solc::report::Report::new(SpinnerReporter::spawn())\n\n } else {\n\n ethers::solc::report::Report::new(BasicStdoutReporter::default())\n\n };\n\n ethers::solc::report::with_scoped(&reporter, f)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n #[ignore]\n\n fn can_spin() {\n\n let mut s = Spinner::new(\"Compiling\".to_string());\n\n let ticks = 50;\n\n for _ in 0..ticks {\n\n std::thread::sleep(std::time::Duration::from_millis(100));\n", "file_path": "cli/src/term.rs", "rank": 54, "score": 200627.09320334077 }, { "content": "pub fn get_pretty_tx_attr(transaction: Transaction, attr: String) -> Option<String> {\n\n return match attr.as_str() {\n\n \"blockHash\" | \"block_hash\" => Some(transaction.block_hash.pretty()),\n\n \"blockNumber\" | \"block_number\" => Some(transaction.block_number.pretty()),\n\n \"from\" => Some(transaction.from.pretty()),\n\n \"gas\" => Some(transaction.gas.pretty()),\n\n \"gasPrice\" | \"gas_price\" => Some(transaction.gas_price.pretty()),\n\n \"hash\" => Some(transaction.hash.pretty()),\n\n \"input\" => Some(transaction.input.pretty()),\n\n \"nonce\" => Some(transaction.nonce.pretty()),\n\n \"s\" => Some(to_bytes(transaction.s).pretty()),\n\n \"r\" => Some(to_bytes(transaction.r).pretty()),\n\n \"to\" => Some(transaction.to.pretty()),\n\n \"transactionIndex\" | \"transaction_index\" => Some(transaction.transaction_index.pretty()),\n\n \"v\" => Some(transaction.v.pretty()),\n\n \"value\" => Some(transaction.value.pretty()),\n\n _ => None,\n\n }\n\n}\n\n\n", "file_path": "cast/src/print_utils.rs", "rank": 55, "score": 200194.24711092433 }, { "content": "/// Returns all the logs that match the given filter\n\npub fn filter_logs(\n\n block: Block,\n\n receipts: Vec<TypedReceipt>,\n\n filter: &FilteredParams,\n\n) -> Vec<EthersLog> {\n\n /// Determines whether to add this log\n\n fn add_log(block_hash: H256, l: &Log, block: &Block, params: &FilteredParams) -> bool {\n\n let log = EthersLog {\n\n 
address: l.address,\n\n topics: l.topics.clone(),\n\n data: l.data.clone(),\n\n block_hash: None,\n\n block_number: None,\n\n transaction_hash: None,\n\n transaction_index: None,\n\n log_index: None,\n\n transaction_log_index: None,\n\n log_type: None,\n\n removed: Some(false),\n\n };\n", "file_path": "anvil/src/pubsub.rs", "rank": 56, "score": 198329.0009427704 }, { "content": "fn accesses(state: &mut Cheatcodes, address: Address) -> Bytes {\n\n if let Some(storage_accesses) = &mut state.accesses {\n\n ethers::abi::encode(&[\n\n storage_accesses.reads.remove(&address).unwrap_or_default().into_tokens()[0].clone(),\n\n storage_accesses.writes.remove(&address).unwrap_or_default().into_tokens()[0].clone(),\n\n ])\n\n .into()\n\n } else {\n\n ethers::abi::encode(&[Token::Array(vec![]), Token::Array(vec![])]).into()\n\n }\n\n}\n\n\n", "file_path": "evm/src/executor/inspector/cheatcodes/env.rs", "rank": 57, "score": 198157.77342867877 }, { "content": "fn expect_revert(state: &mut Cheatcodes, reason: Bytes, depth: u64) -> Result<Bytes, Bytes> {\n\n if state.expected_revert.is_some() {\n\n Err(\"You must call another function prior to expecting a second revert.\"\n\n .to_string()\n\n .encode()\n\n .into())\n\n } else {\n\n state.expected_revert = Some(ExpectedRevert { reason, depth });\n\n Ok(Bytes::new())\n\n }\n\n}\n\n\n", "file_path": "evm/src/executor/inspector/cheatcodes/expect.rs", "rank": 58, "score": 195646.68333962653 }, { "content": "/// Copies an initialized project to the given path\n\npub fn initialize(target: impl AsRef<Path>) {\n\n FORGE_INITIALIZED.copy_to(target)\n\n}\n\n\n", "file_path": "cli/test-utils/src/util.rs", "rank": 59, "score": 195308.63462066062 }, { "content": "/// Flattens a group of contracts into maps of all events and functions\n\npub fn flatten_known_contracts(\n\n contracts: &BTreeMap<ArtifactId, (Abi, Vec<u8>)>,\n\n) -> (BTreeMap<[u8; 4], Function>, BTreeMap<H256, Event>, Abi) {\n\n let flattened_funcs: BTreeMap<[u8; 4], Function> = 
contracts\n\n .iter()\n\n .flat_map(|(_name, (abi, _code))| {\n\n abi.functions()\n\n .map(|func| (func.short_signature(), func.clone()))\n\n .collect::<BTreeMap<[u8; 4], Function>>()\n\n })\n\n .collect();\n\n\n\n let flattened_events: BTreeMap<H256, Event> = contracts\n\n .iter()\n\n .flat_map(|(_name, (abi, _code))| {\n\n abi.events()\n\n .map(|event| (event.signature(), event.clone()))\n\n .collect::<BTreeMap<H256, Event>>()\n\n })\n\n .collect();\n", "file_path": "utils/src/lib.rs", "rank": 60, "score": 194398.3958457787 }, { "content": "#[cfg(any(feature = \"test\"))]\n\npub fn init_tracing_subscriber() {\n\n let _ = tracing_subscriber::fmt()\n\n .with_env_filter(tracing_subscriber::EnvFilter::from_default_env())\n\n .try_init()\n\n .ok();\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use ethers::{\n\n abi::Abi,\n\n solc::{artifacts::CompactContractBytecode, Project, ProjectPathsConfig},\n\n types::{Address, Bytes},\n\n };\n\n use std::future::Future;\n\n\n\n #[test]\n\n fn parse_hex_uint_tokens() {\n\n let param = ParamType::Uint(256);\n", "file_path": "utils/src/lib.rs", "rank": 61, "score": 194239.17650737078 }, { "content": "/// Clones a remote repository into the specified directory.\n\npub fn clone_remote(\n\n repo_url: &str,\n\n target_dir: impl AsRef<Path>,\n\n) -> std::io::Result<process::Output> {\n\n Command::new(\"git\")\n\n .args([\n\n \"clone\",\n\n \"--depth\",\n\n \"1\",\n\n \"--recursive\",\n\n repo_url,\n\n target_dir.as_ref().to_str().expect(\"Target path for git clone does not exist\"),\n\n ])\n\n .output()\n\n}\n\n\n", "file_path": "cli/test-utils/src/util.rs", "rank": 62, "score": 190674.99393843196 }, { "content": "fn string_or_number<'de, D>(deserializer: D) -> Result<u64, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n use serde::de::Error;\n\n match Gas::deserialize(deserializer)? 
{\n\n Gas::Number(num) => Ok(num),\n\n Gas::Text(s) => s.parse().map_err(D::Error::custom),\n\n }\n\n}\n\n\n", "file_path": "evm/src/executor/opts.rs", "rank": 63, "score": 190182.41944939684 }, { "content": "/// Returns the label for the given `token`\n\n///\n\n/// If the `token` is an `Address` then we look abel the label map.\n\n/// by default the token is formatted using standard formatting\n\npub fn label(token: &Token, labels: &HashMap<Address, String>) -> String {\n\n match token {\n\n Token::Address(addr) => {\n\n if let Some(label) = labels.get(addr) {\n\n format!(\"{}: [{:?}]\", label, addr)\n\n } else {\n\n format_token(token)\n\n }\n\n }\n\n _ => format_token(token),\n\n }\n\n}\n\n\n\npub(crate) fn decode_cheatcode_inputs(\n\n func: &Function,\n\n data: &[u8],\n\n errors: &Abi,\n\n) -> Option<Vec<String>> {\n\n match func.name.as_str() {\n\n \"expectRevert\" => {\n\n foundry_utils::decode_revert(data, Some(errors)).ok().map(|decoded| vec![decoded])\n\n }\n\n _ => None,\n\n }\n\n}\n", "file_path": "evm/src/trace/utils.rs", "rank": 64, "score": 187781.98011168127 }, { "content": "/// Given a parameter type, returns a strategy for generating values for that type, given some EVM\n\n/// fuzz state.\n\n///\n\n/// Works with ABI Encoder v2 tuples.\n\npub fn fuzz_param_from_state(param: &ParamType, state: EvmFuzzState) -> BoxedStrategy<Token> {\n\n // These are to comply with lifetime requirements\n\n let state_len = state.borrow().len();\n\n let s = state.clone();\n\n\n\n // Select a value from the state\n\n let value = any::<prop::sample::Index>()\n\n .prop_map(move |index| index.index(state_len))\n\n .prop_map(move |index| *s.borrow().iter().nth(index).unwrap());\n\n\n\n // Convert the value based on the parameter type\n\n match param {\n\n ParamType::Address => {\n\n value.prop_map(move |value| Address::from_slice(&value[12..]).into_token()).boxed()\n\n }\n\n ParamType::Bytes => value.prop_map(move |value| Bytes::from(value).into_token()).boxed(),\n\n 
ParamType::Int(n) => match n / 8 {\n\n 32 => {\n\n value.prop_map(move |value| I256::from_raw(U256::from(value)).into_token()).boxed()\n\n }\n", "file_path": "evm/src/fuzz/strategies/param.rs", "rank": 65, "score": 187324.06278898235 }, { "content": "/// Given a function and some state, it returns a strategy which generated valid calldata for the\n\n/// given function's input types, based on state taken from the EVM.\n\npub fn fuzz_calldata_from_state(\n\n func: Function,\n\n state: EvmFuzzState,\n\n) -> BoxedStrategy<ethers::types::Bytes> {\n\n let strats = func\n\n .inputs\n\n .iter()\n\n .map(|input| fuzz_param_from_state(&input.kind, state.clone()))\n\n .collect::<Vec<_>>();\n\n\n\n strats\n\n .prop_map(move |tokens| {\n\n tracing::trace!(input = ?tokens);\n\n func.encode_input(&tokens)\n\n .unwrap_or_else(|_| {\n\n panic!(\n\n r#\"Fuzzer generated invalid tokens {:?} for function `{}` inputs {:?}\n\nThis is a bug, please open an issue: https://github.com/foundry-rs/foundry/issues\"#,\n\n tokens, func.name, func.inputs\n\n )\n\n })\n\n .into()\n\n })\n\n .no_shrink()\n\n .boxed()\n\n}\n\n\n", "file_path": "evm/src/fuzz/strategies/state.rs", "rank": 66, "score": 186272.14858471864 }, { "content": "/// Collects state changes from a [StateChangeset] and logs into an [EvmFuzzState].\n\npub fn collect_state_from_call(\n\n logs: &[RawLog],\n\n state_changeset: &StateChangeset,\n\n state: EvmFuzzState,\n\n) {\n\n let state = &mut *state.borrow_mut();\n\n\n\n for (address, account) in state_changeset {\n\n // Insert basic account information\n\n state.insert(H256::from(*address).into());\n\n state.insert(utils::u256_to_h256_le(account.info.balance).into());\n\n state.insert(utils::u256_to_h256_le(U256::from(account.info.nonce)).into());\n\n\n\n // Insert storage\n\n for (slot, value) in &account.storage {\n\n state.insert(utils::u256_to_h256_le(*slot).into());\n\n state.insert(utils::u256_to_h256_le(*value).into());\n\n }\n\n\n\n // Insert push bytes\n", "file_path": 
"evm/src/fuzz/strategies/state.rs", "rank": 67, "score": 186262.69504291547 }, { "content": "pub fn read_string(path: impl AsRef<Path>) -> String {\n\n let path = path.as_ref();\n\n pretty_err(path, std::fs::read_to_string(path))\n\n}\n\n\n\n/// A simple wrapper around a process::Command with some conveniences.\n\n#[derive(Debug)]\n\npub struct TestCommand {\n\n saved_cwd: PathBuf,\n\n /// The project used to launch this command.\n\n project: TestProject,\n\n /// The actual command we use to control the process.\n\n cmd: Command,\n\n // initial: Command,\n\n current_dir_lock: Option<parking_lot::lock_api::MutexGuard<'static, parking_lot::RawMutex, ()>>,\n\n}\n\n\n\nimpl TestCommand {\n\n /// Returns a mutable reference to the underlying command.\n\n pub fn cmd(&mut self) -> &mut Command {\n", "file_path": "cli/test-utils/src/util.rs", "rank": 68, "score": 185567.15372308268 }, { "content": "pub fn handle_expect_revert(\n\n is_create: bool,\n\n expected_revert: &Bytes,\n\n status: Return,\n\n retdata: Bytes,\n\n) -> Result<(Option<Address>, Bytes), Bytes> {\n\n if matches!(status, return_ok!()) {\n\n return Err(\"Call did not revert as expected\".to_string().encode().into())\n\n }\n\n\n\n if !expected_revert.is_empty() && retdata.is_empty() {\n\n return Err(\"Call reverted as expected, but without data\".to_string().encode().into())\n\n }\n\n\n\n let (err, actual_revert): (_, Bytes) = match retdata {\n\n _ if retdata.len() >= 4 && retdata[0..4] == [8, 195, 121, 160] => {\n\n // It's a revert string, so we do some conversion to perform the check\n\n let decoded_data: Bytes =\n\n ethers::abi::decode(&[ethers::abi::ParamType::Bytes], &retdata[4..])\n\n .expect(\"String error code, but data is not a string\")[0]\n", "file_path": "evm/src/executor/inspector/cheatcodes/expect.rs", "rank": 69, "score": 182945.8529519952 }, { "content": "fn string_or_number_opt<'de, D>(deserializer: D) -> Result<Option<u64>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n use 
serde::de::Error;\n\n\n\n match Option::<Gas>::deserialize(deserializer)? {\n\n Some(gas) => match gas {\n\n Gas::Number(num) => Ok(Some(num)),\n\n Gas::Text(s) => s.parse().map(Some).map_err(D::Error::custom),\n\n },\n\n _ => Ok(None),\n\n }\n\n}\n", "file_path": "evm/src/executor/opts.rs", "rank": 70, "score": 181515.63810266776 }, { "content": "#[derive(Debug)]\n\nenum BackendRequest {\n\n /// Fetch the account info\n\n Basic(Address, OneshotSender<AccountInfo>),\n\n /// Fetch a storage slot\n\n Storage(Address, U256, OneshotSender<U256>),\n\n /// Fetch a block hash\n\n BlockHash(u64, OneshotSender<H256>),\n\n /// Sets the pinned block to fetch data from\n\n SetPinnedBlock(BlockId),\n\n}\n\n\n\n/// Handles an internal provider and listens for requests.\n\n///\n\n/// This handler will remain active as long as it is reachable (request channel still open) and\n\n/// requests are in progress.\n\n#[must_use = \"BackendHandler does nothing unless polled.\"]\n\npub struct BackendHandler<M: Middleware> {\n\n provider: M,\n\n /// Stores all the data.\n\n db: BlockchainDb,\n", "file_path": "evm/src/executor/fork/backend.rs", "rank": 71, "score": 181321.2170987606 }, { "content": "/// Returns the fixture path depending on whether the current terminal is tty\n\n///\n\n/// This is useful in combination with [OutputExt]\n\npub fn tty_fixture_path(path: impl AsRef<Path>) -> PathBuf {\n\n let path = path.as_ref();\n\n if *IS_TTY {\n\n return if let Some(ext) = path.extension().and_then(|s| s.to_str()) {\n\n path.with_extension(format!(\"tty.{}\", ext))\n\n } else {\n\n path.with_extension(\"tty\")\n\n }\n\n }\n\n path.to_path_buf()\n\n}\n\n\n", "file_path": "cli/test-utils/src/util.rs", "rank": 72, "score": 180286.33077275945 }, { "content": "/// Maps an opcode and returns a vector of named affected indices\n\npub fn stack_indices_affected(op: u8) -> Vec<(usize, &'static str)> {\n\n match op {\n\n 0x01 => vec![(0, \"a\"), (1, \"b\")],\n\n 0x02 => vec![(0, \"a\"), (1, 
\"b\")],\n\n 0x03 => vec![(0, \"a\"), (1, \"b\")],\n\n 0x04 => vec![(0, \"a\"), (1, \"b\")],\n\n 0x05 => vec![(0, \"a\"), (1, \"b\")],\n\n 0x06 => vec![(0, \"a\"), (1, \"b\")],\n\n 0x07 => vec![(0, \"a\"), (1, \"b\")],\n\n 0x08 => vec![(0, \"a\"), (1, \"b\"), (2, \"N\")],\n\n 0x09 => vec![(0, \"a\"), (1, \"b\"), (2, \"N\")],\n\n 0x0a => vec![(0, \"a\"), (1, \"exponent\")],\n\n 0x0b => vec![(0, \"b\"), (1, \"x\")],\n\n 0x10 => vec![(0, \"a\"), (1, \"b\")],\n\n 0x11 => vec![(0, \"a\"), (1, \"b\")],\n\n 0x12 => vec![(0, \"a\"), (1, \"b\")],\n\n 0x13 => vec![(0, \"a\"), (1, \"b\")],\n\n 0x14 => vec![(0, \"a\"), (1, \"b\")],\n\n 0x15 => vec![(0, \"a\")],\n\n 0x16 => vec![(0, \"a\"), (1, \"b\")],\n", "file_path": "ui/src/op_effects.rs", "rank": 73, "score": 179399.98385924008 }, { "content": "/// Returns a list of _unique_ paths to all folders under `root` that contain a `foundry.toml` file\n\n///\n\n/// This will also resolve symlinks\n\n///\n\n/// # Example\n\n///\n\n/// ```no_run\n\n/// use foundry_config::utils;\n\n/// let dirs = utils::foundry_toml_dirs(\"./lib\");\n\n/// ```\n\n///\n\n/// for following layout this will return\n\n/// `[\"lib/dep1\", \"lib/dep2\"]`\n\n///\n\n/// ```text\n\n/// lib\n\n/// └── dep1\n\n/// │ ├── foundry.toml\n\n/// └── dep2\n\n/// ├── foundry.toml\n\n/// ```\n\npub fn foundry_toml_dirs(root: impl AsRef<Path>) -> Vec<PathBuf> {\n\n walkdir::WalkDir::new(root)\n\n .max_depth(1)\n\n .into_iter()\n\n .filter_map(Result::ok)\n\n .filter(|e| e.file_type().is_dir())\n\n .filter_map(|e| ethers_solc::utils::canonicalize(e.path()).ok())\n\n .filter(|p| p.join(Config::FILE_NAME).exists())\n\n .collect()\n\n}\n", "file_path": "config/src/utils.rs", "rank": 74, "score": 179346.21307224547 }, { "content": "/// Returns the current duration since unix epoch.\n\npub fn duration_since_unix_epoch() -> Duration {\n\n use std::time::SystemTime;\n\n let now = SystemTime::now();\n\n now.duration_since(SystemTime::UNIX_EPOCH)\n\n .unwrap_or_else(|err| 
panic!(\"Current time {:?} is invalid: {:?}\", now, err))\n\n}\n", "file_path": "anvil/src/eth/backend/time.rs", "rank": 75, "score": 176327.05846862102 }, { "content": "/// Given an ABI encoded error string with the function signature `Error(string)`, it decodes\n\n/// it and returns the revert error message.\n\npub fn decode_revert(error: &[u8], maybe_abi: Option<&Abi>) -> Result<String> {\n\n if error.len() >= 4 {\n\n match error[0..4] {\n\n // keccak(Panic(uint256))\n\n [78, 72, 123, 113] => {\n\n // ref: https://soliditydeveloper.com/solidity-0.8\n\n match error[error.len() - 1] {\n\n 1 => {\n\n // assert\n\n Ok(\"Assertion violated\".to_string())\n\n }\n\n 17 => {\n\n // safemath over/underflow\n\n Ok(\"Arithmetic over/underflow\".to_string())\n\n }\n\n 18 => {\n\n // divide by 0\n\n Ok(\"Division or modulo by 0\".to_string())\n\n }\n\n 33 => {\n", "file_path": "utils/src/lib.rs", "rank": 76, "score": 176194.98719128684 }, { "content": "pub fn with_retry<T, F>(mut callback: F) -> eyre::Result<T>\n\nwhere\n\n F: FnMut() -> eyre::Result<T>,\n\n{\n\n let mut retry = Retry::new(2);\n\n loop {\n\n if let Some(ret) = retry.r#try(&mut callback)? 
{\n\n return Ok(ret)\n\n }\n\n }\n\n}\n\n\n", "file_path": "binder/src/utils.rs", "rank": 77, "score": 175860.5617399239 }, { "content": "#[derive(Debug)]\n\nstruct DbSnapshot {\n\n local: CacheDB<SharedBackend>,\n\n accounts: BTreeMap<Address, AccountInfo>,\n\n storage: BTreeMap<Address, BTreeMap<U256, U256>>,\n\n block_hashes: BTreeMap<u64, H256>,\n\n}\n", "file_path": "anvil/src/eth/backend/mem/fork_db.rs", "rank": 78, "score": 175087.51808057082 }, { "content": "/// This bundles all required revm traits\n\npub trait Db: DatabaseRef + Database + DatabaseCommit + Send + Sync {\n\n /// Inserts an account\n\n fn insert_account(&mut self, address: Address, account: AccountInfo);\n\n\n\n /// Sets the nonce of the given address\n\n fn set_nonce(&mut self, address: Address, nonce: u64) {\n\n let mut info = self.basic(address);\n\n info.nonce = nonce;\n\n self.insert_account(address, info);\n\n }\n\n\n\n /// Sets the balance of the given address\n\n fn set_balance(&mut self, address: Address, balance: U256) {\n\n let mut info = self.basic(address);\n\n info.balance = balance;\n\n self.insert_account(address, info);\n\n }\n\n\n\n /// Sets the balance of the given address\n\n fn set_code(&mut self, address: Address, code: Bytes) {\n", "file_path": "anvil/src/eth/backend/db.rs", "rank": 79, "score": 173566.0232563414 }, { "content": "/// Artifact/Contract identifier can take the following form:\n\n/// `<artifact file name>:<contract name>`, the `artifact file name` is the name of the json file of\n\n/// the contract's artifact and the contract name is the name of the solidity contract, like\n\n/// `SafeTransferLibTest.json:SafeTransferLibTest`\n\n///\n\n/// This returns the `contract name` part\n\n///\n\n/// # Example\n\n///\n\n/// ```\n\n/// assert_eq!(\n\n/// \"SafeTransferLibTest\",\n\n/// utils::get_contract_name(\"SafeTransferLibTest.json:SafeTransferLibTest\")\n\n/// );\n\n/// ```\n\npub fn get_contract_name(id: &str) -> &str {\n\n 
id.rsplit(':').next().unwrap_or(id)\n\n}\n\n\n", "file_path": "cli/src/utils.rs", "rank": 80, "score": 173288.72046812615 }, { "content": "#[track_caller]\n\npub fn pretty_err<T, E: std::error::Error>(path: impl AsRef<Path>, res: Result<T, E>) -> T {\n\n match res {\n\n Ok(t) => t,\n\n Err(err) => panic!(\"{}: {:?}\", path.as_ref().display(), err),\n\n }\n\n}\n\n\n", "file_path": "cli/test-utils/src/util.rs", "rank": 81, "score": 172938.28270948824 }, { "content": "/// Trace identifiers figure out what ABIs and labels belong to all the addresses of the trace.\n\npub trait TraceIdentifier {\n\n // TODO: Update docs\n\n /// Attempts to identify an address in one or more call traces.\n\n #[allow(clippy::type_complexity)]\n\n fn identify_addresses(\n\n &self,\n\n addresses: Vec<(&Address, Option<&Vec<u8>>)>,\n\n ) -> Vec<AddressIdentity>;\n\n}\n", "file_path": "evm/src/trace/identifier/mod.rs", "rank": 82, "score": 172268.4229551963 }, { "content": "pub fn parse_block_id(s: &str) -> eyre::Result<BlockId> {\n\n Ok(match s {\n\n \"earliest\" => BlockId::Number(BlockNumber::Earliest),\n\n \"latest\" => BlockId::Number(BlockNumber::Latest),\n\n \"pending\" => BlockId::Number(BlockNumber::Pending),\n\n s if s.starts_with(\"0x\") => BlockId::Hash(H256::from_str(s)?),\n\n s => BlockId::Number(BlockNumber::Number(u64::from_str(s)?.into())),\n\n })\n\n}\n\n\n", "file_path": "cli/src/opts/cast.rs", "rank": 83, "score": 172069.36953281023 }, { "content": "fn topics_to_bloom_filter(topics: &ValueOrArray<Option<H256>>) -> BloomFilter {\n\n let mut blooms = BloomFilter::new();\n\n match topics {\n\n ValueOrArray::Value(topic) => {\n\n if let Some(topic) = topic {\n\n let bloom: Bloom = BloomInput::Raw(topic.as_ref()).into();\n\n blooms.push(Some(bloom));\n\n } else {\n\n blooms.push(None);\n\n }\n\n }\n\n ValueOrArray::Array(topics) => {\n\n if topics.is_empty() {\n\n blooms.push(None);\n\n } else {\n\n for topic in topics.iter() {\n\n if let Some(topic) = topic {\n\n let bloom: 
Bloom = BloomInput::Raw(topic.as_ref()).into();\n\n blooms.push(Some(bloom));\n\n } else {\n\n blooms.push(None);\n\n }\n\n }\n\n }\n\n }\n\n }\n\n blooms\n\n}\n\n\n", "file_path": "anvil/core/src/eth/filter.rs", "rank": 84, "score": 172012.36809412355 }, { "content": "/// Parses the address the contract was deployed to\n\npub fn parse_deployed_address(out: &str) -> Option<String> {\n\n for line in out.lines() {\n\n if line.starts_with(\"Deployed to\") {\n\n return Some(line.trim_start_matches(\"Deployed to: \").to_string())\n\n }\n\n }\n\n None\n\n}\n\n\n", "file_path": "cli/tests/it/utils.rs", "rank": 85, "score": 170525.81601011698 }, { "content": "/// Returns a set of various contract addresses\n\npub fn contract_addresses(chain: Chain) -> Vec<Address> {\n\n vec![\n\n contract(\"dai\").unwrap().address(chain).unwrap(),\n\n contract(\"usdc\").unwrap().address(chain).unwrap(),\n\n contract(\"weth\").unwrap().address(chain).unwrap(),\n\n contract(\"uniswapV3Factory\").unwrap().address(chain).unwrap(),\n\n contract(\"uniswapV3SwapRouter02\").unwrap().address(chain).unwrap(),\n\n ]\n\n}\n", "file_path": "anvil/tests/it/utils.rs", "rank": 86, "score": 170521.06465519316 }, { "content": "type EtherscanFuture =\n\n Pin<Box<dyn Future<Output = (Address, Result<ContractMetadata, EtherscanError>)>>>;\n\n\n\n/// A rate limit aware Etherscan client.\n\n///\n\n/// Fetches information about multiple addresses concurrently, while respecting rate limits.\n\npub struct EtherscanFetcher {\n\n /// The Etherscan client\n\n client: etherscan::Client,\n\n /// The time we wait if we hit the rate limit\n\n timeout: Duration,\n\n /// The interval we are currently waiting for before making a new request\n\n backoff: Option<Interval>,\n\n /// The maximum amount of requests to send concurrently\n\n concurrency: usize,\n\n /// The addresses we have yet to make requests for\n\n queue: Vec<Address>,\n\n /// The in progress requests\n\n in_progress: FuturesUnordered<EtherscanFuture>,\n\n}\n", 
"file_path": "evm/src/trace/identifier/etherscan.rs", "rank": 87, "score": 169504.45301373396 }, { "content": "#[test]\n\nfn test_deploy_reverting() {\n\n let prj = TempProject::dapptools().unwrap();\n\n prj.add_source(\n\n \"Contract\",\n\n r#\"\n\npragma solidity 0.8.13;\n\ncontract Contract {\n\n constructor() {\n\n require(false, \"\");\n\n }\n\n}\n\n\"#,\n\n )\n\n .unwrap();\n\n\n\n let mut compiled = prj.compile().unwrap();\n\n assert!(!compiled.has_compiler_errors());\n\n let contract = compiled.remove(\"Contract\").unwrap();\n\n let (abi, bytecode, _) = contract.into_contract_bytecode().into_parts();\n\n\n", "file_path": "anvil/tests/it/transaction.rs", "rank": 88, "score": 167996.28541806992 }, { "content": "/// Collects all push bytes from the given bytecode.\n\nfn collect_push_bytes(code: Bytes) -> Vec<[u8; 32]> {\n\n let mut bytes: Vec<[u8; 32]> = Vec::new();\n\n\n\n // We use [SpecId::LATEST] since we do not really care what spec it is - we are not interested\n\n // in gas costs.\n\n let opcode_infos = spec_opcode_gas(SpecId::LATEST);\n\n\n\n let mut i = 0;\n\n while i < code.len().min(PUSH_BYTE_ANALYSIS_LIMIT) {\n\n let op = code[i];\n\n if opcode_infos[op as usize].is_push {\n\n let push_size = (op - opcode::PUSH1 + 1) as usize;\n\n let push_start = i + 1;\n\n let push_end = push_start + push_size;\n\n\n\n // As a precaution, if a fuzz test deploys malformed bytecode (such as using `CREATE2`)\n\n // this will terminate the loop early.\n\n if push_start > code.len() || push_end > code.len() {\n\n return bytes\n\n }\n", "file_path": "evm/src/fuzz/strategies/state.rs", "rank": 89, "score": 166158.56056070718 }, { "content": "fn prank(\n\n state: &mut Cheatcodes,\n\n prank_caller: Address,\n\n prank_origin: Address,\n\n new_caller: Address,\n\n new_origin: Option<Address>,\n\n depth: u64,\n\n single_call: bool,\n\n) -> Result<Bytes, Bytes> {\n\n let prank = Prank { prank_caller, prank_origin, new_caller, new_origin, depth, single_call };\n\n\n\n if 
state.prank.is_some() {\n\n return Err(\"You have an active prank already.\".to_string().encode().into())\n\n }\n\n\n\n state.prank = Some(prank);\n\n Ok(Bytes::new())\n\n}\n\n\n\n#[derive(Clone, Debug, Default)]\n\npub struct RecordAccess {\n\n pub reads: BTreeMap<Address, Vec<U256>>,\n\n pub writes: BTreeMap<Address, Vec<U256>>,\n\n}\n\n\n", "file_path": "evm/src/executor/inspector/cheatcodes/env.rs", "rank": 90, "score": 163333.03334181258 }, { "content": "fn addr(private_key: U256) -> Result<Bytes, Bytes> {\n\n if private_key.is_zero() {\n\n return Err(\"Private key cannot be 0.\".to_string().encode().into())\n\n }\n\n\n\n let mut bytes: [u8; 32] = [0; 32];\n\n private_key.to_big_endian(&mut bytes);\n\n\n\n let key = SigningKey::from_bytes(&bytes).map_err(|err| err.to_string().encode())?;\n\n let addr = utils::secret_key_to_address(&key);\n\n Ok(addr.encode().into())\n\n}\n\n\n", "file_path": "evm/src/executor/inspector/cheatcodes/util.rs", "rank": 91, "score": 163223.18373047293 }, { "content": "#[allow(unused)]\n\npub fn block_on<F: Future>(future: F) -> F::Output {\n\n let rt = tokio::runtime::Runtime::new().expect(\"could not start tokio rt\");\n\n rt.block_on(future)\n\n}\n\n\n", "file_path": "cli/src/utils.rs", "rank": 92, "score": 162578.3817983294 }, { "content": "fn print_traces(result: &mut RunResult, decoder: CallTraceDecoder) -> eyre::Result<()> {\n\n if result.traces.is_empty() {\n\n eyre::bail!(\"Unexpected error: No traces. 
Please report this as a bug: https://github.com/foundry-rs/foundry/issues/new?assignees=&labels=T-bug&template=BUG-FORM.yml\");\n\n }\n\n\n\n println!(\"Traces:\");\n\n for (_, trace) in &mut result.traces {\n\n decoder.decode(trace);\n\n println!(\"{trace}\");\n\n }\n\n println!();\n\n\n\n if result.success {\n\n println!(\"{}\", Paint::green(\"Script ran successfully.\"));\n\n } else {\n\n println!(\"{}\", Paint::red(\"Script failed.\"));\n\n }\n\n\n\n println!(\"Gas used: {}\", result.gas);\n\n Ok(())\n\n}\n\n\n", "file_path": "cli/src/cmd/cast/run.rs", "rank": 93, "score": 162051.33491100167 }, { "content": "/// Helper function that returns the [Fork] to use, if any.\n\n///\n\n/// storage caching for the [Fork] will be enabled if\n\n/// - `fork_url` is present\n\n/// - `fork_block_number` is present\n\n/// - [StorageCachingConfig] allows the `fork_url` + chain id pair\n\n/// - storage is allowed (`no_storage_caching = false`)\n\n///\n\n/// If all these criteria are met, then storage caching is enabled and storage info will be written\n\n/// to [Config::foundry_cache_dir()]/<str(chainid)>/<block>/storage.json\n\n///\n\n/// for `mainnet` and `--fork-block-number 14435000` on mac the corresponding storage cache will be\n\n/// at `~/.foundry/cache/mainnet/14435000/storage.json`\n\npub fn get_fork(evm_opts: &EvmOpts, config: &StorageCachingConfig) -> Option<Fork> {\n\n /// Returns the path where the cache file should be stored\n\n ///\n\n /// or `None` if caching should not be enabled\n\n ///\n\n /// See also [ Config::foundry_block_cache_file()]\n\n fn get_block_storage_path(\n\n evm_opts: &EvmOpts,\n\n config: &StorageCachingConfig,\n\n chain_id: u64,\n\n ) -> Option<PathBuf> {\n\n if evm_opts.no_storage_caching {\n\n // storage caching explicitly opted out of\n\n return None\n\n }\n\n let url = evm_opts.fork_url.as_ref()?;\n\n // cache only if block explicitly pinned\n\n let block = evm_opts.fork_block_number?;\n\n\n\n if config.enable_for_endpoint(url) && 
config.enable_for_chain_id(chain_id) {\n", "file_path": "cli/src/utils.rs", "rank": 94, "score": 161270.74741083276 }, { "content": "/// Given a function, it returns a strategy which generates valid calldata\n\n/// for that function's input types.\n\npub fn fuzz_calldata(func: Function) -> BoxedStrategy<Bytes> {\n\n // We need to compose all the strategies generated for each parameter in all\n\n // possible combinations\n\n let strats = func.inputs.iter().map(|input| fuzz_param(&input.kind)).collect::<Vec<_>>();\n\n\n\n strats\n\n .prop_map(move |tokens| {\n\n tracing::trace!(input = ?tokens);\n\n func.encode_input(&tokens).unwrap().into()\n\n })\n\n .boxed()\n\n}\n", "file_path": "evm/src/fuzz/strategies/calldata.rs", "rank": 95, "score": 160715.75210019923 }, { "content": "/// Prepare the authentication callbacks for cloning a git repository.\n\n///\n\n/// The main purpose of this function is to construct the \"authentication\n\n/// callback\" which is used to clone a repository. This callback will attempt to\n\n/// find the right authentication on the system (without user input) and will\n\n/// guide libgit2 in doing so.\n\n///\n\n/// The callback is provided `allowed` types of credentials, and we try to do as\n\n/// much as possible based on that:\n\n///\n\n/// * Prioritize SSH keys from the local ssh agent as they're likely the most reliable. The username\n\n/// here is prioritized from the credential callback, then from whatever is configured in git\n\n/// itself, and finally we fall back to the generic user of `git`.\n\n///\n\n/// * If a username/password is allowed, then we fallback to git2-rs's implementation of the\n\n/// credential helper. 
This is what is configured with `credential.helper` in git, and is the\n\n/// interface for the macOS keychain, for example.\n\n///\n\n/// * After the above two have failed, we just kinda grapple attempting to return *something*.\n\n///\n\n/// If any form of authentication fails, libgit2 will repeatedly ask us for\n\n/// credentials until we give it a reason to not do so. To ensure we don't\n\n/// just sit here looping forever we keep track of authentications we've\n\n/// attempted and we don't try the same ones again.\n\nfn with_authentication<T, F>(url: &str, cfg: &git2::Config, mut f: F) -> eyre::Result<T>\n\nwhere\n\n F: FnMut(&mut git2::Credentials<'_>) -> eyre::Result<T>,\n\n{\n\n let mut cred_helper = git2::CredentialHelper::new(url);\n\n cred_helper.config(cfg);\n\n\n\n let mut ssh_username_requested = false;\n\n let mut cred_helper_bad = None;\n\n let mut ssh_agent_attempts = Vec::new();\n\n let mut any_attempts = false;\n\n let mut tried_sshkey = false;\n\n let mut url_attempt = None;\n\n\n\n let orig_url = url;\n\n let mut res = f(&mut |url, username, allowed| {\n\n any_attempts = true;\n\n if url != orig_url {\n\n url_attempt = Some(url.to_string());\n\n }\n", "file_path": "binder/src/utils.rs", "rank": 96, "score": 160618.77300128853 }, { "content": "/// Returns the remappings from the given var\n\n///\n\n/// Returns `None` if the env var is not set, otherwise all Remappings, See\n\n/// `remappings_from_newline`\n\npub fn remappings_from_env_var(env_var: &str) -> Option<Result<Vec<Remapping>, RemappingError>> {\n\n let val = std::env::var(env_var).ok()?;\n\n Some(remappings_from_newline(&val).collect())\n\n}\n\n\n", "file_path": "config/src/utils.rs", "rank": 97, "score": 160237.47740840103 }, { "content": "// Returns the function parameter formatted as a string, as well as inserts into the provided\n\n// `structs` set in order to create type definitions for any Abi Encoder v2 structs.\n\nfn format_param(param: &Param, structs: &mut HashSet<String>) -> 
String {\n\n let kind = get_param_type(&param.kind, &param.name, param.internal_type.as_deref(), structs);\n\n\n\n // add `memory` if required (not needed for events, only for functions)\n\n let is_memory = matches!(\n\n param.kind,\n\n ParamType::Array(_) |\n\n ParamType::Bytes |\n\n ParamType::String |\n\n ParamType::FixedArray(_, _) |\n\n ParamType::Tuple(_),\n\n );\n\n let kind = if is_memory { format!(\"{kind} memory\") } else { kind };\n\n\n\n if param.name.is_empty() {\n\n kind\n\n } else {\n\n format!(\"{} {}\", kind, param.name)\n\n }\n\n}\n\n\n", "file_path": "utils/src/lib.rs", "rank": 98, "score": 159014.3011562827 }, { "content": "fn clean_chain_cache(chain: Chain, blocks: Vec<u64>) -> Result<()> {\n\n if let Ok(foundry_chain) = FoundryConfigChain::try_from(chain) {\n\n if blocks.is_empty() {\n\n Config::clean_foundry_chain_cache(foundry_chain)?;\n\n } else {\n\n for block in blocks {\n\n Config::clean_foundry_block_cache(foundry_chain, block)?;\n\n }\n\n }\n\n } else {\n\n eyre::bail!(\"failed to map chain\");\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "cli/src/cmd/forge/cache.rs", "rank": 99, "score": 158761.68833110956 } ]
Rust
menoh/src/model/mod.rs
Hakuyume/menoh-rs
2d463e94c0159a56821ec6766cba681cdc6a5edd
use menoh_sys; use std::ffi; use std::mem; use std::ptr; use std::slice; use Dtype; use handler::Handler; use Error; use error::check; pub struct Model { handle: menoh_sys::menoh_model_handle, } impl Model { pub fn get_variable_dims(&self, name: &str) -> Result<Vec<usize>, Error> { let name = ffi::CString::new(name)?; unsafe { let mut size = 0; check(menoh_sys::menoh_model_get_variable_dims_size(self.handle, name.as_ptr(), &mut size))?; let mut dims = Vec::with_capacity(size as _); for index in 0..size { let mut dim = 0; check(menoh_sys::menoh_model_get_variable_dims_at(self.handle, name.as_ptr(), index, &mut dim))?; dims.push(dim as _); } Ok(dims) } } fn get_variable_dtype(&self, name: &str) -> Result<menoh_sys::menoh_dtype, Error> { let name = ffi::CString::new(name)?; unsafe { let mut dtype = mem::uninitialized(); check(menoh_sys::menoh_model_get_variable_dtype(self.handle, name.as_ptr(), &mut dtype))?; Ok(dtype) } } pub fn get_variable<T>(&self, name: &str) -> Result<(Vec<usize>, &[T]), Error> where T: Dtype { T::check(self.get_variable_dtype(name)?)?; let dims = self.get_variable_dims(name)?; let name = ffi::CString::new(name)?; let mut buffer = ptr::null_mut(); unsafe { check(menoh_sys::menoh_model_get_variable_buffer_handle(self.handle, name.as_ptr(), &mut buffer))?; let buffer = slice::from_raw_parts(buffer as _, dims.iter().product()); Ok((dims, buffer)) } } pub fn get_variable_mut<T>(&mut self, name: &str) -> Result<(Vec<usize>, &mut [T]), Error> where T: Dtype { T::check(self.get_variable_dtype(name)?)?; let dims = self.get_variable_dims(name)?; let name = ffi::CString::new(name)?; let mut buffer = ptr::null_mut(); unsafe { check(menoh_sys::menoh_model_get_variable_buffer_handle(self.handle, name.as_ptr(), &mut buffer))?; let buffer = slice::from_raw_parts_mut(buffer as _, dims.iter().product()); Ok((dims, buffer)) } } pub fn run(&mut self) -> Result<(), Error> { unsafe { check(menoh_sys::menoh_model_run(self.handle)) } } } impl Handler for Model { type 
Handle = menoh_sys::menoh_model_handle; unsafe fn from_handle(handle: Self::Handle) -> Self { Self { handle } } unsafe fn handle(&self) -> Self::Handle { self.handle } } impl Drop for Model { fn drop(&mut self) { unsafe { menoh_sys::menoh_delete_model(self.handle) } } }
use menoh_sys; use std::ffi; use std::mem; use std::ptr; use std::slice; use Dtype; use handler::Handler; use Error; use error::check; pub struct Model { handle: menoh_sys::menoh_model_handle, } impl Model { pub fn get_variable_dims(&self, name: &str) -> Result<Vec<usize>, Error> { let name = ffi::CString::new(name)?; unsafe { let mut size = 0; check(menoh_sys::menoh_model_get_variable_dims_size(self.handle, name.as_ptr(), &mut size))?; let mut dims = Vec::with_capacity(size as _); for index in 0..size { let mut dim = 0; check(menoh_sys::menoh_model_get_variable_dims_at(self.handle, name.as_ptr(), index, &mut dim))?; dims.push(dim as _); } Ok(dims) } } fn get_variable_dtype(&self, name: &str) -> Result<menoh_sys::menoh_dtype, Error> { let name = ffi::CString::new(name)?; unsafe { let mut dtype = mem::uninitialized(); check(menoh_sys::menoh_model_get_variable_dtype(self.handle, name.as_ptr(), &mut dtype))?; Ok(dtype) } } pub fn get_variable<T>(&self, name: &str) -> Result<(Vec<usize>, &[T]), Error> where T: Dtype { T::check(self.get_variable_dtype(name)?)?; let dims = self.get_variable_dims(name)?; let name = ffi::CString::new(name)?; let mut buffer = ptr::null_mut(); unsaf
pub fn get_variable_mut<T>(&mut self, name: &str) -> Result<(Vec<usize>, &mut [T]), Error> where T: Dtype { T::check(self.get_variable_dtype(name)?)?; let dims = self.get_variable_dims(name)?; let name = ffi::CString::new(name)?; let mut buffer = ptr::null_mut(); unsafe { check(menoh_sys::menoh_model_get_variable_buffer_handle(self.handle, name.as_ptr(), &mut buffer))?; let buffer = slice::from_raw_parts_mut(buffer as _, dims.iter().product()); Ok((dims, buffer)) } } pub fn run(&mut self) -> Result<(), Error> { unsafe { check(menoh_sys::menoh_model_run(self.handle)) } } } impl Handler for Model { type Handle = menoh_sys::menoh_model_handle; unsafe fn from_handle(handle: Self::Handle) -> Self { Self { handle } } unsafe fn handle(&self) -> Self::Handle { self.handle } } impl Drop for Model { fn drop(&mut self) { unsafe { menoh_sys::menoh_delete_model(self.handle) } } }
e { check(menoh_sys::menoh_model_get_variable_buffer_handle(self.handle, name.as_ptr(), &mut buffer))?; let buffer = slice::from_raw_parts(buffer as _, dims.iter().product()); Ok((dims, buffer)) } }
function_block-function_prefixed
[ { "content": "pub fn check(code: menoh_sys::menoh_error_code) -> Result<(), Error> {\n\n let code = code as menoh_sys::menoh_error_code_constant;\n\n\n\n if code == menoh_sys::menoh_error_code_success {\n\n Ok(())\n\n } else {\n\n let message = unsafe {\n\n ffi::CStr::from_ptr(menoh_sys::menoh_get_last_error_message())\n\n .to_owned()\n\n .into_string()\n\n .unwrap_or(\"[failed to decode message]\".to_owned())\n\n };\n\n match code {\n\n menoh_sys::menoh_error_code_std_error => Err(Error::StdError(message)),\n\n menoh_sys::menoh_error_code_unknown_error => Err(Error::UnknownError(message)),\n\n menoh_sys::menoh_error_code_invalid_filename => Err(Error::InvalidFilename(message)),\n\n menoh_sys::menoh_error_code_unsupported_onnx_opset_version => {\n\n Err(Error::UnsupportedOnnxOpsetVersion(message))\n\n }\n\n menoh_sys::menoh_error_code_onnx_parse_error => Err(Error::OnnxParseError(message)),\n", "file_path": "menoh/src/error.rs", "rank": 0, "score": 90997.03714109448 }, { "content": "fn crop_and_resize(mut img: image::DynamicImage, size: usize) -> image::DynamicImage {\n\n let (h, w) = (img.height(), img.width());\n\n let min = cmp::min(w, h);\n\n img.crop((w - min) / 2, (h - min) / 2, min, min)\n\n .resize_exact(size as _, size as _, image::FilterType::Nearest)\n\n}\n\n\n", "file_path": "menoh/examples/vgg16.rs", "rank": 1, "score": 73314.26311655878 }, { "content": "fn main() -> Result<(), Box<dyn(error::Error)>> {\n\n let args: Args = docopt::Docopt::new(USAGE)\n\n .and_then(|d| d.deserialize())\n\n .unwrap_or_else(|e| e.exit());\n\n\n\n const INSIZE: usize = 224;\n\n const CONV1_1_IN_NAME: &'static str = \"140326425860192\";\n\n const FC6_OUT_NAME: &'static str = \"140326200777584\";\n\n const SOFTMAX_OUT_NAME: &'static str = \"140326200803680\";\n\n\n\n let mut model = menoh::Builder::from_onnx(args.flag_m)?\n\n .add_input::<f32>(CONV1_1_IN_NAME, &[1, 3, INSIZE, INSIZE])?\n\n .add_output::<f32>(FC6_OUT_NAME)?\n\n .add_output::<f32>(SOFTMAX_OUT_NAME)?\n\n 
.build(\"mkldnn\", \"\")?;\n\n\n\n let img = image::open(args.flag_i)?;\n\n {\n\n let (_, conv1_1_buf) = model.get_variable_mut::<f32>(CONV1_1_IN_NAME)?;\n\n set_image(conv1_1_buf, &crop_and_resize(img, INSIZE));\n", "file_path": "menoh/examples/vgg16.rs", "rank": 2, "score": 70956.55577211082 }, { "content": "#[test]\n\nfn check_f32() {\n\n f32::check(menoh_sys::menoh_dtype_float as _).unwrap();\n\n}\n\n\n", "file_path": "menoh/src/dtype/tests.rs", "rank": 3, "score": 58963.644384602594 }, { "content": "#[test]\n\n#[should_panic(expected=\"DtypeMismatch\")]\n\nfn check_f32_invalid() {\n\n f32::check((menoh_sys::menoh_dtype_float + 1) as _).unwrap();\n\n}\n", "file_path": "menoh/src/dtype/tests.rs", "rank": 4, "score": 57242.81367571778 }, { "content": "#[test]\n\n#[should_panic(expected=\"InvalidFilename\")]\n\nfn from_onnx_invalid_path() {\n\n ModelData::from_onnx(\"invalid.onnx\").unwrap();\n\n}\n", "file_path": "menoh/src/model_data/tests.rs", "rank": 5, "score": 54845.35553171036 }, { "content": "#[test]\n\n#[should_panic(expected=\"InvalidDimsSize\")]\n\nfn add_input_invalid_dims() {\n\n let mut vpt_builder = VariableProfileTableBuilder::new().unwrap();\n\n vpt_builder\n\n .add_input::<f32>(\"input\", &[1, 3, 224])\n\n .unwrap();\n\n}\n\n\n", "file_path": "menoh/src/variable_profile_table_builder/tests.rs", "rank": 6, "score": 51713.11266323248 }, { "content": "#[test]\n\n#[should_panic(expected=\"NulError\")]\n\nfn add_output_invalid_name() {\n\n let mut vpt_builder = VariableProfileTableBuilder::new().unwrap();\n\n vpt_builder.add_output::<f32>(\"out\\0put\").unwrap();\n\n}\n", "file_path": "menoh/src/variable_profile_table_builder/tests.rs", "rank": 7, "score": 51650.23093568905 }, { "content": "#[test]\n\n#[should_panic(expected=\"NulError\")]\n\nfn add_input_invalid_name() {\n\n let mut vpt_builder = VariableProfileTableBuilder::new().unwrap();\n\n vpt_builder\n\n .add_input::<f32>(\"in\\0put\", &[1, 3, 224, 224])\n\n .unwrap();\n\n}\n\n\n", "file_path": 
"menoh/src/variable_profile_table_builder/tests.rs", "rank": 8, "score": 51650.23093568905 }, { "content": "fn set_image<T>(buf: &mut [T], img: &image::DynamicImage)\n\n where T: From<u8>\n\n{\n\n let (h, w) = (img.height() as usize, img.width() as usize);\n\n assert_eq!(buf.len(), 3 * h * w);\n\n\n\n for c in 0..3 {\n\n for y in 0..h {\n\n for x in 0..w {\n\n // 3 - (c + 1): RGB -> BGR\n\n buf[(c * h + y) * w + x] = img.get_pixel(x as _, y as _).data[3 - (c + 1)].into();\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "menoh/examples/vgg16.rs", "rank": 9, "score": 51045.9225500892 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct Args {\n\n flag_i: path::PathBuf,\n\n flag_m: path::PathBuf,\n\n flag_s: path::PathBuf,\n\n}\n\n\n", "file_path": "menoh/examples/vgg16.rs", "rank": 10, "score": 43096.86583398383 }, { "content": "/// This trait makes it impossible to access internal handles from outside.\n\npub trait Handler {\n\n type Handle;\n\n unsafe fn from_handle(handle: Self::Handle) -> Self;\n\n unsafe fn handle(&self) -> Self::Handle;\n\n}\n", "file_path": "menoh/src/handler.rs", "rank": 11, "score": 39496.962665448475 }, { "content": "fn main() {\n\n match pkg_config::Config::new()\n\n .atleast_version(\"1.0\")\n\n .probe(\"menoh\") {\n\n Err(err) => {\n\n println!(\"cargo:warning=pkg-config failed: {}\", err);\n\n println!(\"cargo:rustc-link-lib=dylib=menoh\");\n\n }\n\n _ => (),\n\n }\n\n}\n", "file_path": "menoh-sys/build.rs", "rank": 12, "score": 37787.118867333804 }, { "content": "#[test]\n\nfn new() {\n\n VariableProfileTableBuilder::new().unwrap();\n\n}\n\n\n", "file_path": "menoh/src/variable_profile_table_builder/tests.rs", "rank": 13, "score": 34237.97017387612 }, { "content": "#[test]\n\nfn add_input() {\n\n let mut vpt_builder = VariableProfileTableBuilder::new().unwrap();\n\n vpt_builder\n\n .add_input::<f32>(\"input\", &[1, 3, 224, 224])\n\n .unwrap();\n\n}\n\n\n", "file_path": "menoh/src/variable_profile_table_builder/tests.rs", "rank": 14, 
"score": 33506.97851825798 }, { "content": "#[test]\n\nfn add_output() {\n\n let mut vpt_builder = VariableProfileTableBuilder::new().unwrap();\n\n vpt_builder.add_output::<f32>(\"output\").unwrap();\n\n}\n\n\n", "file_path": "menoh/src/variable_profile_table_builder/tests.rs", "rank": 15, "score": 33506.97851825798 }, { "content": " Error::InvalidDimsSize { name, size } => {\n\n write!(f,\n\n \"menoh invalid dims size error (2 or 4 is valid): dims size of {} is specified {}\",\n\n name,\n\n size)\n\n }\n\n Error::DtypeMismatch { actual, expected } => {\n\n write!(f,\n\n \"menoh dtype mismatch error: actural {}, expected {}\",\n\n actual,\n\n expected)\n\n }\n\n Error::NulError(err) => err.fmt(f),\n\n }\n\n }\n\n}\n\n\n\nimpl error::Error for Error {}\n\n\n", "file_path": "menoh/src/error.rs", "rank": 16, "score": 25969.464097348588 }, { "content": " UnsupportedOperator(String),\n\n FailedToConfigureOperator(String),\n\n BackendError(String),\n\n SameNamedVariableAlreadyExist(String),\n\n\n\n InvalidDimsSize {\n\n /// Name of the variable.\n\n name: String,\n\n /// Size of the specified dims.\n\n size: usize,\n\n },\n\n DtypeMismatch {\n\n /// Actual dtype.\n\n actual: menoh_sys::menoh_dtype,\n\n /// Requested dtype.\n\n expected: menoh_sys::menoh_dtype,\n\n },\n\n NulError(ffi::NulError),\n\n}\n\n\n", "file_path": "menoh/src/error.rs", "rank": 17, "score": 25967.161941405968 }, { "content": "use menoh_sys;\n\nuse std::error;\n\nuse std::ffi;\n\nuse std::fmt;\n\n\n\n#[derive(Debug)]\n\npub enum Error {\n\n StdError(String),\n\n UnknownError(String),\n\n InvalidFilename(String),\n\n UnsupportedOnnxOpsetVersion(String),\n\n OnnxParseError(String),\n\n InvalidDtype(String),\n\n InvalidAttributeType(String),\n\n UnsupportedOperatorAttribute(String),\n\n DimensionMismatch(String),\n\n VariableNotFound(String),\n\n IndexOutOfRange(String),\n\n JsonParseError(String),\n\n InvalidBackendName(String),\n", "file_path": "menoh/src/error.rs", "rank": 18, "score": 
25964.98293308741 }, { "content": "impl fmt::Display for Error {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Error::StdError(message) => write!(f, \"{}\", message),\n\n Error::UnknownError(message) => write!(f, \"{}\", message),\n\n Error::InvalidFilename(message) => write!(f, \"{}\", message),\n\n Error::UnsupportedOnnxOpsetVersion(message) => write!(f, \"{}\", message),\n\n Error::OnnxParseError(message) => write!(f, \"{}\", message),\n\n Error::InvalidDtype(message) => write!(f, \"{}\", message),\n\n Error::InvalidAttributeType(message) => write!(f, \"{}\", message),\n\n Error::UnsupportedOperatorAttribute(message) => write!(f, \"{}\", message),\n\n Error::DimensionMismatch(message) => write!(f, \"{}\", message),\n\n Error::VariableNotFound(message) => write!(f, \"{}\", message),\n\n Error::IndexOutOfRange(message) => write!(f, \"{}\", message),\n\n Error::JsonParseError(message) => write!(f, \"{}\", message),\n\n Error::InvalidBackendName(message) => write!(f, \"{}\", message),\n\n Error::UnsupportedOperator(message) => write!(f, \"{}\", message),\n\n Error::FailedToConfigureOperator(message) => write!(f, \"{}\", message),\n\n Error::BackendError(message) => write!(f, \"{}\", message),\n\n Error::SameNamedVariableAlreadyExist(message) => write!(f, \"{}\", message),\n", "file_path": "menoh/src/error.rs", "rank": 19, "score": 25962.303307642047 }, { "content": " menoh_sys::menoh_error_code_invalid_dtype => Err(Error::InvalidDtype(message)),\n\n menoh_sys::menoh_error_code_invalid_attribute_type => {\n\n Err(Error::InvalidAttributeType(message))\n\n }\n\n menoh_sys::menoh_error_code_unsupported_operator_attribute => {\n\n Err(Error::UnsupportedOperatorAttribute(message))\n\n }\n\n menoh_sys::menoh_error_code_dimension_mismatch => {\n\n Err(Error::DimensionMismatch(message))\n\n }\n\n menoh_sys::menoh_error_code_variable_not_found => Err(Error::VariableNotFound(message)),\n\n menoh_sys::menoh_error_code_index_out_of_range => 
Err(Error::IndexOutOfRange(message)),\n\n menoh_sys::menoh_error_code_json_parse_error => Err(Error::JsonParseError(message)),\n\n menoh_sys::menoh_error_code_invalid_backend_name => {\n\n Err(Error::InvalidBackendName(message))\n\n }\n\n menoh_sys::menoh_error_code_unsupported_operator => {\n\n Err(Error::UnsupportedOperator(message))\n\n }\n\n menoh_sys::menoh_error_code_failed_to_configure_operator => {\n", "file_path": "menoh/src/error.rs", "rank": 20, "score": 25961.710807884032 }, { "content": " Err(Error::FailedToConfigureOperator(message))\n\n }\n\n menoh_sys::menoh_error_code_backend_error => Err(Error::BackendError(message)),\n\n menoh_sys::menoh_error_code_same_named_variable_already_exist => {\n\n Err(Error::SameNamedVariableAlreadyExist(message))\n\n }\n\n _ => unreachable!(),\n\n }\n\n }\n\n}\n\n\n\nimpl From<ffi::NulError> for Error {\n\n fn from(value: ffi::NulError) -> Self {\n\n Error::NulError(value)\n\n }\n\n}\n", "file_path": "menoh/src/error.rs", "rank": 21, "score": 25961.258532071763 }, { "content": "use menoh_sys;\n\n\n\nuse Error;\n\n\n\n/// Representation of scalar types supported by Menoh.\n\npub unsafe trait Dtype {\n\n /// Integer specifying the scalar type.\n\n ///\n\n /// ```\n\n /// # extern crate menoh;\n\n /// # extern crate menoh_sys;\n\n /// # use menoh::*;\n\n /// assert_eq!(f32::ID, menoh_sys::menoh_dtype_float as menoh_sys::menoh_dtype);\n\n /// ```\n\n const ID: menoh_sys::menoh_dtype;\n\n\n\n /// Verify a scalar type.\n\n ///\n\n /// ```\n\n /// # use menoh::*;\n", "file_path": "menoh/src/dtype/mod.rs", "rank": 22, "score": 25075.41661810557 }, { "content": " /// assert!(f32::check(f32::ID).is_ok());\n\n /// assert!(f32::check(f32::ID + 1).is_err());\n\n /// ```\n\n fn check(dtype: menoh_sys::menoh_dtype) -> Result<(), Error> {\n\n if dtype == Self::ID {\n\n Ok(())\n\n } else {\n\n Err(Error::DtypeMismatch {\n\n actual: dtype,\n\n expected: Self::ID,\n\n })\n\n }\n\n }\n\n}\n\n\n\nunsafe impl Dtype for f32 {\n\n const ID: 
menoh_sys::menoh_dtype = menoh_sys::menoh_dtype_float as _;\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": "menoh/src/dtype/mod.rs", "rank": 23, "score": 25073.57447607243 }, { "content": "use menoh_sys;\n\n\n\nuse super::Dtype;\n\n\n\n#[test]\n", "file_path": "menoh/src/dtype/tests.rs", "rank": 24, "score": 25071.39794579911 }, { "content": "fn load_category_list<P>(path: P) -> io::Result<Vec<String>>\n\n where P: AsRef<path::Path>\n\n{\n\n let mut categories = Vec::new();\n\n for line in io::BufReader::new(fs::File::open(path)?).lines() {\n\n categories.push(line?);\n\n }\n\n Ok(categories)\n\n}\n", "file_path": "menoh/examples/vgg16.rs", "rank": 25, "score": 24181.25550879402 }, { "content": " -> Result<Model, Error> {\n\n let backend_name = ffi::CString::new(backend_name)?;\n\n let backend_config = ffi::CString::new(backend_config)?;\n\n let mut handle = ptr::null_mut();\n\n unsafe {\n\n check(menoh_sys::menoh_build_model(self.handle,\n\n model_data.handle(),\n\n backend_name.as_ptr(),\n\n backend_config.as_ptr(),\n\n &mut handle))?;\n\n Ok(Model::from_handle(handle))\n\n }\n\n }\n\n}\n\n\n\nimpl Drop for ModelBuilder {\n\n fn drop(&mut self) {\n\n unsafe { menoh_sys::menoh_delete_model_builder(self.handle) }\n\n }\n\n}\n", "file_path": "menoh/src/model_builder/mod.rs", "rank": 33, "score": 23157.32781706181 }, { "content": " /// # fn main() -> Result<(), Error> {\n\n /// let model_data = ModelData::from_onnx(\"MLP.onnx\")?;\n\n /// # Ok(())\n\n /// # }\n\n /// ```\n\n pub fn from_onnx<P>(path: P) -> Result<Self, Error>\n\n where P: AsRef<path::Path>\n\n {\n\n let path = ffi::CString::new::<&str>(&path.as_ref().to_string_lossy())?;\n\n let mut handle = ptr::null_mut();\n\n unsafe { check(menoh_sys::menoh_make_model_data_from_onnx(path.as_ptr(), &mut handle))? 
};\n\n Ok(Self { handle })\n\n }\n\n\n\n /// Remove unused data using a `VariableProfileTable`.\n\n ///\n\n /// ```\n\n /// use menoh::*;\n\n /// # fn main() -> Result<(), Error> {\n\n /// # let mut model_data = ModelData::from_onnx(\"MLP.onnx\")?;\n", "file_path": "menoh/src/model_data/mod.rs", "rank": 34, "score": 23156.452058885072 }, { "content": "use menoh_sys;\n\nuse std::ffi;\n\nuse std::ptr;\n\n\n\nuse Error;\n\nuse error::check;\n\nuse handler::Handler;\n\nuse Model;\n\nuse ModelData;\n\nuse VariableProfileTable;\n\n\n\n/// Builder for `Model`.\n\npub struct ModelBuilder {\n\n handle: menoh_sys::menoh_model_builder_handle,\n\n}\n\n\n\nimpl ModelBuilder {\n\n /// Create a builder using a `VariableProfileTable`.\n\n ///\n\n /// ```\n", "file_path": "menoh/src/model_builder/mod.rs", "rank": 35, "score": 23155.442158828377 }, { "content": "use menoh_sys;\n\nuse std::ffi;\n\nuse std::path;\n\nuse std::ptr;\n\n\n\nuse Error;\n\nuse handler::Handler;\n\nuse error::check;\n\nuse VariableProfileTable;\n\n\n\n/// Container of operators and values of constant variables.\n\npub struct ModelData {\n\n handle: menoh_sys::menoh_model_data_handle,\n\n}\n\n\n\nimpl ModelData {\n\n /// Load data from a ONNX file.\n\n ///\n\n /// ```\n\n /// # use menoh::*;\n", "file_path": "menoh/src/model_data/mod.rs", "rank": 36, "score": 23154.846180766453 }, { "content": " }\n\n unsafe fn handle(&self) -> Self::Handle {\n\n self.handle\n\n }\n\n}\n\n\n\nimpl Drop for ModelData {\n\n fn drop(&mut self) {\n\n unsafe { menoh_sys::menoh_delete_model_data(self.handle) }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": "menoh/src/model_data/mod.rs", "rank": 37, "score": 23154.74065054995 }, { "content": " /// # let mut vpt_builder = VariableProfileTableBuilder::new()?;\n\n /// # vpt_builder.add_input::<f32>(\"input\", &[2, 3])?;\n\n /// # vpt_builder.add_output::<f32>(\"fc2\")?;\n\n /// # let vpt = vpt_builder.build(&model_data)?;\n\n /// model_data.optimize(&vpt)?;\n\n /// # 
Ok(())\n\n /// # }\n\n /// ```\n\n pub fn optimize(&mut self, variable_profile_table: &VariableProfileTable) -> Result<(), Error> {\n\n unsafe {\n\n check(menoh_sys::menoh_model_data_optimize(self.handle,\n\n variable_profile_table.handle()))\n\n }\n\n }\n\n}\n\n\n\nimpl Handler for ModelData {\n\n type Handle = menoh_sys::menoh_model_data_handle;\n\n unsafe fn from_handle(handle: Self::Handle) -> Self {\n\n Self { handle }\n", "file_path": "menoh/src/model_data/mod.rs", "rank": 38, "score": 23154.7040772179 }, { "content": " /// # use menoh::*;\n\n /// # fn main() -> Result<(), Error> {\n\n /// # let mut model_data = ModelData::from_onnx(\"MLP.onnx\")?;\n\n /// # let mut vpt_builder = VariableProfileTableBuilder::new()?;\n\n /// # vpt_builder.add_input::<f32>(\"input\", &[2, 3])?;\n\n /// # vpt_builder.add_output::<f32>(\"fc2\")?;\n\n /// # let vpt = vpt_builder.build(&model_data)?;\n\n /// let model_builder = ModelBuilder::new(&vpt)?;\n\n /// # Ok(())\n\n /// # }\n\n /// ```\n\n pub fn new(variable_profile_table: &VariableProfileTable) -> Result<Self, Error> {\n\n let mut handle = ptr::null_mut();\n\n unsafe {\n\n check(menoh_sys::menoh_make_model_builder(variable_profile_table.handle(),\n\n &mut handle))?;\n\n }\n\n Ok(Self { handle })\n\n }\n\n\n", "file_path": "menoh/src/model_builder/mod.rs", "rank": 39, "score": 23154.549850976826 }, { "content": " /// Build a `Model` from a `ModelData`.\n\n ///\n\n /// ```\n\n /// # use menoh::*;\n\n /// # fn main() -> Result<(), Error> {\n\n /// # let mut model_data = ModelData::from_onnx(\"MLP.onnx\")?;\n\n /// # let mut vpt_builder = VariableProfileTableBuilder::new()?;\n\n /// # vpt_builder.add_input::<f32>(\"input\", &[2, 3])?;\n\n /// # vpt_builder.add_output::<f32>(\"fc2\")?;\n\n /// # let vpt = vpt_builder.build(&model_data)?;\n\n /// # model_data.optimize(&vpt)?;\n\n /// # let model_builder = ModelBuilder::new(&vpt)?;\n\n /// let model = model_builder.build(model_data, \"mkldnn\", \"\")?;\n\n /// # Ok(())\n\n /// # 
}\n\n /// ```\n\n pub fn build(self,\n\n model_data: ModelData,\n\n backend_name: &str,\n\n backend_config: &str)\n", "file_path": "menoh/src/model_builder/mod.rs", "rank": 40, "score": 23153.5534877684 }, { "content": "use super::ModelData;\n\n\n\n#[test]\n\n#[should_panic(expected=\"InvalidFilename\")]\n", "file_path": "menoh/src/model_data/tests.rs", "rank": 41, "score": 23147.329296935754 }, { "content": "# [ repr ( C ) ]\n\npub struct menoh_model {\n\n _unused: [u8; 0],\n\n}\n\npub type menoh_model_handle = *mut menoh_model;\n\nextern \"C\" {\n\n pub fn menoh_build_model(builder: menoh_model_builder_handle, model_data: menoh_model_data_handle, backend_name: *const ::std::os::raw::c_char, backend_config: *const ::std::os::raw::c_char, dst_model_handle: *mut menoh_model_handle) -> menoh_error_code;\n\n}\n\nextern \"C\" {\n\n pub fn menoh_delete_model(model: menoh_model_handle);\n\n}\n\nextern \"C\" {\n\n pub fn menoh_model_get_variable_buffer_handle(model: menoh_model_handle, variable_name: *const ::std::os::raw::c_char, dst_data: *mut *mut ::std::os::raw::c_void) -> menoh_error_code;\n\n}\n\nextern \"C\" {\n\n pub fn menoh_model_get_variable_dtype(model: menoh_model_handle, variable_name: *const ::std::os::raw::c_char, dst_dtype: *mut menoh_dtype) -> menoh_error_code;\n\n}\n\nextern \"C\" {\n\n pub fn menoh_model_get_variable_dims_size(model: menoh_model_handle, variable_name: *const ::std::os::raw::c_char, dst_size: *mut i32) -> menoh_error_code;\n\n}\n\nextern \"C\" {\n\n pub fn menoh_model_get_variable_dims_at(model: menoh_model_handle, variable_name: *const ::std::os::raw::c_char, index: i32, dst_size: *mut i32) -> menoh_error_code;\n\n}\n\nextern \"C\" {\n\n pub fn menoh_model_run(model: menoh_model_handle) -> menoh_error_code;\n\n}\n", "file_path": "menoh-sys/src/lib.rs", "rank": 42, "score": 18.577701388486116 }, { "content": " /// # vpt_builder.add_input::<f32>(\"input\", &[2, 3])?;\n\n /// # vpt_builder.add_output::<f32>(\"fc2\")?;\n\n /// # let vpt = 
vpt_builder.build(&model_data)?;\n\n /// let dims = vpt.get_variable_dims(\"fc2\")?;\n\n /// # assert_eq!(dims, &[2, 5]);\n\n /// # Ok(())\n\n /// # }\n\n /// ```\n\n pub fn get_variable_dims(&self, name: &str) -> Result<Vec<usize>, Error> {\n\n let name = ffi::CString::new(name)?;\n\n unsafe {\n\n let mut size = 0;\n\n check(menoh_sys::menoh_variable_profile_table_get_dims_size(self.handle,\n\n name.as_ptr(),\n\n &mut size))?;\n\n let mut dims = Vec::with_capacity(size as _);\n\n for index in 0..size {\n\n let mut dim = 0;\n\n check(menoh_sys::menoh_variable_profile_table_get_dims_at(self.handle,\n\n name.as_ptr(),\n", "file_path": "menoh/src/variable_profile_table/mod.rs", "rank": 43, "score": 18.339176769830942 }, { "content": " /// # }\n\n /// ```\n\n pub fn add_input<T>(&mut self, name: &str, dims: &[usize]) -> Result<(), Error>\n\n where T: Dtype\n\n {\n\n let c_name = ffi::CString::new(name)?;\n\n match dims.len() {\n\n 2 => unsafe {\n\n check(menoh_sys::menoh_variable_profile_table_builder_add_input_profile_dims_2(\n\n self.handle, c_name.as_ptr(), T::ID,\n\n dims[0] as _, dims[1] as _))\n\n },\n\n 4 => unsafe {\n\n check(menoh_sys::menoh_variable_profile_table_builder_add_input_profile_dims_4(\n\n self.handle, c_name.as_ptr(), T::ID,\n\n dims[0] as _, dims[1] as _, dims[2] as _, dims[3] as _))\n\n },\n\n _ => {\n\n Err(Error::InvalidDimsSize {\n\n name: name.to_owned(),\n", "file_path": "menoh/src/variable_profile_table_builder/mod.rs", "rank": 44, "score": 17.118979381088774 }, { "content": "extern \"C\" {\n\n pub fn menoh_variable_profile_table_get_dims_at(variable_profile_table: menoh_variable_profile_table_handle, variable_name: *const ::std::os::raw::c_char, index: i32, dst_size: *mut i32) -> menoh_error_code;\n\n}\n\nextern \"C\" {\n\n pub fn menoh_model_data_optimize(model_data: menoh_model_data_handle, variable_profile_table: menoh_variable_profile_table_handle) -> menoh_error_code;\n\n}\n\n# [ repr ( C ) ]\n\npub struct menoh_model_builder {\n\n 
_unused: [u8; 0],\n\n}\n\npub type menoh_model_builder_handle = *mut menoh_model_builder;\n\nextern \"C\" {\n\n pub fn menoh_make_model_builder(variable_profile_table: menoh_variable_profile_table_handle, dst_handle: *mut menoh_model_builder_handle) -> menoh_error_code;\n\n}\n\nextern \"C\" {\n\n pub fn menoh_delete_model_builder(model_builder: menoh_model_builder_handle);\n\n}\n\nextern \"C\" {\n\n pub fn menoh_model_builder_attach_external_buffer(builder: menoh_model_builder_handle, variable_name: *const ::std::os::raw::c_char, buffer_handle: *mut ::std::os::raw::c_void) -> menoh_error_code;\n\n}\n", "file_path": "menoh-sys/src/lib.rs", "rank": 45, "score": 15.86634265812904 }, { "content": " size: dims.len(),\n\n })\n\n }\n\n }\n\n }\n\n\n\n /// Register a variable as output.\n\n ///\n\n /// ```\n\n /// use menoh::*;\n\n /// # fn main() -> Result<(), Error> {\n\n /// # let mut vpt_builder = VariableProfileTableBuilder::new()?;\n\n /// # vpt_builder.add_input::<f32>(\"input\", &[2, 3])?;\n\n /// vpt_builder.add_output::<f32>(\"fc2\")?;\n\n /// # Ok(())\n\n /// # }\n\n /// ```\n\n pub fn add_output<T>(&mut self, name: &str) -> Result<(), Error>\n\n where T: Dtype\n\n {\n", "file_path": "menoh/src/variable_profile_table_builder/mod.rs", "rank": 46, "score": 14.869249194265674 }, { "content": "extern \"C\" {\n\n pub fn menoh_variable_profile_table_builder_add_output_profile(builder: menoh_variable_profile_table_builder_handle, name: *const ::std::os::raw::c_char, dtype: menoh_dtype) -> menoh_error_code;\n\n}\n\n# [ repr ( C ) ]\n\npub struct menoh_variable_profile_table {\n\n _unused: [u8; 0],\n\n}\n\npub type menoh_variable_profile_table_handle = *mut menoh_variable_profile_table;\n\nextern \"C\" {\n\n pub fn menoh_build_variable_profile_table(builder: menoh_variable_profile_table_builder_handle, model_data: menoh_model_data_handle, dst_handle: *mut menoh_variable_profile_table_handle) -> menoh_error_code;\n\n}\n\nextern \"C\" {\n\n pub fn 
menoh_delete_variable_profile_table(variable_profile_table: menoh_variable_profile_table_handle);\n\n}\n\nextern \"C\" {\n\n pub fn menoh_variable_profile_table_get_dtype(variable_profile_table: menoh_variable_profile_table_handle, variable_name: *const ::std::os::raw::c_char, dst_dtype: *mut menoh_dtype) -> menoh_error_code;\n\n}\n\nextern \"C\" {\n\n pub fn menoh_variable_profile_table_get_dims_size(variable_profile_table: menoh_variable_profile_table_handle, variable_name: *const ::std::os::raw::c_char, dst_size: *mut i32) -> menoh_error_code;\n\n}\n", "file_path": "menoh-sys/src/lib.rs", "rank": 47, "score": 14.304223019886834 }, { "content": " index,\n\n &mut dim))?;\n\n dims.push(dim as _);\n\n }\n\n Ok(dims)\n\n }\n\n }\n\n}\n\n\n\nimpl Handler for VariableProfileTable {\n\n type Handle = menoh_sys::menoh_variable_profile_table_handle;\n\n unsafe fn from_handle(handle: Self::Handle) -> Self {\n\n Self { handle }\n\n }\n\n unsafe fn handle(&self) -> Self::Handle {\n\n self.handle\n\n }\n\n}\n\n\n\nimpl Drop for VariableProfileTable {\n\n fn drop(&mut self) {\n\n unsafe { menoh_sys::menoh_delete_variable_profile_table(self.handle) }\n\n }\n\n}\n", "file_path": "menoh/src/variable_profile_table/mod.rs", "rank": 48, "score": 14.233360471213395 }, { "content": " /// ```\n\n pub fn add_output<T>(mut self, name: &str) -> Result<Self, Error>\n\n where T: Dtype\n\n {\n\n self.vpt_builder.add_output::<T>(name)?;\n\n Ok(self)\n\n }\n\n\n\n /// Build a `Model`.\n\n ///\n\n /// ```\n\n /// # use menoh::*;\n\n /// # fn main() -> Result<(), Error> {\n\n /// # let builder = Builder::from_onnx(\"MLP.onnx\")?\n\n /// # .add_input::<f32>(\"input\", &[2, 3])?\n\n /// # .add_output::<f32>(\"fc2\")?;\n\n /// let model = builder.build(\"mkldnn\", \"\")?;\n\n /// # Ok(())\n\n /// # }\n\n /// ```\n\n pub fn build(mut self, backend: &str, backend_config: &str) -> Result<Model, Error> {\n\n let vpt = self.vpt_builder.build(&self.model_data)?;\n\n self.model_data.optimize(&vpt)?;\n\n 
let model_builder = ModelBuilder::new(&vpt)?;\n\n Ok(model_builder\n\n .build(self.model_data, backend, backend_config)?)\n\n }\n\n}\n", "file_path": "menoh/src/builder/mod.rs", "rank": 49, "score": 13.798599044408547 }, { "content": "extern \"C\" {\n\n pub fn menoh_delete_model_data(model_data: menoh_model_data_handle);\n\n}\n\n# [ repr ( C ) ]\n\npub struct menoh_variable_profile_table_builder {\n\n _unused: [u8; 0],\n\n}\n\npub type menoh_variable_profile_table_builder_handle = *mut menoh_variable_profile_table_builder;\n\nextern \"C\" {\n\n pub fn menoh_make_variable_profile_table_builder(dst_handle: *mut menoh_variable_profile_table_builder_handle) -> menoh_error_code;\n\n}\n\nextern \"C\" {\n\n pub fn menoh_delete_variable_profile_table_builder(builder: menoh_variable_profile_table_builder_handle);\n\n}\n\nextern \"C\" {\n\n pub fn menoh_variable_profile_table_builder_add_input_profile_dims_2(builder: menoh_variable_profile_table_builder_handle, name: *const ::std::os::raw::c_char, dtype: menoh_dtype, num: i32, size: i32) -> menoh_error_code;\n\n}\n\nextern \"C\" {\n\n pub fn menoh_variable_profile_table_builder_add_input_profile_dims_4(builder: menoh_variable_profile_table_builder_handle, name: *const ::std::os::raw::c_char, dtype: menoh_dtype, num: i32, channel: i32, height: i32, width: i32) -> menoh_error_code;\n\n}\n", "file_path": "menoh-sys/src/lib.rs", "rank": 50, "score": 13.636388197492968 }, { "content": " pub fn build(self, model_data: &ModelData) -> Result<VariableProfileTable, Error> {\n\n let mut handle = ptr::null_mut();\n\n unsafe {\n\n check(menoh_sys::menoh_build_variable_profile_table(self.handle,\n\n model_data.handle(),\n\n &mut handle))?;\n\n Ok(VariableProfileTable::from_handle(handle))\n\n }\n\n }\n\n}\n\n\n\nimpl Drop for VariableProfileTableBuilder {\n\n fn drop(&mut self) {\n\n unsafe { menoh_sys::menoh_delete_variable_profile_table_builder(self.handle) }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests;\n", "file_path": 
"menoh/src/variable_profile_table_builder/mod.rs", "rank": 51, "score": 13.259158102622873 }, { "content": " /// let builder = builder.add_input::<f32>(\"input\", &[2, 3])?;\n\n /// # Ok(())\n\n /// # }\n\n /// ```\n\n pub fn add_input<T>(mut self, name: &str, dims: &[usize]) -> Result<Self, Error>\n\n where T: Dtype\n\n {\n\n self.vpt_builder.add_input::<T>(name, dims)?;\n\n Ok(self)\n\n }\n\n\n\n /// Register a variable as output.\n\n ///\n\n /// ```\n\n /// # use menoh::*;\n\n /// # fn main() -> Result<(), Error> {\n\n /// # let builder = Builder::from_onnx(\"MLP.onnx\")?;\n\n /// let builder = builder.add_output::<f32>(\"fc2\")?;\n\n /// # Ok(())\n\n /// # }\n", "file_path": "menoh/src/builder/mod.rs", "rank": 52, "score": 13.148923209302065 }, { "content": "//! let (out_dims, out_buf) = model.get_variable::<f32>(\"fc2\")?;\n\n//! // use the data (e.g. print them).\n\n//! println!(\"{:?}\", out_buf);\n\n//! # Ok(())\n\n//! # }\n\n//! ```\n\n\n\nextern crate menoh_sys;\n\n\n\nmod builder;\n\nmod dtype;\n\nmod error;\n\nmod handler;\n\nmod model;\n\nmod model_builder;\n\nmod model_data;\n\nmod variable_profile_table;\n\nmod variable_profile_table_builder;\n\n\n\npub use builder::Builder;\n\npub use dtype::Dtype;\n\npub use error::Error;\n\npub use model::Model;\n\npub use model_builder::ModelBuilder;\n\npub use model_data::ModelData;\n\npub use variable_profile_table::VariableProfileTable;\n\npub use variable_profile_table_builder::VariableProfileTableBuilder;\n", "file_path": "menoh/src/lib.rs", "rank": 53, "score": 13.071033506188401 }, { "content": "use menoh_sys;\n\nuse std::ffi;\n\n\n\nuse Error;\n\nuse error::check;\n\nuse handler::Handler;\n\n\n\n/// Container of variable profiles (type, shape and flag of input/output).\n\npub struct VariableProfileTable {\n\n handle: menoh_sys::menoh_variable_profile_table_handle,\n\n}\n\n\n\nimpl VariableProfileTable {\n\n /// Fetch the shape of variable.\n\n ///\n\n /// ```\n\n /// # use menoh::*;\n\n /// # fn main() -> 
Result<(), Error> {\n\n /// # let mut model_data = ModelData::from_onnx(\"MLP.onnx\")?;\n\n /// # let mut vpt_builder = VariableProfileTableBuilder::new()?;\n", "file_path": "menoh/src/variable_profile_table/mod.rs", "rank": 54, "score": 12.77317320083921 }, { "content": "use menoh_sys;\n\nuse std::ffi;\n\nuse std::ptr;\n\n\n\nuse Dtype;\n\nuse Error;\n\nuse error::check;\n\nuse handler::Handler;\n\nuse ModelData;\n\nuse VariableProfileTable;\n\n\n\n/// Builder for `VariableProfileTable`.\n\npub struct VariableProfileTableBuilder {\n\n handle: menoh_sys::menoh_variable_profile_table_builder_handle,\n\n}\n\n\n\nimpl VariableProfileTableBuilder {\n\n /// Create a builder.\n\n ///\n\n /// ```\n", "file_path": "menoh/src/variable_profile_table_builder/mod.rs", "rank": 55, "score": 12.485775322990122 }, { "content": "pub const menoh_error_code_index_out_of_range: menoh_error_code_constant = 11;\n\npub const menoh_error_code_json_parse_error: menoh_error_code_constant = 12;\n\npub const menoh_error_code_invalid_backend_name: menoh_error_code_constant = 13;\n\npub const menoh_error_code_unsupported_operator: menoh_error_code_constant = 14;\n\npub const menoh_error_code_failed_to_configure_operator: menoh_error_code_constant = 15;\n\npub const menoh_error_code_backend_error: menoh_error_code_constant = 16;\n\npub const menoh_error_code_same_named_variable_already_exist: menoh_error_code_constant = 17;\n\npub type menoh_error_code_constant = u32;\n\npub type menoh_error_code = i32;\n\nextern \"C\" {\n\n pub fn menoh_get_last_error_message() -> *const ::std::os::raw::c_char;\n\n}\n\n# [ repr ( C ) ]\n\npub struct menoh_model_data {\n\n _unused: [u8; 0],\n\n}\n\npub type menoh_model_data_handle = *mut menoh_model_data;\n\nextern \"C\" {\n\n pub fn menoh_make_model_data_from_onnx(onnx_filename: *const ::std::os::raw::c_char, dst_handle: *mut menoh_model_data_handle) -> menoh_error_code;\n\n}\n", "file_path": "menoh-sys/src/lib.rs", "rank": 56, "score": 12.087458802921434 }, { 
"content": "use std::path;\n\n\n\nuse Dtype;\n\nuse Error;\n\nuse Model;\n\nuse ModelBuilder;\n\nuse ModelData;\n\nuse VariableProfileTableBuilder;\n\n\n\n/// Helper to build `Model`.\n\npub struct Builder {\n\n model_data: ModelData,\n\n vpt_builder: VariableProfileTableBuilder,\n\n}\n\n\n\nimpl Builder {\n\n /// Create a builder from a ONNX file.\n\n ///\n\n /// ```\n\n /// # use menoh::*;\n", "file_path": "menoh/src/builder/mod.rs", "rank": 57, "score": 11.889815826650112 }, { "content": " let name = ffi::CString::new(name)?;\n\n unsafe {\n\n check(menoh_sys::menoh_variable_profile_table_builder_add_output_profile(\n\n self.handle, name.as_ptr(), T::ID))\n\n }\n\n }\n\n\n\n /// Build a `VariableProfileTable` using a `ModelData`.\n\n ///\n\n /// ```\n\n /// use menoh::*;\n\n /// # fn main() -> Result<(), Error> {\n\n /// # let mut model_data = ModelData::from_onnx(\"MLP.onnx\")?;\n\n /// # let mut vpt_builder = VariableProfileTableBuilder::new()?;\n\n /// # vpt_builder.add_input::<f32>(\"input\", &[2, 3])?;\n\n /// # vpt_builder.add_output::<f32>(\"fc2\")?;\n\n /// let vpt = vpt_builder.build(&model_data)?;\n\n /// # Ok(())\n\n /// # }\n\n /// ```\n", "file_path": "menoh/src/variable_profile_table_builder/mod.rs", "rank": 58, "score": 11.338289245738135 }, { "content": " /// use menoh::*;\n\n /// # fn main() -> Result<(), Error> {\n\n /// let vpt_builder = VariableProfileTableBuilder::new()?;\n\n /// # Ok(())\n\n /// # }\n\n /// ```\n\n pub fn new() -> Result<Self, Error> {\n\n let mut handle = ptr::null_mut();\n\n unsafe { check(menoh_sys::menoh_make_variable_profile_table_builder(&mut handle))? 
};\n\n Ok(Self { handle })\n\n }\n\n\n\n /// Register a variable as input.\n\n ///\n\n /// ```\n\n /// use menoh::*;\n\n /// # fn main() -> Result<(), Error> {\n\n /// # let mut vpt_builder = VariableProfileTableBuilder::new()?;\n\n /// vpt_builder.add_input::<f32>(\"input\", &[2, 3])?;\n\n /// # Ok(())\n", "file_path": "menoh/src/variable_profile_table_builder/mod.rs", "rank": 59, "score": 11.0470553917308 }, { "content": " }\n\n model.run()?;\n\n\n\n let (_, fc6_buf) = model.get_variable::<f32>(FC6_OUT_NAME)?;\n\n println!(\"{:?}\", &fc6_buf[..10]);\n\n\n\n let (softmax_dims, softmax_buf) = model.get_variable::<f32>(SOFTMAX_OUT_NAME)?;\n\n let mut indices: Vec<_> = (0..softmax_dims[1]).collect();\n\n indices.sort_unstable_by(|&i, &j| {\n\n softmax_buf[j]\n\n .partial_cmp(&softmax_buf[i])\n\n .unwrap_or(cmp::Ordering::Equal)\n\n });\n\n let categories = load_category_list(args.flag_s)?;\n\n for &i in &indices[..5] {\n\n println!(\"{} {} {}\", i, softmax_buf[i], categories[i]);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "menoh/examples/vgg16.rs", "rank": 60, "score": 7.55515733181873 }, { "content": "#![allow(non_camel_case_types)]\n\n#![allow(non_snake_case)]\n\n#![allow(non_upper_case_globals)]\n\n\n\n/* automatically generated by rust-bindgen */\n\n\n\npub const menoh_dtype_float: menoh_dtype_constant = 0;\n\npub type menoh_dtype_constant = u32;\n\npub type menoh_dtype = i32;\n\npub const menoh_error_code_success: menoh_error_code_constant = 0;\n\npub const menoh_error_code_std_error: menoh_error_code_constant = 1;\n\npub const menoh_error_code_unknown_error: menoh_error_code_constant = 2;\n\npub const menoh_error_code_invalid_filename: menoh_error_code_constant = 3;\n\npub const menoh_error_code_unsupported_onnx_opset_version: menoh_error_code_constant = 4;\n\npub const menoh_error_code_onnx_parse_error: menoh_error_code_constant = 5;\n\npub const menoh_error_code_invalid_dtype: menoh_error_code_constant = 6;\n\npub const 
menoh_error_code_invalid_attribute_type: menoh_error_code_constant = 7;\n\npub const menoh_error_code_unsupported_operator_attribute: menoh_error_code_constant = 8;\n\npub const menoh_error_code_dimension_mismatch: menoh_error_code_constant = 9;\n\npub const menoh_error_code_variable_not_found: menoh_error_code_constant = 10;\n", "file_path": "menoh-sys/src/lib.rs", "rank": 61, "score": 7.4602047887615575 }, { "content": "//! Rust binding for [Menoh](https://github.com/pfnet-research/menoh)\n\n//!\n\n//! ## Example\n\n//!\n\n//! ```\n\n//! extern crate menoh;\n\n//!\n\n//! fn main() -> Result<(), menoh::Error> {\n\n//! let mut model = menoh::Builder::from_onnx(\"MLP.onnx\")?\n\n//! .add_input::<f32>(\"input\", &[2, 3])?\n\n//! .add_output::<f32>(\"fc2\")?\n\n//! .build(\"mkldnn\", \"\")?;\n\n//!\n\n//! {\n\n//! let (in_dims, in_buf) = model.get_variable_mut::<f32>(\"input\")?;\n\n//! in_buf.copy_from_slice(&[0., 1., 2., 3., 4., 5.]);\n\n//! println!(\"in:\");\n\n//! # assert_eq!(in_dims, &[2, 3]);\n\n//! println!(\" dims: {:?}\", in_dims);\n\n//! println!(\" buf: {:?}\", in_buf);\n", "file_path": "menoh/src/lib.rs", "rank": 62, "score": 7.445403056021183 }, { "content": "//! // fetch a read/write view of a variable.\n\n//! let (in_dims, in_buf) = model.get_variable_mut::<f32>(\"input\")?;\n\n//! // set data to the variable.\n\n//! in_buf.copy_from_slice(&[0., 1., 2., 3., 4., 5.]);\n\n//! # Ok(())\n\n//! # }\n\n//! ```\n\n//! Note: The lifetime of views has to end before executing `Model::run`.\n\n//! Blocks will be required to limit the lifetime.\n\n//! ```compile_fail\n\n//! // NG: `in_buf` lives after `model.run()`.\n\n//! # fn main() -> Result<(), menoh::Error> {\n\n//! # let mut model = menoh::Builder::from_onnx(\"MLP.onnx\")?\n\n//! # .add_input::<f32>(\"input\", &[2, 3])?\n\n//! # .add_output::<f32>(\"fc2\")?\n\n//! # .build(\"mkldnn\", \"\")?;\n\n//! let (in_dims, in_buf) = model.get_variable_mut::<f32>(\"input\")?;\n\n//! 
in_buf.copy_from_slice(&[0., 1., 2., 3., 4., 5.]);\n\n//! model.run()?;\n\n//! # Ok(())\n", "file_path": "menoh/src/lib.rs", "rank": 63, "score": 7.253870858720207 }, { "content": "//! # }\n\n//! ```\n\n//! ```\n\n//! // OK: the lifetime of `in_buf` is limited by a block.\n\n//! # fn main() -> Result<(), menoh::Error> {\n\n//! # let mut model = menoh::Builder::from_onnx(\"MLP.onnx\")?\n\n//! # .add_input::<f32>(\"input\", &[2, 3])?\n\n//! # .add_output::<f32>(\"fc2\")?\n\n//! # .build(\"mkldnn\", \"\")?;\n\n//! {\n\n//! let (in_dims, in_buf) = model.get_variable_mut::<f32>(\"input\")?;\n\n//! in_buf.copy_from_slice(&[0., 1., 2., 3., 4., 5.]);\n\n//! }\n\n//! model.run()?;\n\n//! # Ok(())\n\n//! # }\n\n//! ```\n\n//!\n\n//! ### 3. Execute computation.\n\n//!\n", "file_path": "menoh/src/lib.rs", "rank": 64, "score": 7.228033687978139 }, { "content": "# menoh-rs\n\n\n\n[![crates.io](https://img.shields.io/crates/v/menoh.svg)](https://crates.io/crates/menoh)\n\n[![docs.rs](https://docs.rs/menoh/badge.svg)](https://docs.rs/menoh)\n\n[![Travis CI](https://travis-ci.org/pfnet-research/menoh-rs.svg?branch=master)](https://travis-ci.org/pfnet-research/menoh-rs)\n\n[![AppVeyor](https://ci.appveyor.com/api/projects/status/y33xqwwlhtarirwd/branch/master?svg=true)](https://ci.appveyor.com/project/pfnet-research/menoh-rs/branch/master)\n\n\n\nRust binding for [Menoh](https://github.com/pfnet-research/menoh) \n\n[Documentation](https://docs.rs/menoh)\n\n\n\n## Requirements\n\n- Rust 1.27+\n\n- [Menoh](https://github.com/pfnet-research/menoh) 1.0+\n\n (please make sure that `pkg-config` can find `menoh`)\n\n\n\n## Demo\n\n\n\n```\n\n$ git clone https://github.com/pfnet-research/menoh-rs.git\n\n$ cd menoh-rs/menoh\n\n\n\n$ curl -L https://www.dropbox.com/s/bjfn9kehukpbmcm/VGG16.onnx?dl=1 -o VGG16.onnx\n\n$ curl -LO https://raw.githubusercontent.com/HoldenCaulfieldRye/caffe/master/data/ilsvrc12/synset_words.txt\n\n$ curl -LO 
https://upload.wikimedia.org/wikipedia/commons/5/54/Light_sussex_hen.jpg\n\n\n\n$ cargo run --example vgg16 # use Light_sussex_hen.jpg\n\n$ cargo run --example vgg16 -- --image <image> # use your image\n\n```\n\n\n\n## Example\n\n\n\n```rust\n\nextern crate menoh;\n\n\n\nfn main() -> Result<(), menoh::Error> {\n\n let mut model = menoh::Builder::from_onnx(\"MLP.onnx\")?\n\n .add_input::<f32>(\"input\", &[2, 3])?\n\n .add_output::<f32>(\"fc2\")?\n\n .build(\"mkldnn\", \"\")?;\n\n\n\n {\n\n let (in_dims, in_buf) = model.get_variable_mut::<f32>(\"input\")?;\n\n in_buf.copy_from_slice(&[0., 1., 2., 3., 4., 5.]);\n\n println!(\"in:\");\n\n println!(\" dims: {:?}\", in_dims);\n\n println!(\" buf: {:?}\", in_buf);\n\n }\n\n\n\n model.run()?;\n\n\n\n let (out_dims, out_buf) = model.get_variable::<f32>(\"fc2\")?;\n\n println!(\"out:\");\n\n println!(\" dims: {:?}\", out_dims);\n\n println!(\" buf: {:?}\", out_buf);\n\n Ok(())\n\n}\n\n```\n", "file_path": "README.md", "rank": 65, "score": 6.971919350761685 }, { "content": "//!\n\n//! ```\n\n//! # fn main() -> Result<(), menoh::Error> {\n\n//! let mut model = menoh::Builder::from_onnx(\"MLP.onnx\")?\n\n//! // register `\"input\"` as input\n\n//! // and specify its type (`f32`) and shape (`&[2, 3]`).\n\n//! .add_input::<f32>(\"input\", &[2, 3])?\n\n//! // register `\"fc2\"` as output\n\n//! // and specify its type (`f32`).\n\n//! .add_output::<f32>(\"fc2\")?\n\n//! // specify backend (`\"mkldnn\"`) and its configuration (`\"\"`).\n\n//! .build(\"mkldnn\", \"\")?;\n\n//! # Ok(())\n\n//! # }\n\n//! ```\n\n//! Instead of `Builder`, we can use a combination of some low-level APIs.\n\n//! ```\n\n//! # fn main() -> Result<(), menoh::Error> {\n\n//! 
let mut model_data = menoh::ModelData::from_onnx(\"MLP.onnx\")?;\n\n//!\n", "file_path": "menoh/src/lib.rs", "rank": 66, "score": 6.8558647137710365 }, { "content": " /// # fn main() -> Result<(), Error> {\n\n /// let builder = Builder::from_onnx(\"MLP.onnx\")?;\n\n /// # Ok(())\n\n /// # }\n\n /// ```\n\n pub fn from_onnx<P>(path: P) -> Result<Self, Error>\n\n where P: AsRef<path::Path>\n\n {\n\n Ok(Self {\n\n model_data: ModelData::from_onnx(path)?,\n\n vpt_builder: VariableProfileTableBuilder::new()?,\n\n })\n\n }\n\n\n\n /// Register a variable as input.\n\n ///\n\n /// ```\n\n /// # use menoh::*;\n\n /// # fn main() -> Result<(), Error> {\n\n /// # let builder = Builder::from_onnx(\"MLP.onnx\")?;\n", "file_path": "menoh/src/builder/mod.rs", "rank": 67, "score": 6.702966476667427 }, { "content": "extern crate docopt;\n\nextern crate image;\n\nextern crate menoh;\n\n#[macro_use]\n\nextern crate serde_derive;\n\n\n\nuse image::GenericImage;\n\nuse std::cmp;\n\nuse std::error;\n\nuse std::fs;\n\nuse std::io;\n\nuse std::io::BufRead;\n\nuse std::path;\n\n\n\nconst USAGE: &'static str = r#\"\n\nVGG16 example\n\n\n\nUsage: vgg16 [options]\n\n\n\nOptions:\n\n -i --image PATH input image path [default: Light_sussex_hen.jpg]\n\n -m --model PATH onnx model path [default: VGG16.onnx]\n\n -s --synset-words PATH synset words path [default: synset_words.txt]\n\n\"#;\n\n\n\n#[derive(Debug, Deserialize)]\n", "file_path": "menoh/examples/vgg16.rs", "rank": 68, "score": 6.525264939948496 }, { "content": "//! ```\n\n//! # fn main() -> Result<(), menoh::Error> {\n\n//! # let mut model = menoh::Builder::from_onnx(\"MLP.onnx\")?\n\n//! # .add_input::<f32>(\"input\", &[2, 3])?\n\n//! # .add_output::<f32>(\"fc2\")?\n\n//! # .build(\"mkldnn\", \"\")?;\n\n//! model.run()?;\n\n//! # Ok(())\n\n//! # }\n\n//! ```\n\n//!\n\n//! ### 4. Fetch the result(s).\n\n//!\n\n//! ```\n\n//! # fn main() -> Result<(), menoh::Error> {\n\n//! 
# let mut model = menoh::Builder::from_onnx(\"MLP.onnx\")?\n\n//! # .add_input::<f32>(\"input\", &[2, 3])?\n\n//! # .add_output::<f32>(\"fc2\")?\n\n//! # .build(\"mkldnn\", \"\")?;\n\n//! // fetch a read-only view of a variable.\n", "file_path": "menoh/src/lib.rs", "rank": 69, "score": 5.791927592111911 }, { "content": "//! let mut vpt_builder = menoh::VariableProfileTableBuilder::new()?;\n\n//! vpt_builder.add_input::<f32>(\"input\", &[2, 3])?;\n\n//! vpt_builder.add_output::<f32>(\"fc2\")?;\n\n//! let vpt = vpt_builder.build(&model_data)?;\n\n//!\n\n//! model_data.optimize(&vpt)?;\n\n//! let model_builder = menoh::ModelBuilder::new(&vpt)?;\n\n//! let mut model = model_builder.build(model_data, \"mkldnn\", \"\")?;\n\n//! # Ok(())\n\n//! # }\n\n//! ```\n\n//!\n\n//! ### 2. Set data to input variable(s).\n\n//!\n\n//! ```\n\n//! # fn main() -> Result<(), menoh::Error> {\n\n//! # let mut model = menoh::Builder::from_onnx(\"MLP.onnx\")?\n\n//! # .add_input::<f32>(\"input\", &[2, 3])?\n\n//! # .add_output::<f32>(\"fc2\")?\n\n//! # .build(\"mkldnn\", \"\")?;\n", "file_path": "menoh/src/lib.rs", "rank": 70, "score": 5.66506085689941 }, { "content": "//! }\n\n//!\n\n//! model.run()?;\n\n//!\n\n//! let (out_dims, out_buf) = model.get_variable::<f32>(\"fc2\")?;\n\n//! println!(\"out:\");\n\n//! # assert_eq!(out_dims, &[2, 5]);\n\n//! println!(\" dims: {:?}\", out_dims);\n\n//! println!(\" buf: {:?}\", out_buf);\n\n//! # let expected = &[0., 0., 15., 96., 177., 0., 0., 51., 312., 573.];\n\n//! # for i in 0..10 {\n\n//! # assert!((out_buf[i] - expected[i]).abs() < 1e-6);\n\n//! # }\n\n//! Ok(())\n\n//! }\n\n//! ```\n\n//!\n\n//! ## Usage\n\n//!\n\n//! ### 1. 
Build a `Model`.\n", "file_path": "menoh/src/lib.rs", "rank": 71, "score": 5.574164464320669 }, { "content": "/// This trait makes it impossible to access internal handles from outside.\n", "file_path": "menoh/src/handler.rs", "rank": 72, "score": 2.5743384878227173 }, { "content": "use super::VariableProfileTableBuilder;\n\n\n\n#[test]\n", "file_path": "menoh/src/variable_profile_table_builder/tests.rs", "rank": 73, "score": 2.5433137984380574 } ]
Rust
src/component/transformed.rs
DaseinPhaos/arendur
5c3b6c4dffd969131ebf37a3f8eb9b6a3cf7f5c6
use geometry::prelude::*; use super::*; use std::sync::Arc; use spectrum::*; use renderer::scene::Scene; use lighting::{LightFlag, LightSample, SampleInfo, PathInfo}; #[derive(Clone, Debug)] pub struct TransformedComposable<T> { inner: T, local_parent: Arc<Matrix4f>, parent_local: Arc<Matrix4f>, } impl<T> TransformedComposable<T> { pub fn new(inner: T, local_parent: Arc<Matrix4f>, parent_local: Arc<Matrix4f>) -> Self { #[cfg(debug)] { assert_relative_eq(*local_parent *(*parent_local), Matrix4f::identity()); } TransformedComposable{ inner: inner, local_parent: local_parent, parent_local: parent_local, } } } impl<T: Composable> Composable for TransformedComposable<T> { #[inline] fn bbox_parent(&self) -> BBox3f { self.inner.bbox_parent().apply_transform(&*self.local_parent) } #[inline] fn intersection_cost(&self) -> Float { 1.0 as Float + self.inner.intersection_cost() } #[inline] default fn intersect_ray(&self, ray: &mut RawRay) -> Option<SurfaceInteraction> { *ray = ray.apply_transform(&*self.parent_local); let mut ret = self.inner.intersect_ray(ray); if let Some(ret) = ret.as_mut() { *ret = ret.apply_transform(&*self.local_parent); } *ray = ray.apply_transform(&*self.local_parent); ret } #[inline] default fn as_light(&self) -> &Light { unimplemented!(); } } impl<T: Primitive> Composable for TransformedComposable<T> { #[inline] fn intersect_ray(&self, ray: &mut RawRay) -> Option<SurfaceInteraction> { *ray = ray.apply_transform(&*self.parent_local); let mut ret = self.inner.intersect_ray(ray); if let Some(ret) = ret.as_mut() { *ret = ret.apply_transform(&*self.local_parent); ret.primitive_hit = Some(self); } *ray = ray.apply_transform(&*self.local_parent); ret } #[inline] fn as_light(&self) -> &Light { self } } impl<T: Primitive> Primitive for TransformedComposable<T> { #[inline] fn is_emissive(&self) -> bool { self.inner.is_emissive() } #[inline] fn get_material(&self) -> &Material { self.inner.get_material() } } impl<T: Primitive> Light for TransformedComposable<T> 
{ fn flags(&self) -> LightFlag { self.inner.flags() } #[inline] fn evaluate_ray(&self, rd: &RayDifferential) -> RGBSpectrumf { let rd = rd.apply_transform(&self.parent_local); self.inner.evaluate_ray(&rd) } #[inline] fn evaluate_path(&self, pos: Point3f, dir: Vector3f) -> RGBSpectrumf { let pos = self.parent_local.transform_point(pos); let dir = self.parent_local.transform_vector(dir); self.inner.evaluate_path(pos, dir) } #[inline] fn evaluate_sampled(&self, pos: Point3f, sample: Point2f) -> LightSample { let pos = self.parent_local.transform_point(pos); let ls = self.inner.evaluate_sampled(pos, sample); ls.apply_transform(&*self.local_parent) } #[inline] fn generate_path(&self, samples: SampleInfo) -> PathInfo { self.inner.generate_path(samples).apply_transform(&*self.local_parent) } #[inline] fn pdf_path(&self, pos: Point3f, dir: Vector3f, norm: Vector3f) -> (Float, Float) { let pos = self.parent_local.transform_point(pos); let dir = self.parent_local.transform_vector(dir); let norm = self.parent_local.transform_norm(norm); self.inner.pdf_path(pos, dir, norm) } #[inline] fn pdf(&self, pos: Point3f, wi: Vector3f) -> Float { let pos = self.parent_local.transform_point(pos); let wi = self.parent_local.transform_vector(wi); self.inner.pdf(pos, wi) } #[inline] fn power(&self) -> RGBSpectrumf { self.inner.power() } #[inline] fn preprocess(&mut self, s: &Scene) { self.inner.preprocess(s); } } impl<T: Composable> Composable for TransformedComposable<Arc<T>> { #[inline] fn bbox_parent(&self) -> BBox3f { self.inner.bbox_parent().apply_transform(&*self.local_parent) } #[inline] fn intersection_cost(&self) -> Float { 2.0 as Float + self.inner.intersection_cost() } #[inline] default fn intersect_ray(&self, ray: &mut RawRay) -> Option<SurfaceInteraction> { *ray = ray.apply_transform(&*self.parent_local); let mut ret = self.inner.intersect_ray(ray); if let Some(ret) = ret.as_mut() { *ret = ret.apply_transform(&*self.local_parent); } *ray = 
ray.apply_transform(&*self.local_parent); ret } #[inline] default fn as_light(&self) -> &Light { unimplemented!(); } } impl<T: Primitive> Composable for TransformedComposable<Arc<T>> { #[inline] fn intersect_ray(&self, ray: &mut RawRay) -> Option<SurfaceInteraction> { *ray = ray.apply_transform(&*self.parent_local); let mut ret = self.inner.intersect_ray(ray); if let Some(ret) = ret.as_mut() { *ret = ret.apply_transform(&*self.local_parent); ret.primitive_hit = Some(self); } *ray = ray.apply_transform(&*self.local_parent); ret } #[inline] fn as_light(&self) -> &Light { self } } impl<T: Primitive> Primitive for TransformedComposable<Arc<T>> { #[inline] fn is_emissive(&self) -> bool { self.inner.is_emissive() } #[inline] fn get_material(&self) -> &Material { self.inner.get_material() } } impl<T: Primitive> Light for TransformedComposable<Arc<T>> { fn flags(&self) -> LightFlag { self.inner.flags() } #[inline] fn evaluate_ray(&self, rd: &RayDifferential) -> RGBSpectrumf { let rd = rd.apply_transform(&self.parent_local); self.inner.evaluate_ray(&rd) } #[inline] fn evaluate_path(&self, pos: Point3f, dir: Vector3f) -> RGBSpectrumf { let pos = self.parent_local.transform_point(pos); let dir = self.parent_local.transform_vector(dir); self.inner.evaluate_path(pos, dir) } #[inline] fn evaluate_sampled(&self, pos: Point3f, sample: Point2f) -> LightSample { let pos = self.parent_local.transform_point(pos); let ls = self.inner.evaluate_sampled(pos, sample); ls.apply_transform(&*self.local_parent) } #[inline] fn generate_path(&self, samples: SampleInfo) -> PathInfo { self.inner.generate_path(samples).apply_transform(&*self.local_parent) } #[inline] fn pdf_path(&self, pos: Point3f, dir: Vector3f, norm: Vector3f) -> (Float, Float) { let pos = self.parent_local.transform_point(pos); let dir = self.parent_local.transform_vector(dir); let norm = self.parent_local.transform_norm(norm); self.inner.pdf_path(pos, dir, norm) } #[inline] fn pdf(&self, pos: Point3f, wi: Vector3f) -> Float { 
let pos = self.parent_local.transform_point(pos); let wi = self.parent_local.transform_vector(wi); self.inner.pdf(pos, wi) } #[inline] fn power(&self) -> RGBSpectrumf { self.inner.power() } } impl Composable for TransformedComposable<Arc<Composable>> { #[inline] fn bbox_parent(&self) -> BBox3f { self.inner.bbox_parent().apply_transform(&*self.local_parent) } #[inline] fn intersection_cost(&self) -> Float { 2.0 as Float + self.inner.intersection_cost() } #[inline] fn intersect_ray(&self, ray: &mut RawRay) -> Option<SurfaceInteraction> { *ray = ray.apply_transform(&*self.parent_local); let mut ret = self.inner.intersect_ray(ray); if let Some(ret) = ret.as_mut() { *ret = ret.apply_transform(&*self.local_parent); } *ray = ray.apply_transform(&*self.local_parent); ret } #[inline] fn as_light(&self) -> &Light { unimplemented!(); } } impl Composable for TransformedComposable<Arc<Primitive>> { #[inline] fn bbox_parent(&self) -> BBox3f { self.inner.bbox_parent().apply_transform(&*self.local_parent) } #[inline] fn intersection_cost(&self) -> Float { 2.0 as Float + self.inner.intersection_cost() } #[inline] fn intersect_ray(&self, ray: &mut RawRay) -> Option<SurfaceInteraction> { *ray = ray.apply_transform(&*self.parent_local); let mut ret = self.inner.intersect_ray(ray); if let Some(ret) = ret.as_mut() { *ret = ret.apply_transform(&*self.local_parent); ret.primitive_hit = Some(self); } *ray = ray.apply_transform(&*self.local_parent); ret } #[inline] fn as_light(&self) -> &Light { self } } impl Primitive for TransformedComposable<Arc<Primitive>> { #[inline] fn is_emissive(&self) -> bool { self.inner.is_emissive() } #[inline] fn get_material(&self) -> &Material { self.inner.get_material() } } impl Light for TransformedComposable<Arc<Primitive>> { fn flags(&self) -> LightFlag { self.inner.flags() } #[inline] fn evaluate_ray(&self, rd: &RayDifferential) -> RGBSpectrumf { let rd = rd.apply_transform(&self.parent_local); self.inner.evaluate_ray(&rd) } #[inline] fn 
evaluate_path(&self, pos: Point3f, dir: Vector3f) -> RGBSpectrumf { let pos = self.parent_local.transform_point(pos); let dir = self.parent_local.transform_vector(dir); self.inner.evaluate_path(pos, dir) } #[inline] fn evaluate_sampled(&self, pos: Point3f, sample: Point2f) -> LightSample { let pos = self.parent_local.transform_point(pos); let ls = self.inner.evaluate_sampled(pos, sample); ls.apply_transform(&*self.local_parent) } #[inline] fn generate_path(&self, samples: SampleInfo) -> PathInfo { self.inner.generate_path(samples).apply_transform(&*self.local_parent) } #[inline] fn pdf_path(&self, pos: Point3f, dir: Vector3f, norm: Vector3f) -> (Float, Float) { let pos = self.parent_local.transform_point(pos); let dir = self.parent_local.transform_vector(dir); let norm = self.parent_local.transform_norm(norm); self.inner.pdf_path(pos, dir, norm) } #[inline] fn pdf(&self, pos: Point3f, wi: Vector3f) -> Float { let pos = self.parent_local.transform_point(pos); let wi = self.parent_local.transform_vector(wi); self.inner.pdf(pos, wi) } #[inline] fn power(&self) -> RGBSpectrumf { self.inner.power() } }
use geometry::prelude::*; use super::*; use std::sync::Arc; use spectrum::*; use renderer::scene::Scene; use lighting::{LightFlag, LightSample, SampleInfo, PathInfo}; #[derive(Clone, Debug)] pub struct TransformedComposable<T> { inner: T, local_parent: Arc<Matrix4f>, parent_local: Arc<Matrix4f>, } impl<T> TransformedComposable<T> { pub fn new(inner: T, local_parent: Arc<Matrix4f>, parent_local: Arc<Matrix4f>) -> Self { #[cfg(debug)] { assert_relative_eq(*local_parent *(*parent_local), Matrix4f::identity()); } TransformedComposable{ inner: inner, local_parent: local_parent, parent_local: parent_local, } } } impl<T: Composable> Composable for TransformedComposable<T> { #[inline] fn bbox_parent(&self) -> BBox3f { self.inner.bbox_parent().apply_transform(&*self.local_parent) } #[inline] fn intersection_cost(&self) -> Float { 1.0 as Float + self.inner.intersection_cost() } #[inline] default fn intersect_ray(&self, ray: &mut RawRay) -> Option<SurfaceInteraction> { *ray = ray.apply_transform(&*self.parent_local); let mut ret = self.inner.intersect_ray(ray); if let Some(ret) = ret.as_mut() { *ret = ret.apply_transform(&*self.local_parent); } *ray = ray.apply_transform(&*self.local_parent); ret } #[inline] default fn as_light(&self) -> &Light { unimplemented!(); } } impl<T: Primitive> Composable for TransformedComposable<T> { #[inline] fn intersect_ray(&self, ray: &mut RawRay) -> Option<SurfaceInteraction> { *ray = ray.apply_transform(&*self.parent_local); let mut ret = self.inner.intersect_ray(ray); if let Some(ret) = ret.as_mut() { *ret = ret.apply_transform(&*self.local_parent); ret.primitive_hit = Some(self); } *ray = ray.apply_transform(&*self.local_parent); ret } #[inline] fn as_light(&self) -> &Light { self } } impl<T: Primitive> Primitive for TransformedComposable<T> { #[inline] fn is_emissive(&self) -> bool { self.inner.is_emissive() } #[inline] fn get_material(&self) -> &Material { self.inner.get_material() } } impl<T: Primitive> Light for TransformedComposable<T> 
{ fn flags(&self) -> LightFlag { self.inner.flags() } #[inline] fn evaluate_ray(&self, rd: &RayDifferential) -> RGBSpectrumf { let rd = rd.apply_transform(&self.parent_local); self.inner.evaluate_ray(&rd) } #[inline] fn evaluate_path(&self, pos: Point3f, dir: Vector3f) -> RGBSpectrumf { let pos = self.parent_local.transform_point(pos); let dir = self.parent_local.transform_vector(dir); self.inner.evaluate_path(pos, dir) } #[inline] fn evaluate_sampled(&self, pos: Point3f, sample: Point2f) -> LightSample { let pos = self.parent_local.transform_point(pos); let ls = self.inner.evaluate_sampled(pos, sample); ls.apply_transform(&*self.local_parent) } #[inline] fn generate_path(&self, samples: SampleInfo) -> PathInfo { self.inner.generate_path(samples).apply_transform(&*self.local_parent) } #[inline] fn pdf_path(&self, pos: Point3f, dir: Vector3f, norm: Vector3f) -> (Float, Float) { let pos = self.parent_local.transform_point(pos); let dir = self.parent_local.transform_vector(dir); let norm = self.parent_local.transform_norm(norm); self.inner.pdf_path(pos, dir, norm) } #[inline] fn pdf(&self, pos: Point3f, wi: Vector3f) -> Float { let pos = self.parent_local.transform_point(pos); let wi = self.parent_local.transform_vector(wi); self.inner.pdf(pos, wi) } #[inline] fn power(&self) -> RGBSpectrumf { self.inner.power() } #[inline] fn preprocess(&mut self, s: &Scene) { self.inner.preprocess(s); } } impl<T: Composable> Composable for TransformedComposable<Arc<T>> { #[inline] fn bbox_parent(&self) -> BBox3f { self.inner.bbox_parent().apply_transform(&*self.local_parent) } #[inline] fn intersection_cost(&self) -> Float { 2.0 as Float + self.inner.intersection_cost() } #[inline] default fn intersect_ray(&self, ray: &mut RawRay) -> Option<SurfaceInteraction> { *ray = ray.apply_transform(&*self.parent_local); let mut ret = self.inner.intersect_ray(ray); if let Some(ret) = ret.as_mut() { *ret = ret.apply_transform(&*self.local_parent); } *ray = 
ray.apply_transform(&*self.local_parent); ret } #[inline] default fn as_light(&self) -> &Light { unimplemented!(); } } impl<T: Primitive> Composable for TransformedComposable<Arc<T>> { #[inline] fn intersect_ray(&self, ray: &mut RawRay) -> Option<SurfaceInteraction> { *ray = ray.apply_transform(&*self.parent_local); let mut ret = self.inner.intersect_ray(ray); if let Some(ret) = ret.as_mut() { *ret = ret.apply_transform(&*self.local_parent); ret.primitive_hit = Some(self); } *ray = ray.apply_transform(&*self.local_parent); ret } #[inline] fn as_light(&self) -> &Light { self } } impl<T: Primitive> Primitive for TransformedComposable<Arc<T>> { #[inline] fn is_emissive(&self) -> bool { self.inner.is_emissive() } #[inline] fn get_material(&self) -> &Material { self.inner.get_material() } } impl<T: Primitive> Light for TransformedComposable<Arc<T>> { fn flags(&self) -> LightFlag { self.inner.flags() } #[inline] fn evaluate_ray(&self, rd: &RayDifferential) -> RGBSpectrumf { let rd = rd.apply_transform(&self.parent_local); self.inner.evaluate_ray(&rd) } #[inline] fn evaluate_path(&self, pos: Point3f, dir: Vector3f) -> RGBSpectrumf { let pos = self.parent_local.transform_point(pos); let dir = self.parent_local.transform_vector(dir); self.inner.evaluate_path(pos, dir) } #[inline] fn evaluate_sampled(&self, pos: Point3f, sample: Point2f) -> LightSample { let pos = self.parent_local.transform_point(pos); let ls = self.inner.evaluate_sampled(pos, sample); ls.apply_transform(&*self.local_parent) } #[inline] fn generate_path(&self, samples: SampleInfo) -> PathInfo { self.inner.generate_path(samples).apply_transform(&*self.local_parent) } #[inline] fn pdf_path(&self, pos: Point3f, dir: Vector3f, norm: Vector3f) -> (Float, Float) { let pos = self.parent_local.transform_point(po
#[inline] fn pdf(&self, pos: Point3f, wi: Vector3f) -> Float { let pos = self.parent_local.transform_point(pos); let wi = self.parent_local.transform_vector(wi); self.inner.pdf(pos, wi) } #[inline] fn power(&self) -> RGBSpectrumf { self.inner.power() } } impl Composable for TransformedComposable<Arc<Composable>> { #[inline] fn bbox_parent(&self) -> BBox3f { self.inner.bbox_parent().apply_transform(&*self.local_parent) } #[inline] fn intersection_cost(&self) -> Float { 2.0 as Float + self.inner.intersection_cost() } #[inline] fn intersect_ray(&self, ray: &mut RawRay) -> Option<SurfaceInteraction> { *ray = ray.apply_transform(&*self.parent_local); let mut ret = self.inner.intersect_ray(ray); if let Some(ret) = ret.as_mut() { *ret = ret.apply_transform(&*self.local_parent); } *ray = ray.apply_transform(&*self.local_parent); ret } #[inline] fn as_light(&self) -> &Light { unimplemented!(); } } impl Composable for TransformedComposable<Arc<Primitive>> { #[inline] fn bbox_parent(&self) -> BBox3f { self.inner.bbox_parent().apply_transform(&*self.local_parent) } #[inline] fn intersection_cost(&self) -> Float { 2.0 as Float + self.inner.intersection_cost() } #[inline] fn intersect_ray(&self, ray: &mut RawRay) -> Option<SurfaceInteraction> { *ray = ray.apply_transform(&*self.parent_local); let mut ret = self.inner.intersect_ray(ray); if let Some(ret) = ret.as_mut() { *ret = ret.apply_transform(&*self.local_parent); ret.primitive_hit = Some(self); } *ray = ray.apply_transform(&*self.local_parent); ret } #[inline] fn as_light(&self) -> &Light { self } } impl Primitive for TransformedComposable<Arc<Primitive>> { #[inline] fn is_emissive(&self) -> bool { self.inner.is_emissive() } #[inline] fn get_material(&self) -> &Material { self.inner.get_material() } } impl Light for TransformedComposable<Arc<Primitive>> { fn flags(&self) -> LightFlag { self.inner.flags() } #[inline] fn evaluate_ray(&self, rd: &RayDifferential) -> RGBSpectrumf { let rd = 
rd.apply_transform(&self.parent_local); self.inner.evaluate_ray(&rd) } #[inline] fn evaluate_path(&self, pos: Point3f, dir: Vector3f) -> RGBSpectrumf { let pos = self.parent_local.transform_point(pos); let dir = self.parent_local.transform_vector(dir); self.inner.evaluate_path(pos, dir) } #[inline] fn evaluate_sampled(&self, pos: Point3f, sample: Point2f) -> LightSample { let pos = self.parent_local.transform_point(pos); let ls = self.inner.evaluate_sampled(pos, sample); ls.apply_transform(&*self.local_parent) } #[inline] fn generate_path(&self, samples: SampleInfo) -> PathInfo { self.inner.generate_path(samples).apply_transform(&*self.local_parent) } #[inline] fn pdf_path(&self, pos: Point3f, dir: Vector3f, norm: Vector3f) -> (Float, Float) { let pos = self.parent_local.transform_point(pos); let dir = self.parent_local.transform_vector(dir); let norm = self.parent_local.transform_norm(norm); self.inner.pdf_path(pos, dir, norm) } #[inline] fn pdf(&self, pos: Point3f, wi: Vector3f) -> Float { let pos = self.parent_local.transform_point(pos); let wi = self.parent_local.transform_vector(wi); self.inner.pdf(pos, wi) } #[inline] fn power(&self) -> RGBSpectrumf { self.inner.power() } }
s); let dir = self.parent_local.transform_vector(dir); let norm = self.parent_local.transform_norm(norm); self.inner.pdf_path(pos, dir, norm) }
function_block-function_prefixed
[ { "content": "#[inline]\n\npub fn sample_uniform_cone(u: Point2f, cos_max: Float) -> Vector3f {\n\n let costheta = (1.0 as Float - u.x) + u.x * cos_max;\n\n let sintheta = (1.0 as Float - costheta*costheta).sqrt();\n\n let phi = u.y * (2.0 as Float * float::pi());\n\n Vector3f::new(sintheta*phi.cos(), sintheta*phi.sin(), costheta)\n\n}\n\n\n\n/// pdf of uniform samples on a cone\n", "file_path": "src/sample/mod.rs", "rank": 0, "score": 286244.83898703475 }, { "content": "#[inline]\n\npub fn sample_uniform_sphere(u: Point2f) -> Vector3f {\n\n let costheta = 1.0 as Float - 2.0 as Float * u.x;\n\n let sintheta = (1.0 as Float - costheta).max(0.0 as Float).sqrt();\n\n let phi = 2.0 as Float * float::pi() * u.y;\n\n Vector3f::new(sintheta*phi.cos(), sintheta*phi.sin(), costheta)\n\n}\n\n\n\n/// pdf of uniform samples on a hemisphere\n", "file_path": "src/sample/mod.rs", "rank": 1, "score": 259408.7781652111 }, { "content": "#[inline]\n\npub fn sample_uniform_triangle(u: Point2f) -> Vector3f {\n\n let sqrtux = u.x.sqrt();\n\n let x = 1.0 as Float - sqrtux;\n\n let y = sqrtux * u.y;\n\n Vector3f::new(x, y, 1.0 as Float - x - y)\n\n}\n\n\n\n/// power heuristic as per $\\beta = 2$\n", "file_path": "src/sample/mod.rs", "rank": 2, "score": 259408.77816521112 }, { "content": "#[inline]\n\npub fn sample_cosw_hemisphere(u: Point2f) -> Vector3f {\n\n let d = sample_concentric_disk(u);\n\n let z = (1.0 as Float - d.x*d.x - d.y*d.y).abs().sqrt();\n\n Vector3f::new(d.x, d.y, z)\n\n}\n\n\n\n/// pdf of cosine-theta weighted samples on a hemisphere\n", "file_path": "src/sample/mod.rs", "rank": 3, "score": 259408.7781652111 }, { "content": "#[inline]\n\npub fn sample_uniform_hemisphere(u: Point2f) -> Vector3f {\n\n let costheta = u.x;\n\n let sintheta = (1.0 as Float - costheta).max(0.0 as Float).sqrt();\n\n let phi = 2.0 as Float * float::pi() * u.y;\n\n Vector3f::new(sintheta*phi.cos(), sintheta*phi.sin(), costheta)\n\n}\n\n\n\n/// pdf of uniform samples on a hemisphere\n", 
"file_path": "src/sample/mod.rs", "rank": 4, "score": 259408.7781652111 }, { "content": "fn _sample_wh_trowbridge(wo: Vector3f, u: Point2f, ax: Float, ay: Float) -> Vector3f {\n\n let wo_stretched = Vector3f::new(ax*wo.x, ay*wo.y, wo.z).normalize();\n\n let cos_theta = normal::cos_theta(wo_stretched).abs();\n\n let (mut sx, mut sy) = if cos_theta > 0.9999 as Float {\n\n let r = (u.x/(1.0 as Float - u.x)).sqrt();\n\n let phi = 2.0 as Float * u.y * float::pi();\n\n (r*phi.cos(), r*phi.sin())\n\n } else {\n\n let sin_theta = (1.0 as Float - cos_theta*cos_theta).max(0. as Float).sqrt();\n\n let tan_theta = sin_theta/cos_theta;\n\n let cot_theta = cos_theta/sin_theta;\n\n let g1 = 2.0 as Float / (1.0 as Float + (\n\n 1.0 as Float + 1.0 as Float / (cot_theta*cot_theta)\n\n ).sqrt());\n\n let a = 2.0 as Float * u.y / g1 - 1.0 as Float;\n\n let tmp = (1.0 as Float / (a*a - 1.0 as Float)).min(1e10 as Float);\n\n let d = (tan_theta*tan_theta*tmp*tmp - (\n\n a * a - tan_theta * tan_theta\n\n )*tmp).max(0. as Float).sqrt();\n\n let sx1 = tan_theta*tmp - d;\n", "file_path": "src/bxdf/microfacet.rs", "rank": 5, "score": 246311.1976226591 }, { "content": "fn _sample_wh_beckmann(wo: Vector3f, u: Point2f, ax: Float, ay: Float) -> Vector3f {\n\n // let mut ln = u.x.ln();\n\n // if ln.is_infinite() { ln = 0. as Float;}\n\n // let (tan2_theta, phi) = if relative_eq!(ax, ay) { \n\n // (-ln * ax * ay, u.y * float::pi() * 2. as Float)\n\n // } else {\n\n // let mut phi = u.y * float::pi() * 2. as Float + float::frac_pi_2();\n\n // phi = (ay/ax*phi.tan()).atan();\n\n // if u.y > 0.5 as Float { phi += float::pi(); }\n\n // let sp = phi.sin();\n\n // let cp = phi.cos();\n\n // let ax2 = ax * ax;\n\n // let ay2 = ay * ay;\n\n // (-ln/(cp*cp/ax2+sp*sp/ay2), phi)\n\n // };\n\n // let ct = 1. as Float / (1. as Float + tan2_theta).sqrt();\n\n // let st = (1. as Float - ct*ct).max(0. as Float).sqrt();\n\n // let wh = Vector3f::new(st*phi.cos(), st*phi.sin(), ct);\n\n // if wo.dot(wh) <= 0. 
as Float {\n\n // -wh\n", "file_path": "src/bxdf/microfacet.rs", "rank": 6, "score": 246311.1976226591 }, { "content": "#[inline]\n\npub fn sample_concentric_disk(u: Point2f) -> Point2f {\n\n let u = (2.0 as Float * u) - Point2f::new(1.0 as Float, 1.0 as Float);\n\n if u.x == 0.0 as Float && u.y == 0.0 as Float {\n\n Point2f::new(0.0 as Float, 0.0 as Float)\n\n } else {\n\n let (r, theta) = if u.x.abs() > u.y.abs() {\n\n (u.x, float::frac_pi_4() * (u.y/u.x))\n\n } else {\n\n (u.y, float::frac_pi_2() - float::frac_pi_4() * (u.x/u.y))\n\n };\n\n r * Point2f::new(theta.cos(), theta.sin())\n\n }\n\n}\n\n\n\n/// pdf of concentric samples on a disk\n", "file_path": "src/sample/mod.rs", "rank": 7, "score": 221274.44339913665 }, { "content": "#[inline]\n\npub fn sample_uniform_disk(u: Point2f) -> Point2f {\n\n let r = u.x.sqrt();\n\n let theta = 2.0 as Float * float::pi() * u.y;\n\n Point2f::new(r*theta.cos(), r*theta.sin())\n\n}\n\n\n\n/// pdf of uniform samples on a disk\n", "file_path": "src/sample/mod.rs", "rank": 8, "score": 221274.44339913665 }, { "content": "#[inline]\n\npub fn permyz(p0t: Point3f) -> Point3f {\n\n Point3f::new(p0t.z, p0t.x, p0t.y)\n\n}\n\n\n\n/// Ray with differencials\n\n#[must_use]\n\n#[derive(Clone)]\n\npub struct RayDifferential {\n\n pub ray: RawRay,\n\n pub diffs: Option<(RawRay, RawRay)>,\n\n}\n\n\n\nimpl RayDifferential {\n\n pub fn apply_transform(&self, t: &Matrix4f) -> Self\n\n {\n\n let mut diffs = self.diffs;\n\n if let Some(diffs) = diffs.as_mut() {\n\n diffs.0 = diffs.0.apply_transform(t);\n\n diffs.1 = diffs.1.apply_transform(t);\n\n }\n", "file_path": "src/geometry/ray.rs", "rank": 9, "score": 215799.59955973344 }, { "content": "#[inline]\n\npub fn permxz(p0t: Point3f) -> Point3f {\n\n Point3f::new(p0t.y, p0t.z, p0t.x)\n\n}\n\n\n", "file_path": "src/geometry/ray.rs", "rank": 10, "score": 215799.59955973344 }, { "content": "#[inline]\n\npub fn pdf_concentric_disk() -> Float {\n\n float::pi()\n\n}\n\n\n\n/// transform an uniformly 
sampled `u` in $[0,1)^2$\n\n/// into uniform samples on a disk\n", "file_path": "src/sample/mod.rs", "rank": 11, "score": 201391.16519434337 }, { "content": "#[inline]\n\npub fn pdf_uniform_disk() -> Float {\n\n float::pi()\n\n}\n\n\n\n/// transform an uniformly sampled `u` in $[0,1)^2$\n\n/// into cosine-theta weighted samples on a hemisphere\n", "file_path": "src/sample/mod.rs", "rank": 12, "score": 201391.16519434337 }, { "content": "#[inline]\n\npub fn pdf_uniform_hemisphere() -> Float {\n\n 0.5 as Float * float::frac_1_pi()\n\n}\n\n\n\n/// transform an uniformly sampled `u` in $[0,1)^2$\n\n/// into uniform samples on a sphere\n", "file_path": "src/sample/mod.rs", "rank": 13, "score": 201391.16519434337 }, { "content": "#[inline]\n\npub fn pdf_uniform_sphere() -> Float {\n\n 0.25 as Float * float::frac_1_pi()\n\n}\n\n\n\n/// transform an uniformly sampled `u` in $[0,1)^2$\n\n/// into concentric samples on a disk, preserving relative\n\n/// distributions\n", "file_path": "src/sample/mod.rs", "rank": 14, "score": 201391.16519434337 }, { "content": "#[inline]\n\npub fn pdf_cosw_hemisphere(cos_theta: Float) -> Float {\n\n cos_theta * float::frac_1_pi()\n\n}\n\n\n\n/// transform an uniformly sampled `u` in $[0,1)^2$\n\n/// into uniform samples on a cone\n", "file_path": "src/sample/mod.rs", "rank": 15, "score": 199897.7521574734 }, { "content": "#[inline]\n\npub fn pdf_uniform_cone(cos_max: Float) -> Float {\n\n 1.0 as Float / ((1.0 as Float - cos_max) * 2.0 as Float * float::pi())\n\n}\n\n\n\n/// transform an uniformly sampled `u` in $[0,1)^2$\n\n/// into uniform samples on a triangle's barycentric coordinates\n", "file_path": "src/sample/mod.rs", "rank": 16, "score": 199897.75215747335 }, { "content": "/// compute fresnel reflectance for dielectrics\n\nfn fresnel_dielectric(mut cos_theta_i: Float, mut etai: Float, mut etat: Float) -> Float {\n\n if cos_theta_i < 0.0 as Float {\n\n // swap direction\n\n mem::swap(&mut etai, &mut etat);\n\n cos_theta_i = 
-cos_theta_i;\n\n }\n\n let sin2_theta_i = (1.0 as Float - cos_theta_i * cos_theta_i).max(0. as Float);\n\n let eta = etai / etat;\n\n let sin2_theta_t = eta*eta*sin2_theta_i;\n\n if sin2_theta_t >= 1.0 as Float {\n\n // total reflection\n\n return 1.0 as Float;\n\n }\n\n let cos_theta_t = (1.0 as Float - sin2_theta_t).sqrt();\n\n let etci = etat * cos_theta_i;\n\n let eict = etai * cos_theta_t;\n\n let r_para = (etci - eict) / (etci + eict);\n\n let eici = etai * cos_theta_i;\n\n let etct = etat * cos_theta_t;\n\n let r_perp = (eici - etct) / (eici + etct);\n\n (r_para * r_para + r_perp * r_perp) * 0.5 as Float\n\n}\n\n\n", "file_path": "src/bxdf/fresnel.rs", "rank": 17, "score": 193268.99093012916 }, { "content": "#[inline]\n\nfn correct_shading_normal(si: &SurfaceInteraction, wo: Vector3f, wi: Vector3f, mode: TransportMode) -> Float {\n\n if mode == TransportMode::Importance {\n\n let num = (wo.dot(si.shading_norm) * wi.dot(si.basic.norm)).abs();\n\n let denom = (wo.dot(si.basic.norm) * wi.dot(si.shading_norm)).abs();\n\n if denom == 0. as Float { 0. as Float }\n\n else { num/denom }\n\n } else { 1. as Float }\n\n}\n\n\n", "file_path": "src/renderer/bpt/mod.rs", "rank": 18, "score": 190055.7585557287 }, { "content": "#[inline]\n\npub fn next_up(f: Float) -> Float {\n\n if f.is_infinite() && f.is_sign_positive() {\n\n f\n\n } else if f == -0. as Float {\n\n 0. as Float\n\n } else {\n\n let t = f.to_bits();\n\n if f.is_sign_positive() {\n\n Float::from_bits(t+1)\n\n } else {\n\n Float::from_bits(t-1)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/geometry/float.rs", "rank": 19, "score": 187486.36095946102 }, { "content": "#[inline]\n\npub fn next_down(f: Float) -> Float {\n\n if f.is_infinite() && f.is_sign_negative() {\n\n f\n\n } else if f == 0. as Float {\n\n -0. 
as Float\n\n } else {\n\n let t = f.to_bits();\n\n if f.is_sign_negative() {\n\n Float::from_bits(t+1)\n\n } else {\n\n Float::from_bits(t-1)\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/geometry/float.rs", "rank": 20, "score": 187486.36095946102 }, { "content": "#[inline]\n\npub fn pi() -> Float {\n\n <Float as num_traits::FloatConst>::PI()\n\n}\n\n\n", "file_path": "src/geometry/float.rs", "rank": 21, "score": 186253.5685350748 }, { "content": "#[inline]\n\npub fn infinity() -> Float {\n\n <Float as num_traits::Float>::infinity()\n\n}\n\n\n", "file_path": "src/geometry/float.rs", "rank": 22, "score": 186253.5685350748 }, { "content": "#[inline]\n\npub fn epsilon() -> Float {\n\n <Float as num_traits::Float>::epsilon()\n\n}\n\n\n", "file_path": "src/geometry/float.rs", "rank": 23, "score": 186253.5685350748 }, { "content": "#[inline]\n\npub fn nan() -> Float {\n\n <Float as num_traits::Float>::nan()\n\n}\n\n\n", "file_path": "src/geometry/float.rs", "rank": 24, "score": 186253.5685350748 }, { "content": "/// error bound term given by Higham(2002)\n\npub fn eb_term(n: Float) -> Float {\n\n let ne = n * machine_epsilon();\n\n ne / (1. 
as Float - ne)\n\n}\n\n\n", "file_path": "src/geometry/float.rs", "rank": 25, "score": 184344.5069006558 }, { "content": "#[inline]\n\npub fn frac_pi_3() -> Float {\n\n <Float as num_traits::FloatConst>::FRAC_PI_3()\n\n}\n\n\n", "file_path": "src/geometry/float.rs", "rank": 26, "score": 182735.35671844304 }, { "content": "#[inline]\n\npub fn neg_infinity() -> Float {\n\n <Float as num_traits::Float>::neg_infinity()\n\n}\n\n\n", "file_path": "src/geometry/float.rs", "rank": 27, "score": 182735.35671844304 }, { "content": "#[inline]\n\npub fn frac_pi_6() -> Float {\n\n <Float as num_traits::FloatConst>::FRAC_PI_6()\n\n}\n\n\n", "file_path": "src/geometry/float.rs", "rank": 28, "score": 182735.35671844304 }, { "content": "#[inline]\n\npub fn machine_epsilon() -> Float {\n\n <Float as num_traits::Float>::epsilon() * 0.5 as Float\n\n}\n\n\n\n#[inline]\n", "file_path": "src/geometry/float.rs", "rank": 29, "score": 182735.35671844304 }, { "content": "#[inline]\n\npub fn frac_pi_8() -> Float {\n\n <Float as num_traits::FloatConst>::FRAC_PI_8()\n\n}\n\n\n", "file_path": "src/geometry/float.rs", "rank": 30, "score": 182735.35671844304 }, { "content": "#[inline]\n\npub fn frac_pi_4() -> Float {\n\n <Float as num_traits::FloatConst>::FRAC_PI_4()\n\n}\n\n\n", "file_path": "src/geometry/float.rs", "rank": 31, "score": 182735.35671844304 }, { "content": "#[inline]\n\npub fn frac_2_pi() -> Float {\n\n <Float as num_traits::FloatConst>::FRAC_2_PI()\n\n}\n\n\n", "file_path": "src/geometry/float.rs", "rank": 32, "score": 182735.35671844304 }, { "content": "#[inline]\n\npub fn frac_1_pi() -> Float {\n\n <Float as num_traits::FloatConst>::FRAC_1_PI()\n\n}\n\n\n", "file_path": "src/geometry/float.rs", "rank": 33, "score": 182735.35671844304 }, { "content": "#[inline]\n\npub fn frac_pi_2() -> Float {\n\n <Float as num_traits::FloatConst>::FRAC_PI_2()\n\n}\n\n\n", "file_path": "src/geometry/float.rs", "rank": 34, "score": 182735.35671844304 }, { "content": "#[inline]\n\npub fn 
power_heuristic(nf: usize, pdff: Float, ng: usize, pdfg: Float) -> Float {\n\n let f = nf as Float * pdff;\n\n let g = ng as Float * pdfg;\n\n (f*f)/(f*f+g*g)\n\n}\n\n\n", "file_path": "src/sample/mod.rs", "rank": 35, "score": 182470.54613907944 }, { "content": "#[inline] \n\npub fn balance_heuristic(nf: usize, pdff: Float, ng: usize, pdfg: Float) -> Float {\n\n let f = nf as Float * pdff;\n\n let g = ng as Float * pdfg;\n\n f/(f+g)\n\n}\n\n\n\npub mod naive;\n\npub mod strata;\n\npub mod filters;\n\npub mod distribution;\n\npub mod prelude;\n\nmod sink;\n", "file_path": "src/sample/mod.rs", "rank": 36, "score": 182470.54613907947 }, { "content": "#[inline]\n\npub fn one_minus_epsilon() -> Float {\n\n 1.0 as Float - epsilon()\n\n}\n\n\n", "file_path": "src/geometry/float.rs", "rank": 37, "score": 179431.49055924825 }, { "content": "#[inline]\n\npub fn frac_2_sqrt_pi() -> Float {\n\n <Float as num_traits::FloatConst>::FRAC_2_SQRT_PI()\n\n}\n\n\n", "file_path": "src/geometry/float.rs", "rank": 38, "score": 179431.49055924825 }, { "content": "/// compute fresnel reflectance for conductors\n\nfn fresnel_conductor(mut cos_theta_i: Float, mut etai: RGBSpectrumf, mut etat: RGBSpectrumf, k: RGBSpectrumf) -> RGBSpectrumf {\n\n if cos_theta_i < 0.0 as Float {\n\n // swap direction\n\n mem::swap(&mut etai, &mut etat);\n\n cos_theta_i = -cos_theta_i;\n\n }\n\n\n\n let sin_theta_i2 = 1.0 as Float - cos_theta_i * cos_theta_i;\n\n let cos_theta_i2 = cos_theta_i * cos_theta_i;\n\n let sin_theta_i4 = sin_theta_i2 * sin_theta_i2;\n\n let sin_theta_i2 = RGBSpectrumf::grey_scale(sin_theta_i2);\n\n let sin_theta_i4 = RGBSpectrumf::grey_scale(sin_theta_i4);\n\n let cos_theta_i2 = RGBSpectrumf::grey_scale(cos_theta_i2);\n\n \n\n let eta = etat/etai;\n\n let eta2 = eta * eta;\n\n let k2 = k * k;\n\n let tmp0 = eta2 - k2 - sin_theta_i2;\n\n let a2pb2 = (tmp0 * tmp0 + 4.0 as Float * eta2 * k2).sqrt();\n\n // FIXME: wrong\n\n let am2 = (a2pb2 * 2.0 as Float).sqrt();\n\n\n\n let r_perp = 
(a2pb2 + cos_theta_i2 - am2 * cos_theta_i) / (a2pb2 + cos_theta_i2 + am2 * cos_theta_i);\n\n let tmpa = a2pb2 * cos_theta_i2;\n\n let tmpb = am2 * cos_theta_i * sin_theta_i2 + sin_theta_i4;\n\n let r_para = r_perp * (tmpa - tmpb) / (tmpa + tmpb);\n\n (r_para * r_para + r_perp * r_perp) * 0.5 as Float \n\n}\n\n\n", "file_path": "src/bxdf/fresnel.rs", "rank": 39, "score": 178543.69777634798 }, { "content": "#[inline]\n\npub fn clamp(f: Float, min: Float, max: Float) -> Float {\n\n debug_assert!(min <= max);\n\n if f < min { min }\n\n else if f < max { f }\n\n else { max }\n\n}\n\n\n", "file_path": "src/geometry/float.rs", "rank": 40, "score": 177612.993459342 }, { "content": "/// Transform a perceived `roughness` in $[0,1]$ into an alpha value\n\n/// which can be used in the `Beckmann` and `Trowbridge` distributions\n\npub fn roughness_to_alpha(roughness: Float) -> Float {\n\n let x = roughness.max(1e-3 as Float).ln();\n\n 1.62142 as Float + 0.819955 as Float * x\n\n + 0.1734 as Float * x * x\n\n + 0.0171201 as Float * x * x * x \n\n + 0.000640711 as Float * x * x * x * x\n\n}\n\n\n\n/// A Beckmann microfacet distribution\n\n///\n\n/// With microfacet distribution specified as\n\n/// $D(\\omega_h) = \\frac{\n\n/// \\exp{-tan^2\\theta_h(cos^2\\phi_h/\\alpha_x^2+sin^2\\phi_h/\\alpha_y^2)}\n\n/// }{\n\n/// \\phi*\\alpha_x*\\alpha_y*cos^4\\theta_h\n\n/// }$\n\n#[derive(Copy, Clone, Debug)]\n\npub struct Beckmann {\n\n /// microfacet oriented perpendicular to `x`-axis\n\n pub ax: Float,\n", "file_path": "src/bxdf/microfacet.rs", "rank": 41, "score": 175917.15531159658 }, { "content": "fn g<S: Sampler>(scene: &Scene, sampler: &mut S, v0: &Node, v1: &Node) -> RGBSpectrumf {\n\n let d = v0.pos() - v1.pos();\n\n let mut g = 1. 
as Float / d.magnitude2();\n\n let d = d * g.sqrt();\n\n if v0.on_surface() { g *= v0.shading_norm().dot(d).abs(); }\n\n if v1.on_surface() { g *= v1.shading_norm().dot(d).abs(); }\n\n let ray = RawRay::from_od(v1.pos(), d);\n\n let epsilon = Point3f::default_epsilon();\n\n let epsilon = Vector3f::new(epsilon, epsilon, epsilon);\n\n let pfrom = v0.pos() + epsilon;\n\n let mut ray = RawRay::spawn(pfrom, v1.pos());\n\n let unoccluded = if let Some(si) = scene.aggregate.intersect_ray(&mut ray) {\n\n relative_eq!(si.basic.pos, v1.pos())\n\n } else {\n\n true\n\n };\n\n if unoccluded {\n\n // TODO: double check\n\n RGBSpectrumf::new(g, g, g)\n\n } else {\n\n RGBSpectrumf::black()\n\n }\n\n}\n\n\n", "file_path": "src/renderer/bpt/mod.rs", "rank": 42, "score": 161258.66309812124 }, { "content": "/// A renderable primitive\n\npub trait Primitive: Composable + Light {\n\n /// return if the primitive can emit lights\n\n fn is_emissive(&self) -> bool;\n\n\n\n /// return the material associated with this primitive\n\n fn get_material(&self) -> &Material;\n\n}\n\n\n", "file_path": "src/component/mod.rs", "rank": 43, "score": 151287.80977805087 }, { "content": "fn map_f32s_to_point<F>(src: &[f32], mut f: F) -> Vec<Point3f>\n\n where F: FnMut(Point3f) -> Point3f\n\n{\n\n let retlen = src.len()/3;\n\n let mut ret = Vec::with_capacity(retlen);\n\n for i in 0..retlen {\n\n let v = unsafe {\n\n Point3f::new(\n\n *src.get_unchecked(3*i) as Float,\n\n *src.get_unchecked(3*i+1) as Float,\n\n *src.get_unchecked(3*i+2) as Float\n\n )\n\n };\n\n ret.push(f(v));\n\n }\n\n ret\n\n}\n\n\n", "file_path": "src/shape/triangle.rs", "rank": 44, "score": 141753.10597538232 }, { "content": "fn map_f32s_to_vec<F>(src: &[f32], mut f: F) -> Vec<Vector3f>\n\n where F: FnMut(Vector3f) -> Vector3f\n\n{\n\n let retlen = src.len()/3;\n\n let mut ret = Vec::with_capacity(retlen);\n\n for i in 0..retlen {\n\n let v = unsafe {\n\n Vector3f::new(\n\n *src.get_unchecked(3*i) as Float,\n\n 
*src.get_unchecked(3*i+1) as Float,\n\n *src.get_unchecked(3*i+2) as Float\n\n )\n\n };\n\n ret.push(f(v));\n\n }\n\n ret\n\n}\n\n\n", "file_path": "src/shape/triangle.rs", "rank": 45, "score": 141414.27058597901 }, { "content": "fn map_f32s_to_point2<F>(src: &[f32], mut f: F) -> Vec<Point2f>\n\n where F: FnMut(Point2f) -> Point2f\n\n{\n\n let retlen = src.len()/2;\n\n let mut ret = Vec::with_capacity(retlen);\n\n for i in 0..retlen {\n\n let v = unsafe {\n\n Point2f::new(\n\n *src.get_unchecked(2*i) as Float,\n\n *src.get_unchecked(2*i+1) as Float\n\n )\n\n };\n\n ret.push(f(v));\n\n }\n\n ret\n\n}\n\n\n\nimpl IntoIterator for TriangleMesh {\n\n type Item = TriangleInstance;\n\n type IntoIter = TriangleInstance;\n", "file_path": "src/shape/triangle.rs", "rank": 46, "score": 141364.636047651 }, { "content": "/// A semi-infinite line\n\npub trait Ray {\n\n /// Returns where the ray originates\n\n fn origin(&self) -> Point3f;\n\n\n\n /// Sets the origin to `o`.\n\n /// Implementations must ensure that this is valid\n\n fn set_origin(&mut self, o: Point3f);\n\n\n\n /// Returns the max extend of the ray, in `self.direction().length()`\n\n fn max_extend(&self) -> Float;\n\n \n\n /// Set the max extend of the ray\n\n fn set_max_extend(&mut self, tmax: Float);\n\n\n\n /// Returns where the ray heads to.\n\n /// The length of the returned vector is the unit of the ray\n\n fn direction(&self) -> Vector3f;\n\n\n\n /// Sets the direction to `d`.\n\n /// Implementations must ensure that this is valid\n", "file_path": "src/geometry/ray.rs", "rank": 47, "score": 138414.10107956265 }, { "content": "/// Represents a spectrum\n\npub trait Spectrum\n\n where Self: Sized\n\n{\n\n type Scalar: PartialOrd + BaseNum;\n\n /// initialize to unified color\n\n fn grey_scale(n: Self::Scalar) -> Self;\n\n\n\n /// initialize to black\n\n #[inline]\n\n fn black() -> Self {\n\n <Self as Spectrum>::grey_scale(<Self::Scalar as Zero>::zero())\n\n }\n\n\n\n /// lerp\n\n fn lerp(&self, other: &Self, 
t: Float) -> Self;\n\n\n\n /// element-wise clamping\n\n fn clamp(&self, low: Self::Scalar, high: Self::Scalar) -> Self;\n\n\n\n /// convert to srgb\n", "file_path": "src/spectrum/mod.rs", "rank": 48, "score": 138279.62333215057 }, { "content": "pub trait ToNorm {\n\n fn to_norm(self) -> Float;\n\n \n\n fn from_norm(f: Float) -> Self;\n\n}\n\n\n\nimpl ToNorm for Float {\n\n #[inline]\n\n fn to_norm(self) -> Float {\n\n debug_assert!(self>=0. as Float);\n\n debug_assert!(self<=1. as Float);\n\n self\n\n }\n\n\n\n #[inline]\n\n fn from_norm(f: Float) -> Self {\n\n debug_assert!(f>=0. as Float);\n\n debug_assert!(f<=1. as Float);\n\n f\n\n }\n\n}\n\n\n\ndelegate_impl_to_norm!(u8);\n\ndelegate_impl_to_norm!(u16);\n\ndelegate_impl_to_norm!(u32);\n\n\n\npub mod prelude {\n\n pub use super::{RGBSpectrum, RGBSpectrumf, Spectrum};\n\n}", "file_path": "src/spectrum/mod.rs", "rank": 49, "score": 135599.1379708498 }, { "content": "// utility to bump a map\n\nfn add_bumping<T: Texture<Texel=Float> + ?Sized>(\n\n si: &mut SurfaceInteraction, dxy: &DxyInfo, bump: &T\n\n) {\n\n let mut sie = si.clone();\n\n let du = {\n\n // shifting in u\n\n let mut du = 0.5 as Float * (dxy.dudx.abs() + dxy.dudy.abs());\n\n if du == 0.0 as Float { du = 0.0005 as Float; }\n\n sie.basic.pos = si.basic.pos + du * si.shading_duv.dpdu;\n\n sie.uv.x = si.uv.x + du;\n\n sie.basic.norm = (si.shading_duv.dpdu.cross(si.shading_duv.dpdv) + du * si.duv.dndu).normalize();\n\n du\n\n };\n\n\n\n let displacement_u = bump.evaluate(&sie, dxy);\n\n\n\n let dv = {\n\n // shifting in v\n\n let mut dv = 0.5 as Float * (dxy.dvdx.abs() + dxy.dvdy.abs());\n\n if dv == 0.0 as Float { dv = 0.0005 as Float; }\n", "file_path": "src/material/mod.rs", "rank": 50, "score": 132030.69716290513 }, { "content": "#[inline]\n\nfn erf(x: Float) -> Float {\n\n // constants\n\n const A1: Float = 0.254829592 as Float;\n\n const A2: Float = -0.28449673 as Float;\n\n const A3: Float = 1.421413741 as Float;\n\n const A4: Float = 
-1.453152027 as Float;\n\n const A5: Float = 1.061405429 as Float;\n\n const P: Float = 0.3275911 as Float;\n\n\n\n // Save the sign of x\n\n let sign = x.signum();\n\n let x = x*sign;\n\n\n\n // A&S formula 7.1.26\n\n let t = 1.0 as Float / (1.0 as Float + P * x);\n\n let y =\n\n 1.0 as Float -\n\n (((((A5 * t + A4) * t) + A3) * t + A2) * t + A1) * t * (-x * x).exp();\n\n\n\n sign * y\n", "file_path": "src/bxdf/microfacet.rs", "rank": 51, "score": 131492.6636363352 }, { "content": "#[inline]\n\nfn schlick_fresnel(cost: Float, s: RGBSpectrumf) -> RGBSpectrumf {\n\n s + (1. as Float - cost).powi(5) * (RGBSpectrumf::grey_scale(1. as Float) - s)\n\n}\n", "file_path": "src/bxdf/microfacet.rs", "rank": 52, "score": 130600.67618057951 }, { "content": "#[inline]\n\nfn erf_inv(x: Float) -> Float {\n\n let x = x.max(-0.99999 as Float).min(0.99999 as Float);\n\n let mut w = -((1.0 as Float - x) * (1.0 as Float + x)).ln();\n\n let mut p;\n\n if w < 5.0 as Float {\n\n w = w - 2.5 as Float;\n\n p = 2.81022636e-08 as Float;\n\n p = 3.43273939e-07 as Float + p * w;\n\n p = -3.5233877e-06 as Float + p * w;\n\n p = -4.39150654e-06 as Float + p * w;\n\n p = 0.00021858087 as Float + p * w;\n\n p = -0.00125372503 as Float + p * w;\n\n p = -0.00417768164 as Float + p * w;\n\n p = 0.246640727 as Float + p * w;\n\n p = 1.50140941 as Float + p * w;\n\n } else {\n\n w = w.sqrt() - 3.0 as Float;\n\n p = -0.000200214257 as Float;\n\n p = 0.000100950558 as Float + p * w;\n\n p = 0.00134934322 as Float + p * w;\n", "file_path": "src/bxdf/microfacet.rs", "rank": 53, "score": 129210.58149463893 }, { "content": "/// A Light\n\npub trait Light: Sync+ Send {\n\n /// return the flags of the light\n\n fn flags(&self) -> LightFlag;\n\n\n\n /// test if the light has delta distribution\n\n #[inline]\n\n fn is_delta(&self) -> bool {\n\n self.flags().is_delta()\n\n }\n\n\n\n /// Given a position and an incoming direction in local coordinates,\n\n /// evaluate the light's radiance along that direction. 
This method\n\n /// takes an `RayDifferential` because some light implementations\n\n /// might found thouse differentials helpful.\n\n ///\n\n /// Default implementation yields zero radiance\n\n #[inline]\n\n fn evaluate_ray(&self, rd: &RayDifferential) -> RGBSpectrumf {\n\n self.evaluate_path(rd.ray.origin(), rd.ray.direction())\n\n }\n", "file_path": "src/lighting/mod.rs", "rank": 54, "score": 127437.1533995313 }, { "content": "#[allow(dead_code)]\n\nfn gamma_correct(v: Float) -> Float {\n\n if v <= 0.0031308 as Float {\n\n 12.92 as Float * v\n\n } else {\n\n 1.055 as Float * v.powf(1.0 as Float / 2.4 as Float) - 0.055 as Float\n\n }\n\n}\n\n\n", "file_path": "src/texturing/textures/image.rs", "rank": 55, "score": 127054.56124113084 }, { "content": "#[inline]\n\nfn solve_over_constrained_2x3(abc: Vector3f, m: (Vector3f, Vector3f), n: Vector3f) -> Option<Vector2f> {\n\n if n.x.abs() > n.y.abs() && n.x.abs() > n.z.abs() {\n\n Matrix2f::new(m.0.y, m.1.y, m.0.z, m.1.z)\n\n .invert().map(|m| {\n\n m * Vector2f::new(abc.y, abc.z)\n\n })\n\n } else if n.y.abs() > n.z.abs() {\n\n Matrix2f::new(m.0.x, m.1.x, m.0.z, m.1.z)\n\n .invert().map(|m| {\n\n m * Vector2f::new(abc.x, abc.z)\n\n })\n\n } else {\n\n Matrix2f::new(m.0.x, m.1.x, m.0.y, m.1.y)\n\n .invert().map(|m| {\n\n m * Vector2f::new(abc.x, abc.y)\n\n })\n\n }\n\n}", "file_path": "src/geometry/interaction.rs", "rank": 56, "score": 126931.74913207677 }, { "content": "fn inverse_gamma_correct(v: Float) -> Float {\n\n if v <= 0.04045 as Float {\n\n v * (1.0 as Float / 12.92 as Float)\n\n } else {\n\n ((1.0 as Float / 1.055 as Float) * v).powf(2.4 as Float)\n\n }\n\n}\n\n\n\nconst WEIGHT_LUT_SIZE: usize = 128;\n\n\n\nlazy_static! 
{\n\n static ref WEIGHT_LUT: Vec<Float> = {\n\n let mut v = Vec::with_capacity(WEIGHT_LUT_SIZE);\n\n for i in 0..WEIGHT_LUT_SIZE {\n\n let alpha = 2.0 as Float;\n\n let r2 = i as Float / ((WEIGHT_LUT_SIZE - 1) as Float);\n\n v.push((-alpha * r2).exp() - (-alpha).exp());\n\n }\n\n v\n\n };\n\n}", "file_path": "src/texturing/textures/image.rs", "rank": 57, "score": 125022.37029947573 }, { "content": "#[derive(Serialize, Deserialize, Clone)]\n\nstruct SceneDesc {\n\n lights: Vec<LightDesc>,\n\n components: Vec<Named<ComponentDesc>>,\n\n sampler: StdStrataSampler,\n\n camera: PerspecCam,\n\n multithreaded: bool,\n\n max_depth: usize,\n\n outputfilename: String,\n\n}\n\n\n", "file_path": "examples/arencli.rs", "rank": 58, "score": 107898.59106065273 }, { "content": "#[inline]\n\nfn pidx_to_pcenter(idx: Point2<isize>) -> Point2f {\n\n let mut ret: Point2f = idx.cast();\n\n ret.x += 0.5 as Float;\n\n ret.y += 0.5 as Float;\n\n ret\n\n}\n\n\n\n/// The mighty film\n\n///\n\n/// # Intended Usage:\n\n/// 1. Create with `new`.\n\n/// 2. Spawn an array of tiles\n\n/// 3. tracing was done within those tiles, possibly multithreaded\n\n/// 4. 
when done, collect the result into a single `image`\n\n#[derive(Serialize, Deserialize, Clone)]\n\npub struct Film {\n\n resolution: Point2<usize>,\n\n crop_window: BBox2<isize>,\n\n #[serde(skip_serializing, skip_deserializing, default = \"lanczos_default\")]\n\n filter: Arc<Filter>,\n\n filter_radius: Vector2f,\n\n // inv_filter_radius: Vector2f,\n\n}\n\n\n", "file_path": "src/filming/film.rs", "rank": 59, "score": 107436.10753551898 }, { "content": "/// An object that can transform geometry entities.\n\npub trait TransformExt: Transform3<Float> + Copy {\n\n #[inline]\n\n fn transform_ray<R>(&self, ray: &R) -> R\n\n where R: Ray\n\n {\n\n let m : Matrix4f = (*self).into();\n\n ray.apply_transform(&m)\n\n }\n\n\n\n #[inline]\n\n fn transform_ray_differential(&self, raydif: &RayDifferential) -> RayDifferential {\n\n let m : Matrix4f = (*self).into();\n\n raydif.apply_transform(&m)\n\n }\n\n\n\n #[inline]\n\n fn transform_bbox(&self, bbox: &BBox3<Float>) -> BBox3<Float> {\n\n bbox.apply_transform(self)\n\n }\n\n\n", "file_path": "src/geometry/transform.rs", "rank": 60, "score": 107260.18370430726 }, { "content": "#[inline]\n\nfn mul_float<TM, TP>(pix: TP, f: Float) -> TP\n\n where TP: Pixel<Subpixel=TM>,\n\n TM: BaseNum + image::Primitive + Copy,\n\n{\n\n pix.map(|a| {\n\n let a : Float = <Float as NumCast>::from(a).unwrap();\n\n <TM as NumCast>::from(a*f).unwrap()\n\n }) \n\n}\n\n\n", "file_path": "src/texturing/textures/image.rs", "rank": 61, "score": 104933.2774446921 }, { "content": "struct BsdfSink<'a> {\n\n bxdfs: [Option<Pointer<'a, Bxdf>>; 8],\n\n n: usize,\n\n}\n\n\n\nimpl<'a> Default for BsdfSink<'a> {\n\n fn default() -> BsdfSink<'a> {\n\n BsdfSink{\n\n bxdfs: [None, None, None, None, None, None, None, None],\n\n n: 0,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> BsdfSink<'a> {\n\n /// adding an bxdf\n\n #[inline]\n\n fn add(&mut self, bxdf: Pointer<'a, Bxdf>) {\n\n assert!(self.n < 8);\n\n let n = self.n;\n", "file_path": "src/material/bsdf.rs", "rank": 62, "score": 
102882.07003487165 }, { "content": "/// The material interface\n\npub trait Material: Sync + Send {\n\n /// \n\n fn compute_scattering<'a>(\n\n &self,\n\n si: &mut SurfaceInteraction,\n\n dxy: &DxyInfo,\n\n alloc: &'a Allocator\n\n ) -> bsdf::Bsdf<'a>;\n\n}\n\n\n\nimpl<T: Material + ?Sized> Material for Arc<T> {\n\n #[inline]\n\n fn compute_scattering<'a>(\n\n &self,\n\n si: &mut SurfaceInteraction,\n\n dxy: &DxyInfo,\n\n alloc: &'a Allocator\n\n ) -> bsdf::Bsdf<'a> {\n\n <T as Material>::compute_scattering(\n\n &*self, si, dxy, alloc\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/material/mod.rs", "rank": 63, "score": 101174.26489773284 }, { "content": "struct BsdfSinkIter<'a, 'b: 'a> {\n\n sink: &'a BsdfSink<'b>,\n\n i: usize,\n\n}\n\n\n\nimpl<'a, 'b: 'a> Iterator for BsdfSinkIter<'a, 'b> {\n\n type Item = &'a Bxdf;\n\n fn next(&mut self) -> Option<&'a Bxdf> {\n\n if self.i >= self.sink.n {\n\n None\n\n } else {\n\n let i = self.i;\n\n let ret = unsafe {\n\n self.sink.bxdfs.get_unchecked(i).as_ref().map(|p| {\n\n let ret: *const Bxdf = &**p;\n\n &*ret\n\n })\n\n };\n\n self.i += 1;\n\n ret\n\n }\n\n }\n\n}\n", "file_path": "src/material/bsdf.rs", "rank": 64, "score": 97001.59963148867 }, { "content": "fn parse_input(filename: &Path) -> Result<(Scene, StdPTRenderer), ParsingError> {\n\n let buf = {\n\n let mut file = std::fs::File::open(filename).map_err(|e| \n\n ParsingError::IOError(e)\n\n )?;\n\n let mut buf = String::new();\n\n let _ = file.read_to_string(&mut buf).map_err(|e|\n\n ParsingError::IOError(e)\n\n )?;\n\n buf\n\n };\n\n let scenedesc: SceneDesc = serde_json::from_str(buf.as_ref()).map_err(|e|\n\n ParsingError::DecodeError(e)\n\n )?;\n\n\n\n let mut meshes = HashMap::new();\n\n let mut primitives: HashMap<_, Arc<Composable>> = HashMap::new();\n\n // let mut transformed = HashMap::new();\n\n // let mut shapes = HashMap::new();\n\n let mut materials = HashMap::new();\n", "file_path": "examples/arencli.rs", "rank": 65, "score": 96331.0791263399 }, { 
"content": "/// The filter interface.\n\n/// A filter always lies at $(0, 0)$ in its local frame.\n\npub trait Filter: Send + Sync {\n\n /// Returns the filter's radius.\n\n /// The filter's support in local frame is thus given\n\n /// by $[-radius.x, radius.x]\\times [-radius.y, radius.y]$\n\n fn radius(&self) -> Vector2f;\n\n\n\n /// Returns the filter's support as a bounding box, in local frame.\n\n #[inline]\n\n fn support(&self) -> BBox2f {\n\n let p = self.radius();\n\n BBox2f::new(Point2f::from_vec(p), Point2f::from_vec(-p))\n\n }\n\n\n\n /// Evaluate the filter at `p` in its local frame.\n\n /// Caller MUST ensure that `p` lies inside the support of `self`.\n\n /// This method is thus marked as `unsafe`.\n\n unsafe fn evaluate_unsafe(&self, p: Point2f) -> Float;\n\n\n\n /// Evaluate the filter at `p` in its local frame.\n\n /// Point outside support is checked.\n", "file_path": "src/sample/mod.rs", "rank": 66, "score": 90218.4727297035 }, { "content": "fn lanczos_default() -> Arc<Filter> {\n\n Arc::new(filters::LanczosSincFilter::new(\n\n Vector2f::new(4. as Float, 4. as Float), 3.0 as Float\n\n ))\n\n}\n\n\n\nimpl Film {\n\n /// construction. 
`crop_window` specified in NDC\n\n pub fn new(resolution: Point2<usize>, crop_window: BBox2f, filter: Arc<Filter>) -> Film {\n\n let resf: Point2f = resolution.cast();\n\n let crop_window = BBox2::new(\n\n Point2::new(\n\n (resf.x * crop_window.pmin.x).ceil() as isize,\n\n (resf.y * crop_window.pmin.y).ceil() as isize\n\n ),\n\n Point2::new(\n\n (resf.x * crop_window.pmax.x).ceil() as isize,\n\n (resf.y * crop_window.pmax.y).ceil() as isize\n\n )\n\n );\n", "file_path": "src/filming/film.rs", "rank": 67, "score": 89814.11035675927 }, { "content": "// helper function for whitted rendering's light computation\n\nfn calculate_lighting<S: Sampler>(\n\n mut ray: RayDifferential, \n\n scene: &Scene, \n\n sampler: &mut S, \n\n alloc: &Allocator, \n\n depth: usize\n\n) -> RGBSpectrumf {\n\n let mut ret = RGBSpectrumf::black();\n\n if depth > 5 { return ret; }\n\n if let Some(mut surinter) = scene.aggregate.intersect_ray(&mut ray.ray) {\n\n let pos = surinter.basic.pos;\n\n let norm = surinter.shading_norm;\n\n let wo = surinter.basic.wo;\n\n let dxy = surinter.compute_dxy(&ray);\n\n if let Some(primitive) = surinter.primitive_hit {\n\n if primitive.is_emissive() {\n\n ret += primitive.evaluate_ray(&ray);\n\n // let rad = primitive.evaluate_ray(&ray);\n\n // print!(\"emission found: {:?}\", rad);\n\n }\n", "file_path": "src/renderer/whitted.rs", "rank": 68, "score": 89568.4392663423 }, { "content": "// helper function for whitted rendering's light computation\n\nfn calculate_lighting<S: Sampler>(\n\n mut ray: RayDifferential, \n\n scene: &Scene, \n\n sampler: &mut S, \n\n alloc: &Allocator,\n\n depth: usize,\n\n max_depth: usize,\n\n min_depth: usize,\n\n rr_threshold: Float\n\n) -> RGBSpectrumf {\n\n let mut ret = RGBSpectrumf::black();\n\n if depth > max_depth { return ret; }\n\n let mut beta = RGBSpectrumf::new(1. as Float, 1. as Float, 1. 
as Float);\n\n let mut specular_bounce = false;\n\n let mut bounces = 0;\n\n loop {\n\n if let Some(mut si) = scene.aggregate.intersect_ray(&mut ray.ray) {\n\n if bounces == 0 || specular_bounce {\n\n let term = si.le(-ray.ray.direction());\n\n if !term.valid() {\n", "file_path": "src/renderer/pt.rs", "rank": 69, "score": 89568.4392663423 }, { "content": "#[inline]\n\nfn approx_lerp<TM, TP>(pix0: TP, pix1: &TP, t: Float) -> TP\n\n where TP: Pixel<Subpixel=TM>,\n\n TM: BaseNum + image::Primitive + Copy,\n\n{\n\n pix0.map2(pix1, |a, b| {\n\n let a: Float = <Float as NumCast>::from(a).unwrap();\n\n let b: Float = <Float as NumCast>::from(b).unwrap();\n\n <TM as NumCast>::from(a*(1.0 as Float - t) + b * t).unwrap()\n\n })\n\n}\n\n\n", "file_path": "src/texturing/textures/image.rs", "rank": 70, "score": 87630.04405671227 }, { "content": "/// The sampling interface.\n\n/// Samplers should return sampled values in $[0, 1)$.\n\n///\n\n/// Additional information are provided through the interface\n\n/// (like pixel location, dimension, samples per pixel etc.)\n\n/// such that implementations might provide better-quality.\n\npub trait Sampler: Clone + Sync + Send\n\n{\n\n /// Start sampling a new pixel\n\n fn start_pixel(&mut self, p: Point2<u32>);\n\n\n\n /// get next 1-dimensional sample\n\n fn next(&mut self) -> Float;\n\n\n\n /// get next 2-dimensional sample\n\n #[inline]\n\n fn next_2d(&mut self) -> Point2f {\n\n Point2f::new(self.next(), self.next())\n\n }\n\n\n\n /// convinient method to sample a camera\n\n #[inline]\n\n fn get_camera_sample(&mut self, idx: Point2<u32>) -> filming::SampleInfo {\n\n filming::SampleInfo{\n\n pfilm: self.next_2d() + idx.cast().to_vec(),\n\n plens: self.next_2d(),\n", "file_path": "src/sample/mod.rs", "rank": 71, "score": 86760.17139159954 }, { "content": "fn generate_light_subpath<'a, S: Sampler>(\n\n scene: &'a Scene, sampler: &mut S, \n\n allocator: &mut Allocator<'a>, path: &mut [Node<'a>]\n\n) -> usize {\n\n if path.len() == 0 { 
return 0; }\n\n let (light_index, light_pdf, _) = scene.light_distribution.sample_discrete(sampler.next());\n\n let light = scene.get_light(light_index);\n\n // TODO\n\n let pathinfo = light.generate_path(sampler.get_light_sample());\n\n if pathinfo.pdfpos == 0. as Float || pathinfo.pdfdir == 0. as Float || pathinfo.radiance.is_black() {\n\n return 0;\n\n }\n\n path[0] = Node::Light{\n\n light: light,\n\n info: InteractInfo{\n\n // TODO: double check\n\n pos: pathinfo.ray.origin(),\n\n wo: pathinfo.ray.direction(),\n\n norm: Vector3f::zero(),\n\n },\n\n beta: pathinfo.radiance,\n\n pdf: pathinfo.pdfpos * light_pdf,\n\n pdf_reversed: 1. as Float,\n\n };\n\n let beta = pathinfo.radiance * pathinfo.ray.direction().dot(pathinfo.normal).abs() / (light_pdf * pathinfo.pdfpos * pathinfo.pdfdir);\n\n // TODO: handle infinite lights\n\n random_walk(scene, pathinfo.ray.into(), sampler, allocator, beta, pathinfo.pdfdir, TransportMode::Importance, path) + 1\n\n}\n\n\n", "file_path": "src/renderer/bpt/mod.rs", "rank": 72, "score": 83921.8167785775 }, { "content": "/// Load an `.obj` file into a vector\n\npub fn load_obj(path: &Path, transform: Matrix4f) -> Result<Vec<ComponentPointer>, tobj::LoadError> {\n\n let parent_path = path.parent().unwrap_or(\"\".as_ref());\n\n let (models, mtls) = tobj::load_obj(path)?;\n\n let mut texturess = HashMap::new();\n\n let mut bumps = HashMap::new();\n\n let mut materials: Vec<Arc<Material>> = Vec::with_capacity(mtls.len()+1);\n\n for mtl in mtls {\n\n let diffuse_texture_path = parent_path.join(mtl.diffuse_texture.clone());\n\n let diffuse = RGBImageTexture::new_as_arc(\n\n ImageInfo{\n\n name: diffuse_texture_path.into_os_string().into_string().unwrap_or_default(),\n\n trilinear: false,\n\n max_aniso: 16. as Float,\n\n wrapping: ImageWrapMode::Repeat,\n\n gamma: false,\n\n scale: 1. as Float,\n\n },\n\n UVMapping{\n\n scaling: Vector2f::new(1. as Float, 1. 
as Float),\n\n shifting: Vector2f::zero(),\n", "file_path": "src/component/mod.rs", "rank": 73, "score": 81070.39595076916 }, { "content": "#[derive(Copy, Clone)]\n\nstruct Bucket {\n\n count: usize,\n\n cost: Float,\n\n bound: BBox3f,\n\n initialized: bool,\n\n}\n\n\n", "file_path": "src/component/bvh.rs", "rank": 74, "score": 69480.29100677319 }, { "content": "#[derive(Copy, Clone)]\n\nstruct LinearNode {\n\n bound: BBox3f,\n\n /// `len==0` means leaf node, otherwise means length\n\n /// in the component array of this node\n\n len: usize,\n\n /// if leaf, means offset into the components array\n\n /// if interior, means offset to the second child\n\n offset: usize,\n\n split_axis: usize,\n\n}\n\n\n\nuse std::fmt::{Debug, Formatter, Result as FmtResult};\n\nimpl Debug for LinearNode {\n\n fn fmt(&self, fmt: &mut Formatter) -> FmtResult {\n\n let t = if self.len == 0 {\n\n \"Interior\"\n\n } else {\n\n \"Leaf\"\n\n };\n\n writeln!(\n\n fmt, \n\n \"\\n{}{{\\n\\tbound:{:?},\\n\\tlen:{}, offset:{}, split:{}\\n}}\", \n\n t, self.bound, self.len, self.offset, self.split_axis\n\n )\n\n }\n\n}\n\n\n", "file_path": "src/component/bvh.rs", "rank": 75, "score": 68030.38058281256 }, { "content": "#[derive(Copy, Clone)]\n\nstruct ComponentInfo {\n\n bound: BBox3f,\n\n centroid: Point3f,\n\n cost: Float,\n\n idx: usize,\n\n}\n\n\n\nimpl ComponentInfo {\n\n fn new(components: &[ComponentPointer]) -> Vec<ComponentInfo> {\n\n let mut ret = Vec::with_capacity(components.len());\n\n for (idx, c) in components.iter().enumerate() {\n\n let bound = c.bbox_parent();\n\n let centroid = (bound.pmin + bound.pmax.to_vec())/2.0 as Float;\n\n ret.push(ComponentInfo{\n\n bound, centroid, idx,\n\n cost: c.intersection_cost(),\n\n });\n\n }\n\n ret\n\n }\n", "file_path": "src/component/bvh.rs", "rank": 76, "score": 68030.38058281256 }, { "content": "#[derive(Serialize, Deserialize, Clone)]\n\nstruct Named<T> {\n\n name: String,\n\n value: Option<T>,\n\n}\n\n\n\nimpl<T> Named<T> {\n\n fn 
find_or_insert_with<'a, V, F>(\n\n &self, map: &'a mut HashMap<String, V>, f: F\n\n ) -> Option<&'a V>\n\n where V: 'a,\n\n F: FnOnce(&T) -> Option<V>\n\n {\n\n if let Some(ref t) = self.value {\n\n if let Some(v) = f(t) {\n\n map.insert(self.name.clone(), v);\n\n }\n\n }\n\n map.get(&self.name)\n\n }\n\n}\n\n\n", "file_path": "examples/arencli.rs", "rank": 77, "score": 67564.44043107612 }, { "content": "#[derive(Clone, Copy)]\n\nstruct BuildNode<'a> {\n\n bound: BBox3f,\n\n childs: Option<(&'a BuildNode<'a>, &'a BuildNode<'a>, usize)>,\n\n /// if leaf, means offset into the components array\n\n /// if interior, means offset to the second child in node array\n\n offset: usize,\n\n /// if leaf, means length into the components array\n\n /// if interior, means length in node array\n\n len: usize,\n\n}\n\n\n\nimpl<'a> Default for BuildNode<'a> {\n\n fn default() -> Self {\n\n unsafe {\n\n mem::uninitialized()\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> BuildNode<'a> {\n", "file_path": "src/component/bvh.rs", "rank": 78, "score": 66114.53000711548 }, { "content": "fn main() {\n\n let m = Matrix4f::from_translation(\n\n Vector3f::new(0.0 as Float, -5.0 as Float, 15.0 as Float)\n\n ) * Matrix4f::from_angle_y(Rad(float::frac_pi_2()))\n\n * Matrix4f::from_scale(1.0 as Float);\n\n println!(\"{:?}\", m);\n\n\n\n let mut camera = PerspecCam::new(\n\n Matrix4f::identity(),\n\n BBox2f::new(\n\n Point2f::new(-1.0 as Float, -0.75 as Float), \n\n Point2f::new(1.0 as Float, 1.0 as Float)\n\n // Point2f::new(-0.2 as Float, -0.2 as Float), \n\n // Point2f::new(0.2 as Float, 0.2 as Float)\n\n ),\n\n 0.1 as Float, \n\n 1000.0 as Float, \n\n // float::pi()*2.0 as Float / 3.0 as Float, \n\n float::frac_pi_2(),\n\n None, \n", "file_path": "examples/pt.rs", "rank": 79, "score": 63703.47824822433 }, { "content": "fn main() {\n\n env_logger::init().unwrap();\n\n let matches = App::new(\"The Arendur CLI\"\n\n ).version(\"0.1\").author(\"Luxko<luxko@qq.com>\")\n\n .arg(\n\n Arg::with_name(\"INPUT\")\n\n 
.help(\"The scene description input file\")\n\n .required(true)\n\n ).arg(\n\n Arg::with_name(\"thread\")\n\n .help(\"The number of working t\")\n\n .short(\"t\")\n\n .long(\"thread\")\n\n .value_name(\"NUM\")\n\n .takes_value(true)\n\n ).get_matches();\n\n\n\n let input_filename = matches.value_of(\"INPUT\").unwrap();\n\n if let Some(threads) = matches.value_of(\"thread\") {\n\n let threads = usize::from_str(threads.as_ref()).expect(\"Invalid input: thread needs to be a number\");\n", "file_path": "examples/arencli.rs", "rank": 80, "score": 63703.47824822433 }, { "content": "#[inline]\n\nfn partition(\n\n buckets: &mut [Bucket], component: &ComponentInfo, \n\n centroid_lb: Point3f, diagonal: Vector3f, axis: usize\n\n) {\n\n let dif = component.centroid - centroid_lb;\n\n let mut idx = (dif[axis]/diagonal[axis]*buckets.len() as Float) as usize;\n\n if idx == buckets.len() { idx -= 1; }\n\n let bucket = unsafe { buckets.get_unchecked_mut(idx)};\n\n if !bucket.initialized {\n\n bucket.count = 1;\n\n bucket.cost = component.cost;\n\n bucket.bound = component.bound;\n\n bucket.initialized = true;\n\n } else {\n\n bucket.count += 1;\n\n bucket.cost += component.cost;\n\n bucket.bound = bucket.bound.union(&component.bound);\n\n }\n\n}\n\n\n", "file_path": "src/component/bvh.rs", "rank": 81, "score": 62164.0251428702 }, { "content": "/// A renderer\n\npub trait Renderer {\n\n /// render a scene\n\n fn render(&mut self, scene: &Scene);\n\n}\n\n\n\npub mod scene;\n\npub mod whitted;\n\n// pub mod bpt;\n\npub mod pt;\n\npub mod prelude {\n\n pub use super::Renderer;\n\n pub use super::scene::Scene;\n\n pub use super::whitted::WhittedRenderer;\n\n // pub use super::bpt::BPTRenderer;\n\n pub use super::pt::PTRenderer;\n\n}\n", "file_path": "src/renderer/mod.rs", "rank": 82, "score": 61701.18452267429 }, { "content": "/// A bidirectional distribution function\n\npub trait Bxdf {\n\n /// returns the type of the bxdf\n\n fn kind(&self) -> BxdfType;\n\n\n\n /// check if the type 
matches\n\n #[inline]\n\n fn is(&self, t: BxdfType) -> bool {\n\n self.kind().intersects(t)\n\n }\n\n\n\n /// evaluate the function given two normalized directions\n\n fn evaluate(&self, wo: Vector3f, wi: Vector3f) -> RGBSpectrumf;\n\n\n\n /// Given an outgoing direction `wo`, and a uniform sample\n\n /// `u` from $[0,1)^2$, sample an incoming direction `wi`,\n\n /// and returns it with function value evaluated as `f(wo, wi)`,\n\n /// as well as the pdf associated with the incoming direction,\n\n /// as well as the type of the scattering event.\n\n ///\n\n /// The default implementation samples the incoming direction\n", "file_path": "src/bxdf/mod.rs", "rank": 83, "score": 61701.18452267429 }, { "content": "/// A fresnel interface\n\npub trait Fresnel {\n\n /// given an incoming direction, specify the reflectance factor\n\n fn evaluate(&self, cos_theta_i: Float) -> RGBSpectrumf;\n\n}\n\n\n\n/// A fresnel conductor\n\n#[derive(Copy, Clone, Debug)]\n\npub struct Conductor {\n\n pub etai: RGBSpectrumf,\n\n pub etat: RGBSpectrumf,\n\n pub k: RGBSpectrumf,\n\n}\n\n\n\nimpl Conductor {\n\n /// construction\n\n #[inline]\n\n pub fn new(etai: RGBSpectrumf, etat: RGBSpectrumf, k: RGBSpectrumf) -> Conductor {\n\n Conductor {\n\n etai: etai, etat: etat, k: k\n\n }\n", "file_path": "src/bxdf/fresnel.rs", "rank": 84, "score": 61701.18452267429 }, { "content": "fn sah_midpoint(\n\n components: &[ComponentInfo], split_axis: usize, cb: BBox3f, inv_area: Float\n\n) -> Point3f {\n\n const BUCKETS: usize = 32;\n\n let mut buckets = [Bucket::default(); BUCKETS];\n\n let diagonal = cb.diagonal();\n\n for component in components.iter() {\n\n partition(\n\n &mut buckets, component, \n\n cb.pmin, diagonal, split_axis\n\n );\n\n }\n\n let mut accum = [Default::default(); BUCKETS];\n\n accum[0] = buckets[0];\n\n let mut accum_rev = [Default::default(); BUCKETS];\n\n accum_rev[BUCKETS-1] = buckets[BUCKETS-1];\n\n for i in 1..BUCKETS-1 {\n\n accum[i] = accum[i-1].union(&buckets[i]);\n\n 
accum_rev[BUCKETS-1-i] = accum_rev[BUCKETS-i].union(&buckets[BUCKETS-i]);\n\n }\n", "file_path": "src/component/bvh.rs", "rank": 85, "score": 60731.93385883656 }, { "content": "/// 2D texture mapping interface\n\npub trait Mapping2D {\n\n /// given a surface interaction and its dxyinfo, compute the texture info\n\n fn map(&self, si: &SurfaceInteraction, dxy: &DxyInfo) -> TexInfo2D;\n\n}\n\n\n", "file_path": "src/texturing/mod.rs", "rank": 86, "score": 60371.83504730063 }, { "content": "/// A microfacet distribution description\n\npub trait MicrofacetDistribution {\n\n /// Given the macro surface normal `wh`, returns the differential\n\n /// area of microfacets from that angle\n\n ///\n\n /// A physically plausible microfacet distribution $D$ should be\n\n /// normalized, s.t. $\\integral_{H^2(n)}D(\\omega_h)cos\\theta_h d\\omega_h=1$\n\n fn distribution(&self, wh: Vector3f) -> Float;\n\n\n\n /// Given a direction `w`, return masked prjected surface area\n\n /// per visible projected surface area along that direction,\n\n /// assuming this value is independent from `wh`.\n\n fn lambda(&self, w: Vector3f) -> Float;\n\n\n\n /// fraction of facets visible from `w`.\n\n /// The masking-shadowing function $G_l$\n\n /// This interface assumes independence from `wh`\n\n #[inline]\n\n fn visible(&self, w: Vector3f) -> Float {\n\n 1. as Float / (1. 
as Float + self.lambda(w))\n\n }\n", "file_path": "src/bxdf/microfacet.rs", "rank": 87, "score": 60371.83504730063 }, { "content": "/// 3D texture mapping interface\n\npub trait Mapping3D {\n\n /// given a surface interaction and its dxyinfo, compute the texture info\n\n fn map(&self, si: &SurfaceInteraction, dxy: &DxyInfo) -> TexInfo3D;\n\n}\n\n\n", "file_path": "src/texturing/mod.rs", "rank": 88, "score": 60371.83504730063 }, { "content": "#[inline]\n\nfn handle_tails<'a>(\n\n alloc: &mut Allocator<'a>, components: &mut [ComponentInfo], offset: usize,\n\n node_count: &mut usize, ordered: &mut [ComponentInfo], strategy: BVHStrategy,\n\n i: usize, split_axis: usize, ret: &mut BuildNode<'a>, bound: BBox3f\n\n) {\n\n if i == 0 || i == components.len() {\n\n ret.to_leaf(offset, components.len(), bound);\n\n } else {\n\n let child0 = recursive_build(\n\n alloc, &mut components[0..i], offset,\n\n node_count, &mut ordered[0..i], strategy\n\n );\n\n let child1 = recursive_build(\n\n alloc, &mut components[i..], offset+i,\n\n node_count, &mut ordered[i..], strategy\n\n );\n\n ret.to_interior(\n\n child0, child1, split_axis\n\n );\n\n }\n\n}\n", "file_path": "src/component/bvh.rs", "rank": 89, "score": 59236.04587438982 }, { "content": "fn sort_mid<'a>(\n\n alloc: &mut Allocator<'a>, components: &mut [ComponentInfo], offset: usize,\n\n node_count: &mut usize, ordered: &mut [ComponentInfo], strategy: BVHStrategy,\n\n mid: Float, split_axis: usize, ret: &mut BuildNode<'a>, bound: BBox3f\n\n) {\n\n assert!(components.len()==ordered.len());\n\n let mut j = ordered.len();\n\n let mut i = 0;\n\n unsafe {\n\n for component in components.iter() {\n\n if component.centroid[split_axis] < mid {\n\n *ordered.get_unchecked_mut(i) = *component;\n\n i += 1;\n\n } else {\n\n j -= 1;\n\n *ordered.get_unchecked_mut(j) = *component;\n\n }\n\n }\n\n assert!(j == i);\n\n }\n\n components.copy_from_slice(ordered);\n\n handle_tails(\n\n alloc, components, offset, node_count, ordered,\n\n 
strategy, i, split_axis, ret, bound\n\n );\n\n}\n\n\n", "file_path": "src/component/bvh.rs", "rank": 90, "score": 59231.508590434954 }, { "content": "fn recursive_build<'a>(\n\n alloc: &mut Allocator<'a>, components: &mut [ComponentInfo], offset: usize,\n\n node_count: &mut usize, ordered: &mut [ComponentInfo], strategy: BVHStrategy\n\n) -> &'a mut BuildNode<'a> {\n\n assert!(components.len()==ordered.len());\n\n assert!(components.len()!=0);\n\n *node_count += 1;\n\n let mut ret: &'a mut BuildNode<'a> = alloc.alloc_default();\n\n if components.len() == 1 { unsafe {\n\n ret.to_leaf(offset, 1, components.get_unchecked(0).bound);\n\n *ordered.get_unchecked_mut(0) = *components.get_unchecked(0);\n\n }} else {\n\n let (bound, centroid_bound) = {\n\n let mut b = unsafe {components.get_unchecked(0).bound};\n\n let mut cb = unsafe {\n\n BBox3f::new(\n\n components.get_unchecked(0).centroid,\n\n components.get_unchecked(0).centroid\n\n )\n\n };\n", "file_path": "src/component/bvh.rs", "rank": 91, "score": 59231.508590434954 }, { "content": "fn cal_mis_weight(\n\n scene: &Scene, cam_nodes: &[Node],\n\n light_nodes: &[Node]\n\n) -> Float {\n\n let t = cam_nodes.len() as usize;\n\n let s = light_nodes.len() as usize;\n\n if s + t == 2 {return 1. as Float; }\n\n let mut sum_ri = 0. as Float;\n\n let remap0 = |f| {\n\n if f == 0. as Float {\n\n 1. as Float\n\n } else {\n\n f\n\n }\n\n };\n\n let mut ri = 1. 
as Float;\n\n for i in 1..t {\n\n let pdfrev = cam_nodes[t-i-1].get_pdf_rev();\n\n let pdffwd = cam_nodes[t-i-1].get_pdf();\n\n ri *= remap0(pdfrev)/remap0(pdffwd);\n", "file_path": "src/renderer/bpt/mod.rs", "rank": 92, "score": 58177.14485259443 }, { "content": "/// A shape\n\npub trait Shape: Sync + Send\n\n{\n\n // /// returns basic info of this shape\n\n // fn info(&self) -> &ShapeInfo;\n\n // fn orientation_reversed(&self) -> bool;\n\n\n\n // fn reverse_orientation(&mut self, reverse: bool);\n\n \n\n /// returns bounding box of the shape in its local frame\n\n fn bbox_local(&self) -> BBox3f;\n\n\n\n /// Tests for intersection.\n\n // /// - `ray` is in parent frame\n\n /// - if hit, return `t` as the parametric distance along the ray\n\n /// to the hitting point., and a `surface_interaction` for hitting\n\n /// information at the surface, in local frame.\n\n fn intersect_ray(&self, ray: &RawRay) -> Option<(Float, SurfaceInteraction)>;\n\n\n\n /// Tests if the interaction can occur. Implementation maybe faster\n\n /// than `self.intersect_ray`\n", "file_path": "src/shape/mod.rs", "rank": 93, "score": 56374.923197732016 }, { "content": "/// A renderable composable component.\n\npub trait Composable: Sync + Send {\n\n /// returns bounding box in parent frame.\n\n fn bbox_parent(&self) -> BBox3f;\n\n\n\n /// test for intersection. Note that its guarantees are from `Shape`'s:\n\n /// - `ray` is specified in parent frame,\n\n /// - if hit, returns surface interaction data in *parent* frame.\n\n /// - if hit, `ray`'s `tmax` would be updated to the hitting `t`.\n\n fn intersect_ray(&self, ray: &mut RawRay) -> Option<SurfaceInteraction>;\n\n\n\n /// test if an intersection can occur. 
Might be more efficient\n\n #[inline]\n\n fn can_intersect(&self, ray: &RawRay) -> bool {\n\n let mut ray = ray.clone();\n\n self.intersect_ray(&mut ray).is_some()\n\n }\n\n\n\n fn as_light(&self) -> &Light {\n\n unimplemented!();\n\n }\n", "file_path": "src/component/mod.rs", "rank": 94, "score": 56374.923197732016 }, { "content": "/// A camera!\n\npub trait Camera: Send + Sync {\n\n /// parent to view-space transform\n\n fn parent_to_view(&self) -> Matrix4f;\n\n\n\n /// view to parent\n\n fn view_to_parent(&self) -> Matrix4f {\n\n self.parent_to_view().inverse_transform().expect(\"matrix inversion failure\")\n\n }\n\n\n\n /// evaluate importance, given `posw` and `dirw` of a camera ray\n\n /// returns the importance and the raster position of the ray\n\n fn evaluate_importance(\n\n &self, posw: Point3f, dirw: Vector3f\n\n ) -> Option<(RGBSpectrumf, Point2f)>;\n\n\n\n /// Given a `posw` in the world with a uniform `sample` in $[0, 1)$,\n\n /// sample an incoming direction from the camera to that `pos`,\n\n /// returns the sampling result in a `ImportanceSample`.\n\n fn evaluate_importance_sampled(&self, posw: Point3f, sample: Point2f) -> (ImportanceSample, Point2f);\n\n\n", "file_path": "src/filming/mod.rs", "rank": 95, "score": 56374.923197732016 }, { "content": "/// The texture interface\n\npub trait Texture: Send + Sync {\n\n type Texel;\n\n\n\n /// Evaluate the texture given interaction info and partial\n\n /// differential info\n\n fn evaluate(&self, si: &SurfaceInteraction, dxy: &DxyInfo) -> Self::Texel;\n\n\n\n /// Mean value of the texture\n\n fn mean(&self) -> Self::Texel;\n\n}\n\n\n\nimpl<'a, T: 'a> Texture for &'a T\n\n where T: Texture\n\n{\n\n type Texel = <T as Texture>::Texel;\n\n\n\n #[inline]\n\n fn evaluate(&self, si: &SurfaceInteraction, dxy: &DxyInfo) -> Self::Texel {\n\n (*self).evaluate(si, dxy)\n\n }\n", "file_path": "src/texturing/mod.rs", "rank": 96, "score": 56374.923197732016 }, { "content": "fn connect<S: Sampler>(\n\n scene: &Scene, 
cam_nodes: &mut [Node],\n\n light_nodes: &mut [Node], camera: &Camera,\n\n sampler: &mut S, praster: &mut Point2f, \n\n mis_weight: &mut Float\n\n) -> RGBSpectrumf {\n\n let mut ret = RGBSpectrumf::black();\n\n let t = cam_nodes.len();\n\n let s = light_nodes.len();\n\n if t > 1 \n\n && s != 0 \n\n && cam_nodes.last().unwrap().is_light_node() {\n\n // invalid connection strategy\n\n return ret;\n\n }\n\n\n\n let mut sampled;\n\n if s == 0 {\n\n // no lights\n\n let pt = cam_nodes.last().unwrap();\n", "file_path": "src/renderer/bpt/mod.rs", "rank": 97, "score": 55350.81112137198 }, { "content": "fn random_walk<'a, S: Sampler>(\n\n scene: &'a Scene, mut ray_differential: RayDifferential,\n\n sampler: &mut S, allocator: &mut Allocator<'a>,\n\n mut beta: RGBSpectrumf, mut pdf: Float, mode: TransportMode,\n\n path: &mut [Node<'a>]\n\n) -> usize {\n\n if path.len() == 1 { return 0; }\n\n let mut pdfrev = 0. as Float;\n\n let mut bounces = 1usize;\n\n // let pathptr = path.as_mut_ptr();\n\n loop {\n\n // let (node, prev) = unsafe {\n\n // (pathptr.offset(bounces).as_mut().unwrap(),\n\n // pathptr.offset(bounces-1).as_mut().unwrap())\n\n // };\n\n // TODO: handle medium\n\n if let Some(mut si) = scene.aggregate.intersect_ray(&mut ray_differential.ray) {\n\n // TODO: handle infinite lights\n\n if let Some(primitive) = si.primitive_hit {\n\n let dxy = si.compute_dxy(&ray_differential);\n", "file_path": "src/renderer/bpt/mod.rs", "rank": 98, "score": 53023.06405922641 }, { "content": "fn generate_camera_subpath<'a, S: Sampler>(\n\n scene: &'a Scene, sampler: &mut S, \n\n allocator: &mut Allocator<'a>,\n\n camera: &'a Camera, pfilm: Point2f, path: &mut [Node<'a>]\n\n) -> usize {\n\n if path.len() == 0 { return 0; }\n\n let plens = sampler.next_2d();\n\n let sampleinfo = SampleInfo{\n\n pfilm: pfilm, plens: plens,\n\n };\n\n let mut ray_differential = camera.generate_path_differential(sampleinfo);\n\n ray_differential.scale_differentials(1.0 as Float / 
sampler.sample_per_pixel() as Float);\n\n // TODO: double check ray direction\n\n let (pdfpos, pdfdir) = camera.pdf(\n\n ray_differential.ray.origin(), ray_differential.ray.direction()\n\n );\n\n let beta = RGBSpectrumf::new(1. as Float, 1. as Float, 1. as Float);\n\n path[0] = Node::Camera{\n\n camera: camera,\n\n info: InteractInfo{\n", "file_path": "src/renderer/bpt/mod.rs", "rank": 99, "score": 51950.68282067358 } ]
Rust
src/game.rs
frellica/bevy_mine_sweeper
31bf44c00615a25f03e1dabc12c61a2e4d4c666b
use std::cmp; use bevy::{ diagnostic::{Diagnostics, FrameTimeDiagnosticsPlugin}, prelude::*, }; use crate::mine_core::{ BlockType, BlockStatus, MinePlayground, MineBlock, Position, ClickResult }; pub fn game_app(config: GameConfig) { App::build() .add_resource(WindowDescriptor { vsync: false, width: cmp::max(config.width * BLOCK_WIDTH, MIN_WIDTH) as f32, height: cmp::max(config.height * BLOCK_WIDTH + Y_MARGIN, MIN_HEIGHT) as f32, title: String::from("Mine Sweeper"), resizable: false, ..Default::default() }) .add_resource(config) .add_plugins(DefaultPlugins) .add_plugin(GamePlugin) .run(); } struct GamePlugin; impl Plugin for GamePlugin { fn build(&self, app: &mut AppBuilder) { app.init_resource::<ButtonMaterials>() .add_resource(CursorLocation(Vec2::new(0.0, 0.0))) .add_plugin(FrameTimeDiagnosticsPlugin) .add_resource(State::new(GameState::Prepare)) .add_startup_system(setup.system()) .add_system(fps_update.system()) .add_system(debug_text_update.system()) .add_system(restart_button_system.system()) .add_startup_system(new_map.system()) .add_system(handle_movement.system()) .add_system(handle_click.system()) .add_system(render_map.system()) .add_stage_after(stage::UPDATE, STAGE, StateStage::<GameState>::default()) .on_state_enter(STAGE, GameState::Prepare, init_map_render.system()) .on_state_enter(STAGE, GameState::Ready, new_map.system()); } } const BLOCK_WIDTH: usize = 24; const MIN_HEIGHT: usize = 160; const MIN_WIDTH: usize = 160; const Y_MARGIN: usize = 50; const SPRITE_SIZE: f32 = 48.0; const STAGE: &str = "game_state"; const NEW_GAME_TEXT: &str = "New Game"; const HIDDEN_INDEX: usize = 10; struct RefreshButton; struct DebugText; struct MapData { map_entity: Entity, } struct WindowOffset { x: f32, y: f32, } #[derive(Debug, Clone, Copy)] pub struct GameConfig { pub width: usize, pub height: usize, pub mine_count: usize, } #[derive(Debug, Clone, PartialEq)] enum GameState { Prepare, Ready, Running, Over, } #[derive(Default, Debug)] struct CursorLocation(Vec2); 
struct LastActionText(String); struct ButtonMaterials { normal: Handle<ColorMaterial>, hovered: Handle<ColorMaterial>, pressed: Handle<ColorMaterial>, } impl FromResources for ButtonMaterials { fn from_resources(resources: &Resources) -> Self { let mut materials = resources.get_mut::<Assets<ColorMaterial>>().unwrap(); ButtonMaterials { normal: materials.add(Color::rgb(0.15, 0.15, 0.15).into()), hovered: materials.add(Color::rgb(0.25, 0.25, 0.25).into()), pressed: materials.add(Color::rgb(0.35, 0.75, 0.35).into()), } } } impl MineBlock { fn get_sprite_index(&self) -> usize { match self.bstatus { BlockStatus::Flaged => 12, BlockStatus::QuestionMarked => 11, BlockStatus::Shown => { match self.btype { BlockType::Mine => 9, BlockType::Tip(val) => val, BlockType::Space => 0, } }, BlockStatus::Hidden => HIDDEN_INDEX, } } } struct FpsRefresh; fn setup( commands: &mut Commands, asset_server: Res<AssetServer>, button_materials: Res<ButtonMaterials>, windows: ResMut<Windows>, mut texture_atlases: ResMut<Assets<TextureAtlas>>, ) { let font = asset_server.load("fonts/pointfree.ttf"); let window = windows.get_primary().unwrap(); commands .spawn(CameraUiBundle::default()) .spawn(Camera2dBundle::default()); commands.spawn(TextBundle { style: Style { align_self: AlignSelf::FlexEnd, position_type: PositionType::Absolute, position: Rect { top: Val::Px(5.0), left: Val::Px(5.0), ..Default::default() }, ..Default::default() }, text: Text { value: "debug text here".to_string(), font: font.clone(), style: TextStyle { font_size: 18.0, color: Color::rgba(0.0, 0.5, 0.5, 0.5), alignment: TextAlignment::default(), }, }, ..Default::default() }).with(DebugText); commands .spawn(TextBundle { style: Style { align_self: AlignSelf::FlexEnd, position_type: PositionType::Absolute, position: Rect { bottom: Val::Px(5.0), right: Val::Px(5.0), ..Default::default() }, ..Default::default() }, text: Text { value: "-".to_string(), font: font.clone(), style: TextStyle { font_size: 20.0, color: Color::rgba(0.0, 
0.5, 0.5, 0.5), alignment: TextAlignment::default(), }, }, ..Default::default() }) .with(FpsRefresh); commands.insert_resource(WindowOffset { x: window.width() as f32 / 2.0 - BLOCK_WIDTH as f32 / 2.0, y: window.height() as f32 / 2.0 - BLOCK_WIDTH as f32 / 2.0, }); commands .insert_resource(LastActionText(NEW_GAME_TEXT.to_string())) .spawn(ButtonBundle { style: Style { size: Size::new(Val::Px(100.0), Val::Px(25.0)), position_type: PositionType::Absolute, position: Rect { left: Val::Px((window.width() as f32) / 2.0 - 50.0), top: Val::Px(12.5), ..Default::default() }, justify_content: JustifyContent::Center, align_items: AlignItems::Center, ..Default::default() }, material: button_materials.normal.clone(), ..Default::default() }) .with_children(|parent| { parent.spawn(TextBundle { text: Text { value: "New Game".to_string(), font: asset_server.load("fonts/pointfree.ttf"), style: TextStyle { font_size: 20.0, color: Color::rgb(0.9, 0.9, 0.9), ..Default::default() }, }, ..Default::default() }).with(RefreshButton); }); let texture_handle = asset_server.load("textures/block.png"); let texture_atlas = TextureAtlas::from_grid(texture_handle, Vec2::new(SPRITE_SIZE, SPRITE_SIZE), 13, 1); let texture_atlas_handle = texture_atlases.add(texture_atlas); commands.insert_resource(texture_atlas_handle); } struct RenderBlock { pos: Position, } fn new_map( commands: &mut Commands, config: Res<GameConfig>, ) { commands .insert_resource(MinePlayground::init(&config.width, &config.height, &config.mine_count).unwrap()); commands.spawn((MinePlayground::init(&config.width, &config.height, &config.mine_count).unwrap(), )); commands.insert_resource(MapData { map_entity: commands.current_entity().unwrap(), }); } fn init_map_render( commands: &mut Commands, texture_atlases: Res<Assets<TextureAtlas>>, atlas_handle: Res<Handle<TextureAtlas>>, window_offset: Res<WindowOffset>, config: Res<GameConfig>, mut game_state: ResMut<State<GameState>>, ) { println!("111init_map_render run once"); for y in 
0..config.height { for x in 0..config.width { let texture_atlas = texture_atlases.get_handle(atlas_handle.clone()); commands .spawn(SpriteSheetBundle { transform: Transform { translation: Vec3::new( (x * BLOCK_WIDTH) as f32 - window_offset.x, (y * BLOCK_WIDTH) as f32 - window_offset.y, 0.0 ), scale: Vec3::splat(0.5), ..Default::default() }, texture_atlas, sprite: TextureAtlasSprite::new(HIDDEN_INDEX as u32), ..Default::default() }) .with(RenderBlock { pos: Position { x, y } }); } } println!("{:?}", game_state.current()); game_state.set_next(GameState::Ready).unwrap(); } fn render_map ( query: Query< &MinePlayground, Changed<MinePlayground>, >, mut sprites: Query<(&mut TextureAtlasSprite, &RenderBlock)>, ) { for mp in query.iter() { println!("detect mp changed{:?}", mp.shown_count); for (mut sprite, rb) in sprites.iter_mut() { sprite.index = mp.map[rb.pos.y][rb.pos.x].get_sprite_index() as u32; } } } fn handle_movement( mut cursor_pos: ResMut<CursorLocation>, cursor_moved_events: Res<Events<CursorMoved>>, mut evr_cursor: Local<EventReader<CursorMoved>>, ) { for ev in evr_cursor.iter(&cursor_moved_events) { cursor_pos.0 = ev.position; } } fn handle_click( btns: Res<Input<MouseButton>>, cursor_pos: Res<CursorLocation>, config: Res<GameConfig>, mut mquery: Query<&mut MinePlayground>, map_data: Res<MapData>, mut text_query: Query<&mut Text, With<RefreshButton>>, mut last_action_text: ResMut<LastActionText>, mut game_state: ResMut<State<GameState>>, ) { if let GameState::Over = game_state.current() { return; } if btns.just_released(MouseButton::Left) { if let Some((x, y)) = get_block_index_by_cursor_pos(cursor_pos.0, *config) { println!("{:?}-{:?}", x, y); let mut mp: Mut<MinePlayground> = mquery.get_component_mut(map_data.map_entity).unwrap(); if let GameState::Ready = game_state.current() { if let BlockType::Mine = mp.map[y][x].btype { mp.fix(&x, &y); } } let click_result = mp.click(&x, &y); println!("{:?}", click_result); match click_result { ClickResult::Wasted => { 
let mut text = text_query.iter_mut().next().unwrap(); text.value = String::from("Game Over"); *last_action_text = LastActionText(String::from("Game Over")); game_state.set_next(GameState::Over).unwrap(); return; }, ClickResult::Win => { let mut text = text_query.iter_mut().next().unwrap(); text.value = String::from("Finished!"); *last_action_text = LastActionText(String::from("Finished!")); game_state.set_next(GameState::Over).unwrap(); return; } _ => {} } if let GameState::Ready = game_state.current() { game_state.set_next(GameState::Running).unwrap(); } } } if btns.just_released(MouseButton::Right) { if let Some((x, y)) = get_block_index_by_cursor_pos(cursor_pos.0, *config) { println!("{:?}-{:?}", x, y); if let GameState::Ready = game_state.current() { game_state.set_next(GameState::Running).unwrap(); } let mut mp: Mut<MinePlayground> = mquery.get_component_mut(map_data.map_entity).unwrap(); mp.right_click(&x, &y); } } } fn fps_update( diagnostics: Res<Diagnostics>, mut query: Query<&mut Text, With<FpsRefresh>>, ) { for mut text in query.iter_mut() { let mut fps = 0.0; if let Some(fps_diagnostic) = diagnostics.get(FrameTimeDiagnosticsPlugin::FPS) { if let Some(fps_avg) = fps_diagnostic.average() { fps = fps_avg; } } text.value = format!( "{:.1} fps", fps, ); } } fn debug_text_update( mut query: Query<&mut Text, With<DebugText>>, game_state: Res<State<GameState>>, ) { for mut text in query.iter_mut() { text.value = format!("state: {:?}", game_state.current()); } } fn restart_button_system( button_materials: Res<ButtonMaterials>, mut interaction_query: Query< (&Interaction, &mut Handle<ColorMaterial>, &Children), (Mutated<Interaction>, With<Button>), >, mut text_query: Query<&mut Text>, mut last_action_text: ResMut<LastActionText>, mut game_state: ResMut<State<GameState>>, ) { for (interaction, mut material, children) in interaction_query.iter_mut() { let mut text = text_query.get_mut(children[0]).unwrap(); match *interaction { Interaction::Clicked => { *material = 
button_materials.pressed.clone(); text.value = NEW_GAME_TEXT.to_string(); *last_action_text = LastActionText(NEW_GAME_TEXT.to_string()); if *game_state.current() != GameState::Prepare { game_state.set_next(GameState::Prepare).unwrap(); } } Interaction::Hovered => { *material = button_materials.hovered.clone(); text.value = NEW_GAME_TEXT.to_string(); } Interaction::None => { *material = button_materials.normal.clone(); text.value = (*last_action_text.0).to_string(); } } } } fn get_block_index_by_cursor_pos(pos: Vec2, config: GameConfig) -> Option<(usize, usize)> { let x = (pos.x / BLOCK_WIDTH as f32).floor() as usize; let y = (pos.y / BLOCK_WIDTH as f32).floor() as usize; if (0..config.height).contains(&y) && (0..config.width).contains(&x) { return Some((x, y)); } None }
use std::cmp; use bevy::{ diagnostic::{Diagnostics, FrameTimeDiagnosticsPlugin}, prelude::*, }; use crate::mine_core::{ BlockType, BlockStatus, MinePlayground, MineBlock, Position, ClickResult }; pub fn game_app(config: GameConfig) { App::build() .add_resource(WindowDescriptor { vsync: false, width: cmp::max(config.width * BLOCK_WIDTH, MIN_WIDTH) as f32, height: cmp::max(config.height * BLOCK_WIDTH + Y_MARGIN, MIN_HEIGHT) as f32, title: String::from("Mine Sweeper"), resizable: false, ..Default::default() }) .add_resource(config) .add_plugins(DefaultPlugins) .add_plugin(GamePlugin) .run(); } struct GamePlugin; impl Plugin for GamePlugin { fn build(&self, app: &mut AppBuilder) { app.init_resource::<ButtonMaterials>() .add_resource(CursorLocation(Vec2::new(0.0, 0.0))) .add_plugin(FrameTimeDiagnosticsPlugin) .add_resource(State::new(GameState::Prepare)) .add_startup_system(setup.system()) .add_system(fps_update.system()) .add_system(debug_text_update.system()) .add_system(restart_button_system.system()) .add_startup_system(new_map.system()) .add_system(handle_movement.system()) .add_system(handle_click.system()) .add_system(render_map.system()) .add_stage_after(stage::UPDATE, STAGE, StateStage::<GameState>::default()) .on_state_enter(STAGE, GameState::Prepare, init_map_render.system()) .on_state_enter(STAGE, GameState::Ready, new_map.system()); } } const BLOCK_WIDTH: usize = 24; const MIN_HEIGHT: usize = 160; const MIN_WIDTH: usize = 160; const Y_MARGIN: usize = 50; const SPRITE_SIZE: f32 = 48.0; const STAGE: &str = "game_state"; const NEW_GAME_TEXT: &str = "New Game"; const HIDDEN_INDEX: usize = 10; struct RefreshButton; struct DebugText; struct MapData { map_entity: Entity, } struct WindowOffset { x: f32, y: f32, } #[derive(Debug, Clone, Copy)] pub struct GameConfig { pub width: usize, pub height: usize, pub mine_count: usi
lexEnd, position_type: PositionType::Absolute, position: Rect { top: Val::Px(5.0), left: Val::Px(5.0), ..Default::default() }, ..Default::default() }, text: Text { value: "debug text here".to_string(), font: font.clone(), style: TextStyle { font_size: 18.0, color: Color::rgba(0.0, 0.5, 0.5, 0.5), alignment: TextAlignment::default(), }, }, ..Default::default() }).with(DebugText); commands .spawn(TextBundle { style: Style { align_self: AlignSelf::FlexEnd, position_type: PositionType::Absolute, position: Rect { bottom: Val::Px(5.0), right: Val::Px(5.0), ..Default::default() }, ..Default::default() }, text: Text { value: "-".to_string(), font: font.clone(), style: TextStyle { font_size: 20.0, color: Color::rgba(0.0, 0.5, 0.5, 0.5), alignment: TextAlignment::default(), }, }, ..Default::default() }) .with(FpsRefresh); commands.insert_resource(WindowOffset { x: window.width() as f32 / 2.0 - BLOCK_WIDTH as f32 / 2.0, y: window.height() as f32 / 2.0 - BLOCK_WIDTH as f32 / 2.0, }); commands .insert_resource(LastActionText(NEW_GAME_TEXT.to_string())) .spawn(ButtonBundle { style: Style { size: Size::new(Val::Px(100.0), Val::Px(25.0)), position_type: PositionType::Absolute, position: Rect { left: Val::Px((window.width() as f32) / 2.0 - 50.0), top: Val::Px(12.5), ..Default::default() }, justify_content: JustifyContent::Center, align_items: AlignItems::Center, ..Default::default() }, material: button_materials.normal.clone(), ..Default::default() }) .with_children(|parent| { parent.spawn(TextBundle { text: Text { value: "New Game".to_string(), font: asset_server.load("fonts/pointfree.ttf"), style: TextStyle { font_size: 20.0, color: Color::rgb(0.9, 0.9, 0.9), ..Default::default() }, }, ..Default::default() }).with(RefreshButton); }); let texture_handle = asset_server.load("textures/block.png"); let texture_atlas = TextureAtlas::from_grid(texture_handle, Vec2::new(SPRITE_SIZE, SPRITE_SIZE), 13, 1); let texture_atlas_handle = texture_atlases.add(texture_atlas); 
commands.insert_resource(texture_atlas_handle); } struct RenderBlock { pos: Position, } fn new_map( commands: &mut Commands, config: Res<GameConfig>, ) { commands .insert_resource(MinePlayground::init(&config.width, &config.height, &config.mine_count).unwrap()); commands.spawn((MinePlayground::init(&config.width, &config.height, &config.mine_count).unwrap(), )); commands.insert_resource(MapData { map_entity: commands.current_entity().unwrap(), }); } fn init_map_render( commands: &mut Commands, texture_atlases: Res<Assets<TextureAtlas>>, atlas_handle: Res<Handle<TextureAtlas>>, window_offset: Res<WindowOffset>, config: Res<GameConfig>, mut game_state: ResMut<State<GameState>>, ) { println!("111init_map_render run once"); for y in 0..config.height { for x in 0..config.width { let texture_atlas = texture_atlases.get_handle(atlas_handle.clone()); commands .spawn(SpriteSheetBundle { transform: Transform { translation: Vec3::new( (x * BLOCK_WIDTH) as f32 - window_offset.x, (y * BLOCK_WIDTH) as f32 - window_offset.y, 0.0 ), scale: Vec3::splat(0.5), ..Default::default() }, texture_atlas, sprite: TextureAtlasSprite::new(HIDDEN_INDEX as u32), ..Default::default() }) .with(RenderBlock { pos: Position { x, y } }); } } println!("{:?}", game_state.current()); game_state.set_next(GameState::Ready).unwrap(); } fn render_map ( query: Query< &MinePlayground, Changed<MinePlayground>, >, mut sprites: Query<(&mut TextureAtlasSprite, &RenderBlock)>, ) { for mp in query.iter() { println!("detect mp changed{:?}", mp.shown_count); for (mut sprite, rb) in sprites.iter_mut() { sprite.index = mp.map[rb.pos.y][rb.pos.x].get_sprite_index() as u32; } } } fn handle_movement( mut cursor_pos: ResMut<CursorLocation>, cursor_moved_events: Res<Events<CursorMoved>>, mut evr_cursor: Local<EventReader<CursorMoved>>, ) { for ev in evr_cursor.iter(&cursor_moved_events) { cursor_pos.0 = ev.position; } } fn handle_click( btns: Res<Input<MouseButton>>, cursor_pos: Res<CursorLocation>, config: Res<GameConfig>, 
mut mquery: Query<&mut MinePlayground>, map_data: Res<MapData>, mut text_query: Query<&mut Text, With<RefreshButton>>, mut last_action_text: ResMut<LastActionText>, mut game_state: ResMut<State<GameState>>, ) { if let GameState::Over = game_state.current() { return; } if btns.just_released(MouseButton::Left) { if let Some((x, y)) = get_block_index_by_cursor_pos(cursor_pos.0, *config) { println!("{:?}-{:?}", x, y); let mut mp: Mut<MinePlayground> = mquery.get_component_mut(map_data.map_entity).unwrap(); if let GameState::Ready = game_state.current() { if let BlockType::Mine = mp.map[y][x].btype { mp.fix(&x, &y); } } let click_result = mp.click(&x, &y); println!("{:?}", click_result); match click_result { ClickResult::Wasted => { let mut text = text_query.iter_mut().next().unwrap(); text.value = String::from("Game Over"); *last_action_text = LastActionText(String::from("Game Over")); game_state.set_next(GameState::Over).unwrap(); return; }, ClickResult::Win => { let mut text = text_query.iter_mut().next().unwrap(); text.value = String::from("Finished!"); *last_action_text = LastActionText(String::from("Finished!")); game_state.set_next(GameState::Over).unwrap(); return; } _ => {} } if let GameState::Ready = game_state.current() { game_state.set_next(GameState::Running).unwrap(); } } } if btns.just_released(MouseButton::Right) { if let Some((x, y)) = get_block_index_by_cursor_pos(cursor_pos.0, *config) { println!("{:?}-{:?}", x, y); if let GameState::Ready = game_state.current() { game_state.set_next(GameState::Running).unwrap(); } let mut mp: Mut<MinePlayground> = mquery.get_component_mut(map_data.map_entity).unwrap(); mp.right_click(&x, &y); } } } fn fps_update( diagnostics: Res<Diagnostics>, mut query: Query<&mut Text, With<FpsRefresh>>, ) { for mut text in query.iter_mut() { let mut fps = 0.0; if let Some(fps_diagnostic) = diagnostics.get(FrameTimeDiagnosticsPlugin::FPS) { if let Some(fps_avg) = fps_diagnostic.average() { fps = fps_avg; } } text.value = format!( 
"{:.1} fps", fps, ); } } fn debug_text_update( mut query: Query<&mut Text, With<DebugText>>, game_state: Res<State<GameState>>, ) { for mut text in query.iter_mut() { text.value = format!("state: {:?}", game_state.current()); } } fn restart_button_system( button_materials: Res<ButtonMaterials>, mut interaction_query: Query< (&Interaction, &mut Handle<ColorMaterial>, &Children), (Mutated<Interaction>, With<Button>), >, mut text_query: Query<&mut Text>, mut last_action_text: ResMut<LastActionText>, mut game_state: ResMut<State<GameState>>, ) { for (interaction, mut material, children) in interaction_query.iter_mut() { let mut text = text_query.get_mut(children[0]).unwrap(); match *interaction { Interaction::Clicked => { *material = button_materials.pressed.clone(); text.value = NEW_GAME_TEXT.to_string(); *last_action_text = LastActionText(NEW_GAME_TEXT.to_string()); if *game_state.current() != GameState::Prepare { game_state.set_next(GameState::Prepare).unwrap(); } } Interaction::Hovered => { *material = button_materials.hovered.clone(); text.value = NEW_GAME_TEXT.to_string(); } Interaction::None => { *material = button_materials.normal.clone(); text.value = (*last_action_text.0).to_string(); } } } } fn get_block_index_by_cursor_pos(pos: Vec2, config: GameConfig) -> Option<(usize, usize)> { let x = (pos.x / BLOCK_WIDTH as f32).floor() as usize; let y = (pos.y / BLOCK_WIDTH as f32).floor() as usize; if (0..config.height).contains(&y) && (0..config.width).contains(&x) { return Some((x, y)); } None }
ze, } #[derive(Debug, Clone, PartialEq)] enum GameState { Prepare, Ready, Running, Over, } #[derive(Default, Debug)] struct CursorLocation(Vec2); struct LastActionText(String); struct ButtonMaterials { normal: Handle<ColorMaterial>, hovered: Handle<ColorMaterial>, pressed: Handle<ColorMaterial>, } impl FromResources for ButtonMaterials { fn from_resources(resources: &Resources) -> Self { let mut materials = resources.get_mut::<Assets<ColorMaterial>>().unwrap(); ButtonMaterials { normal: materials.add(Color::rgb(0.15, 0.15, 0.15).into()), hovered: materials.add(Color::rgb(0.25, 0.25, 0.25).into()), pressed: materials.add(Color::rgb(0.35, 0.75, 0.35).into()), } } } impl MineBlock { fn get_sprite_index(&self) -> usize { match self.bstatus { BlockStatus::Flaged => 12, BlockStatus::QuestionMarked => 11, BlockStatus::Shown => { match self.btype { BlockType::Mine => 9, BlockType::Tip(val) => val, BlockType::Space => 0, } }, BlockStatus::Hidden => HIDDEN_INDEX, } } } struct FpsRefresh; fn setup( commands: &mut Commands, asset_server: Res<AssetServer>, button_materials: Res<ButtonMaterials>, windows: ResMut<Windows>, mut texture_atlases: ResMut<Assets<TextureAtlas>>, ) { let font = asset_server.load("fonts/pointfree.ttf"); let window = windows.get_primary().unwrap(); commands .spawn(CameraUiBundle::default()) .spawn(Camera2dBundle::default()); commands.spawn(TextBundle { style: Style { align_self: AlignSelf::F
random
[ { "content": "fn get_surroundings(&x: &usize, &y: &usize, &max_width: &usize, &max_height: &usize) -> Vec<(usize, usize)> {\n\n let max_x = max_width - 1;\n\n let max_y = max_height - 1;\n\n let mut r = vec![];\n\n if x > 0 { r.push((x - 1, y)); }\n\n if x < max_x { r.push((x + 1, y)); }\n\n if y > 0 {\n\n r.push((x, y - 1));\n\n if x > 0 { r.push((x - 1, y - 1)); }\n\n if x < max_x { r.push((x + 1, y - 1)); }\n\n }\n\n if y < max_y {\n\n r.push((x, y + 1));\n\n if x > 0 { r.push((x - 1, y + 1)); }\n\n if x < max_x { r.push((x + 1, y + 1)); }\n\n }\n\n r\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/mine_core.rs", "rank": 2, "score": 65947.36286248115 }, { "content": "fn main() {\n\n println!(\"Hello, minesweeper!\");\n\n let args: Vec<String> = env::args().collect();\n\n let config_map: Vec<(usize, usize, usize)> = vec![(8, 8, 10), (16, 16, 40), (30, 16, 99)];\n\n let (width, height, mine_count) = match args.len() {\n\n 1 => config_map[0],\n\n 3 => config_map[args[2].parse().unwrap_or(0)],\n\n _ => {\n\n panic!(\"usage: ./minesweeper --level NUM\")\n\n }\n\n };\n\n println!(\"{:?}-{:?}-{:?}\", width, height, mine_count);\n\n game::game_app(game::GameConfig { width, height, mine_count });\n\n}\n", "file_path": "src/main.rs", "rank": 22, "score": 31224.3389452142 }, { "content": "# bevy_mine_sweeper\n\nMine sweeper game clone with rust and bevy engine\n\n\n\n![image](https://user-images.githubusercontent.com/1101456/109294297-f6a67200-7867-11eb-80ba-dbe06aff9cd8.png)\n", "file_path": "README.md", "rank": 23, "score": 21766.01774274986 }, { "content": "use rand::seq::SliceRandom;\n\nstatic SIZE_RANGE: std::ops::Range<usize> = 5..200;\n\nstatic MINE_COUNT_RANGE: std::ops::Range<usize> = 1..100;\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum BlockType {\n\n Mine,\n\n Space,\n\n Tip(usize),\n\n}\n\n#[derive(Debug, PartialEq)]\n\npub enum BlockStatus {\n\n Shown,\n\n Hidden,\n\n QuestionMarked,\n\n Flaged,\n\n}\n\n#[derive(Debug, Copy, Clone)]\n\npub struct Position 
{\n\n pub x: usize,\n", "file_path": "src/mine_core.rs", "rank": 36, "score": 8.256360568867718 }, { "content": " Self::Tip(val @ 2..=8) => Self::Tip(val - 1),\n\n Self::Mine => Self::Mine,\n\n _ => Self::Space,\n\n }\n\n }\n\n}\n\nimpl MineBlock {\n\n fn add_tip(&mut self) {\n\n self.btype.increase();\n\n }\n\n fn remove_tip(&mut self) {\n\n self.btype.decrease();\n\n }\n\n}\n\n\n\nimpl MinePlayground {\n\n pub fn init(&width: &usize, &height: &usize, &mine_count: &usize) -> Result<MinePlayground, String> {\n\n if !SIZE_RANGE.contains(&width) || !SIZE_RANGE.contains(&height) || !MINE_COUNT_RANGE.contains(&mine_count) {\n\n return Err(String::from(\"Parameters not in specific range!\"));\n\n }\n", "file_path": "src/mine_core.rs", "rank": 37, "score": 8.226042344102963 }, { "content": " BlockStatus::Flaged => { block.bstatus = BlockStatus::QuestionMarked; }\n\n BlockStatus::QuestionMarked => { block.bstatus = BlockStatus::Hidden; }\n\n _ => {}\n\n }\n\n }\n\n pub fn fix(&mut self, x: &usize, y: &usize) {\n\n println!(\"fixing!-{:?}-{:?}\", x, y);\n\n self.map[*y][*x].btype = BlockType::Space;\n\n let surroundings = get_surroundings(x, y, &self.width, &self.height);\n\n for (cur_x, cur_y) in surroundings.iter() {\n\n if let BlockType::Mine = self.map[*cur_y][*cur_x].btype {\n\n self.map[*y][*x].add_tip();\n\n }\n\n self.map[*cur_y][*cur_x].remove_tip();\n\n }\n\n for new_y in 0..self.height {\n\n for new_x in 0..self.width {\n\n if new_x == *x && new_y == *y {\n\n return;\n\n }\n", "file_path": "src/mine_core.rs", "rank": 38, "score": 7.917014039320369 }, { "content": " pub y: usize,\n\n}\n\n#[derive(Debug)]\n\npub struct MineBlock {\n\n pub btype: BlockType,\n\n pub bstatus: BlockStatus,\n\n pub pos: Position,\n\n}\n\npub struct MinePlayground {\n\n pub shown_count: usize,\n\n safety_block_count: usize,\n\n width: usize,\n\n height: usize,\n\n pub map: Vec<Vec<MineBlock>>,\n\n}\n\n#[derive(Debug)]\n\npub enum ClickResult {\n\n Wasted,\n\n NothingHappened,\n\n 
Win,\n", "file_path": "src/mine_core.rs", "rank": 39, "score": 7.800226846852148 }, { "content": " }\n\n pub fn click(&mut self, x: &usize, y: &usize) -> ClickResult {\n\n let mut block = &mut self.map[*y][*x];\n\n if let BlockStatus::Hidden = block.bstatus {\n\n match block.btype {\n\n BlockType::Mine => {\n\n // game over\n\n for y in 0..self.height {\n\n for x in 0..self.width {\n\n self.map[y][x].bstatus = BlockStatus::Shown;\n\n }\n\n }\n\n return ClickResult::Wasted;\n\n },\n\n BlockType::Tip(_) => {\n\n block.bstatus = BlockStatus::Shown;\n\n self.shown_count += 1;\n\n },\n\n BlockType::Space => {\n\n block.bstatus = BlockStatus::Shown;\n", "file_path": "src/mine_core.rs", "rank": 40, "score": 7.217320367398827 }, { "content": " let surroundings = get_surroundings(x, y, &self.width, &self.height);\n\n self.shown_count += 1;\n\n for (cur_x, cur_y) in surroundings.iter() {\n\n self.click(cur_x, cur_y);\n\n }\n\n }\n\n }\n\n if self.shown_count == self.safety_block_count {\n\n return ClickResult::Win;\n\n }\n\n }\n\n ClickResult::NothingHappened\n\n }\n\n pub fn right_click(&mut self, x: &usize, y: &usize) {\n\n let mut block = &mut self.map[*y][*x];\n\n if let BlockStatus::Shown = block.bstatus {\n\n return;\n\n }\n\n match block.bstatus {\n\n BlockStatus::Hidden => { block.bstatus = BlockStatus::Flaged; }\n", "file_path": "src/mine_core.rs", "rank": 41, "score": 6.19815204060232 }, { "content": " let seeds_length = width * height;\n\n let mut mine_seeds: Vec<bool> = Vec::with_capacity(seeds_length.into());\n\n for i in 0..seeds_length {\n\n if i < mine_count { mine_seeds.push(true); }\n\n else { mine_seeds.push(false); }\n\n }\n\n let mut rng = rand::thread_rng();\n\n mine_seeds.shuffle(&mut rng);\n\n let mut mine_map: Vec<Vec<MineBlock>> = vec![];\n\n for i in 0..height {\n\n mine_map.push(mine_seeds[i * width..i * width + width].iter().enumerate().map(|(j, &is_mine_block)| {\n\n MineBlock {\n\n btype: if is_mine_block { BlockType::Mine } else { 
BlockType::Space },\n\n pos: Position { x: j, y: i },\n\n ..Default::default()\n\n }\n\n }).collect());\n\n }\n\n for y in 0..mine_map.len() {\n\n let row = &mine_map[y];\n", "file_path": "src/mine_core.rs", "rank": 42, "score": 5.303608389803182 }, { "content": " if self.map[new_y][new_x].btype != BlockType::Mine {\n\n self.map[new_y][new_x].btype = BlockType::Mine;\n\n let surroundings = get_surroundings(&new_x, &new_y, &self.width, &self.height);\n\n for (cur_x, cur_y) in surroundings.iter() {\n\n self.map[*cur_x][*cur_y].add_tip();\n\n }\n\n return;\n\n }\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/mine_core.rs", "rank": 43, "score": 5.026992701469589 }, { "content": "}\n\nimpl Default for MineBlock {\n\n fn default() -> MineBlock {\n\n MineBlock {\n\n bstatus: BlockStatus::Hidden,\n\n btype: BlockType::Space,\n\n pos: Position { x: 0, y: 0},\n\n }\n\n }\n\n}\n\nimpl BlockType {\n\n fn increase (&mut self) {\n\n *self = match *self {\n\n Self::Tip(val) => Self::Tip(val + 1),\n\n Self::Space => Self::Tip(1),\n\n Self::Mine => Self::Mine,\n\n }\n\n }\n\n fn decrease (&mut self) {\n\n *self = match *self {\n", "file_path": "src/mine_core.rs", "rank": 44, "score": 4.490051208299206 }, { "content": "mod game;\n\nmod mine_core;\n\nuse std::env;\n", "file_path": "src/main.rs", "rank": 45, "score": 3.9128430686423483 }, { "content": " for x in 0..row.len() {\n\n if let BlockType::Space = mine_map[y][x].btype {\n\n let surroundings = get_surroundings(&x, &y, &width, &height);\n\n for (cur_x, cur_y) in surroundings.iter() {\n\n if let BlockType::Mine = mine_map[*cur_y][*cur_x].btype {\n\n mine_map[y][x].add_tip();\n\n }\n\n }\n\n }\n\n }\n\n }\n\n // println!(\"{:?}\", mine_map);\n\n\n\n Ok(MinePlayground {\n\n shown_count: 0,\n\n safety_block_count: height * width - mine_count,\n\n width,\n\n height,\n\n map: mine_map,\n\n })\n", "file_path": "src/mine_core.rs", "rank": 46, "score": 3.5065331814281477 }, { "content": "mod tests{\n\n use super::*;\n\n #[test]\n\n fn 
test_init_map() {\n\n assert!(MinePlayground::init(&0, &0, &0).is_err());\n\n assert!(MinePlayground::init(&8, &8, &10).is_ok());\n\n }\n\n #[test]\n\n fn test_get_surroundings() {\n\n assert_eq!(get_surroundings(&9, &9, &10, &10), vec![(8, 9), (9, 8), (8, 8)]);\n\n }\n\n}", "file_path": "src/mine_core.rs", "rank": 47, "score": 1.4424452834825772 } ]
Rust
src/parser/error.rs
boxofrox/combine
5b89f1913d7932b20c37e6a11bebd347a5942df1
use crate::{ error::{ ErrorInfo, ParseError, ParseResult::{self, *}, StreamError, Tracked, }, lib::marker::PhantomData, parser::ParseMode, Parser, Stream, StreamOnce, }; #[derive(Clone)] pub struct Unexpected<I, T, E>(E, PhantomData<fn(I) -> (I, T)>) where I: Stream; impl<Input, T, E> Parser<Input> for Unexpected<Input, T, E> where Input: Stream, E: for<'s> ErrorInfo<'s, Input::Token, Input::Range>, { type Output = T; type PartialState = (); #[inline] fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<T, <Input as StreamOnce>::Error> { EmptyErr(<Input as StreamOnce>::Error::empty(input.position()).into()) } fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) { errors.error.add(StreamError::unexpected(&self.0)); } } pub fn unexpected<Input, S>(message: S) -> Unexpected<Input, (), S> where Input: Stream, S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>, { unexpected_any(message) } pub fn unexpected_any<Input, S, T>(message: S) -> Unexpected<Input, T, S> where Input: Stream, S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>, { Unexpected(message, PhantomData) } #[derive(Clone)] pub struct Message<P, S>(P, S); impl<Input, P, S> Parser<Input> for Message<P, S> where Input: Stream, P: Parser<Input>, S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>, { type Output = P::Output; type PartialState = P::PartialState; parse_mode!(Input); #[inline] fn parse_mode_impl<M>( &mut self, mode: M, input: &mut Input, state: &mut Self::PartialState, ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> where M: ParseMode, { match self.0.parse_mode(mode, input, state) { ConsumedOk(x) => ConsumedOk(x), EmptyOk(x) => EmptyOk(x), ConsumedErr(mut err) => { err.add_message(&self.1); ConsumedErr(err) } EmptyErr(err) => EmptyErr(err), } } fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) { self.0.add_error(errors); errors.error.add_message(&self.1); } forward_parser!(Input, parser_count add_consumed_expected_error, 0); } 
pub fn message<Input, P, S>(p: P, msg: S) -> Message<P, S> where P: Parser<Input>, Input: Stream, S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>, { Message(p, msg) } #[derive(Clone)] pub struct Expected<P, S>(P, S); impl<Input, P, S> Parser<Input> for Expected<P, S> where P: Parser<Input>, Input: Stream, S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>, { type Output = P::Output; type PartialState = P::PartialState; parse_mode!(Input); #[inline] fn parse_mode_impl<M>( &mut self, mode: M, input: &mut Input, state: &mut Self::PartialState, ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> where M: ParseMode, { self.0.parse_mode(mode, input, state) } fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) { ParseError::set_expected(errors, StreamError::expected(&self.1), |errors| { self.0.add_error(errors); }) } forward_parser!(Input, parser_count add_consumed_expected_error, 0); } pub fn expected<Input, P, S>(p: P, info: S) -> Expected<P, S> where P: Parser<Input>, Input: Stream, S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>, { Expected(p, info) } #[derive(Clone)] pub struct Silent<P>(P); impl<Input, P> Parser<Input> for Silent<P> where P: Parser<Input>, Input: Stream, { type Output = P::Output; type PartialState = P::PartialState; parse_mode!(Input); #[inline] fn parse_mode_impl<M>( &mut self, mode: M, input: &mut Input, state: &mut Self::PartialState, ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> where M: ParseMode, { self.0.parse_mode(mode, input, state).map_err(|mut err| { err.clear_expected(); err }) } fn add_error(&mut self, _errors: &mut Tracked<<Input as StreamOnce>::Error>) {} fn add_consumed_expected_error(&mut self, _errors: &mut Tracked<<Input as StreamOnce>::Error>) { } forward_parser!(Input, parser_count, 0); } pub fn silent<Input, P>(p: P) -> Silent<P> where P: Parser<Input>, Input: Stream, { Silent(p) }
use crate::{ error::{ ErrorInfo, ParseError, ParseResult::{self, *}, StreamError, Tracked, }, lib::marker::PhantomData, parser::ParseMode, Parser, Stream, StreamOnce, }; #[derive(Clone)] pub struct Unexpected<I, T, E>(E, PhantomData<fn(I) -> (I, T)>) where I: Stream; impl<Input, T, E> Parser<Input> for Unexpected<Input, T, E> where Input: Stream, E: for<'s> ErrorInfo<'s, Input::Token, Input::Range>, { type Output = T; type PartialState = (); #[inline] fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<T, <Input as StreamOnce>::Error> { EmptyErr(<Input as StreamOnce>::Error::empty(input.position()).into()) } fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) { errors.error.add(StreamError::unexpected(&self.0)); } } pub fn unexpected<Input, S>(message: S) -> Unexpected<Input, (), S> where Input: Stream, S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>, { unexpected_any(message) } pub fn unexpected_any<Input, S, T>(message: S) -> Unexpected<Input, T, S> where Input: Stream, S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>, { Unexpected(message, PhantomData) } #[derive(Clone)] pub struct Message<P, S>(P, S); impl<Input, P, S> Parser<Input> for Message<P, S> where Input: Stream, P: Parser<Input>, S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>, { type Output = P::Output; type PartialState = P::PartialState; parse_mode!(Input); #[inline] fn parse_mode_impl<M>( &mut self, mode: M, input: &mut Input, state: &mut Self::PartialState, ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> where M: ParseMode, { match self.0.parse_mode(mode, input, state) { ConsumedOk(x) => ConsumedOk(x), EmptyOk(x) => EmptyOk(x), ConsumedErr(mut err) => { err.add_message(&self.1); ConsumedErr(err) } EmptyErr(err) => EmptyErr(err), } } fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) { self.0.add_error(errors); errors.error.add_message(&self.1); } forward_parser!(Input, parser_count add_consumed_expected_error, 0); } 
pub fn message<Input, P, S>(p: P, msg: S) -> Message<P, S> where P: Parser<Input>, Input: Stream, S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>, { Message(p, msg) } #[derive(Clone)] pub struct Expected<P, S>(P, S); impl<Input, P, S> Parser<Input> for Expected<P, S> where P: Parser<Input>, Input: Stream, S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>, { type Output = P::Output; type PartialState = P::PartialState; parse_mode!(Input); #[inline] fn parse_mode_impl<M>( &mut self, mode: M, input: &mut Input, state: &mut Self::PartialState, ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> where M: ParseMode, { self.0.parse_mode(mode, input, state) } fn add_error(&mut self, errors: &mut Tracked<<Input as StreamOnce>::Error>) { ParseError::set_expected(errors, StreamError::expected(&self.1), |errors| { self.0.add_error(errors); }) } forward_parser!(Input, parser_count add_consumed_expected_error, 0); } pub fn expected<Input, P, S>(p: P, info: S) -> Expected<P, S> where P: Parser<Input>, Input: Stream, S: for<'s> ErrorInfo<'s, Input::Token, Input::Range>, { Expected(p, info) } #[derive(Clone)] pub struct Silent<P>(P); impl<Input, P> Parser<Input> for Silent<P> where P: Parser<Input>, Input: Stream, { type Output = P::Output; type PartialState = P::PartialState; parse_mode!(Input); #[inline] fn parse_mode_impl<M>( &mut self, mode: M, inpu
pected(); err }) } fn add_error(&mut self, _errors: &mut Tracked<<Input as StreamOnce>::Error>) {} fn add_consumed_expected_error(&mut self, _errors: &mut Tracked<<Input as StreamOnce>::Error>) { } forward_parser!(Input, parser_count, 0); } pub fn silent<Input, P>(p: P) -> Silent<P> where P: Parser<Input>, Input: Stream, { Silent(p) }
t: &mut Input, state: &mut Self::PartialState, ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> where M: ParseMode, { self.0.parse_mode(mode, input, state).map_err(|mut err| { err.clear_ex
function_block-random_span
[ { "content": "pub fn repeat_until<F, Input, P, E>(parser: P, end: E) -> RepeatUntil<F, P, E>\n\nwhere\n\n Input: Stream,\n\n F: Extend<P::Output> + Default,\n\n P: Parser<Input>,\n\n E: Parser<Input>,\n\n{\n\n RepeatUntil {\n\n parser,\n\n end,\n\n _marker: PhantomData,\n\n }\n\n}\n\n\n\nparser! {\n\n pub struct SkipRepeatUntil;\n\n type PartialState = <With<RepeatUntil<Sink, P, E>, Value<Input, ()>> as Parser<Input>>::PartialState;\n\n /// Skips input until `end` is encountered or `end` indicates that it has consumed input before\n\n /// failing (`attempt` can be used to make it look like it has not consumed any input)\n\n ///\n", "file_path": "src/parser/repeat.rs", "rank": 0, "score": 295507.2655001839 }, { "content": "fn lex<Input, P>(p: P) -> impl Parser<Input, Output = P::Output>\n\nwhere\n\n P: Parser<Input>,\n\n Input: Stream<Token = char>,\n\n <Input as StreamOnce>::Error: ParseError<\n\n <Input as StreamOnce>::Token,\n\n <Input as StreamOnce>::Range,\n\n <Input as StreamOnce>::Position,\n\n >,\n\n{\n\n p.skip(spaces())\n\n}\n\n\n", "file_path": "benches/json.rs", "rank": 1, "score": 288978.6784485705 }, { "content": "#[cfg(feature = \"std\")]\n\npub fn any_partial_state<Input, P>(p: P) -> AnyPartialStateParser<P>\n\nwhere\n\n Input: Stream,\n\n P: Parser<Input>,\n\n P::PartialState: 'static,\n\n{\n\n AnyPartialStateParser(p)\n\n}\n\n\n\n#[cfg(feature = \"std\")]\n\n#[derive(Default)]\n\npub struct AnySendPartialState(Option<Box<dyn std::any::Any + Send>>);\n\n\n\n#[cfg(feature = \"std\")]\n\npub struct AnySendPartialStateParser<P>(P);\n\n\n\n#[cfg(feature = \"std\")]\n\nimpl<Input, P> Parser<Input> for AnySendPartialStateParser<P>\n\nwhere\n\n Input: Stream,\n", "file_path": "src/parser/combinator.rs", "rank": 4, "score": 278144.1850103626 }, { "content": "#[cfg(feature = \"std\")]\n\npub trait EasyParser<Input: Stream>: Parser<crate::easy::Stream<Input>>\n\nwhere\n\n Input::Token: PartialEq,\n\n Input::Range: PartialEq,\n\n{\n\n /// Entry point of the 
parser. Takes some input and tries to parse it, returning an easy to use\n\n /// and format error if parsing did not succeed.\n\n ///\n\n /// Returns the parsed result and the remaining input if the parser succeeds, or a\n\n /// This function wraps requires `Input == easy::Stream<Input>` which makes it return\n\n /// return `easy::Errors` if an error occurs. Due to this wrapping it is recommended that the\n\n /// parser `Self` is written with a generic input type.\n\n ///\n\n /// ```\n\n /// # #[macro_use]\n\n /// # extern crate combine;\n\n ///\n\n /// use combine::*;\n\n /// use combine::parser::repeat::many1;\n\n /// use combine::parser::char::letter;\n", "file_path": "src/parser/mod.rs", "rank": 5, "score": 277451.166410003 }, { "content": "/// Parses `open` followed by `parser` followed by `close`.\n\n/// Returns the value of `parser`.\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use combine::*;\n\n/// # use combine::parser::char::string;\n\n/// # fn main() {\n\n/// let result = between(token('['), token(']'), string(\"rust\"))\n\n/// .parse(\"[rust]\")\n\n/// .map(|x| x.0);\n\n/// assert_eq!(result, Ok(\"rust\"));\n\n/// # }\n\n/// ```\n\npub fn between[Input, L, R, P](open: L, close: R, parser: P)(Input) -> P::Output\n\nwhere [\n\n Input: Stream,\n\n L: Parser< Input>,\n\n R: Parser< Input>,\n\n P: Parser< Input>,\n\n]\n\n{\n\n fn middle<T, U, V>((_, x, _): (T, U, V)) -> U {\n\n x\n\n }\n\n (open, parser, close).map(middle)\n\n}\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct Then<P, F>(P, F);\n\nimpl<Input, P, N, F> Parser<Input> for Then<P, F>\n\nwhere\n\n Input: Stream,\n", "file_path": "src/parser/sequence.rs", "rank": 6, "score": 277258.5323849849 }, { "content": "fn satisfy_impl<Input, P, R>(input: &mut Input, mut predicate: P) -> ParseResult<R, Input::Error>\n\nwhere\n\n Input: Stream,\n\n P: FnMut(Input::Token) -> Option<R>,\n\n{\n\n let position = input.position();\n\n match uncons(input) {\n\n EmptyOk(c) | ConsumedOk(c) => match 
predicate(c.clone()) {\n\n Some(c) => ConsumedOk(c),\n\n None => EmptyErr(Input::Error::empty(position).into()),\n\n },\n\n EmptyErr(err) => EmptyErr(err),\n\n ConsumedErr(err) => ConsumedErr(err),\n\n }\n\n}\n\n\n\nimpl<Input, P> Parser<Input> for Satisfy<Input, P>\n\nwhere\n\n Input: Stream,\n\n P: FnMut(Input::Token) -> bool,\n", "file_path": "src/parser/token.rs", "rank": 7, "score": 276029.6224745091 }, { "content": "/// Parses `parser` and outputs `Some(value)` if it succeeds, `None` if it fails without\n\n/// consuming any input. Fails if `parser` fails after having consumed some input.\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use combine::*;\n\n/// # use combine::parser::char::string;\n\n/// # fn main() {\n\n/// let mut parser = optional(string(\"hello\"));\n\n/// assert_eq!(parser.parse(\"hello\"), Ok((Some(\"hello\"), \"\")));\n\n/// assert_eq!(parser.parse(\"world\"), Ok((None, \"world\")));\n\n/// assert!(parser.parse(\"heya\").is_err());\n\n/// # }\n\n/// ```\n\npub fn optional<Input, P>(parser: P) -> Optional<P>\n\nwhere\n\n Input: Stream,\n\n P: Parser<Input>,\n\n{\n\n Optional(parser)\n\n}\n\n\n\n#[macro_export]\n\n#[doc(hidden)]\n\nmacro_rules! 
parse_mode_dispatch {\n\n () => {\n\n fn parse_partial(\n\n &mut self,\n\n input: &mut Input,\n\n state: &mut Self::PartialState,\n\n ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error> {\n\n self.parse_mode_dispatch($crate::parser::PartialMode::default(), input, state)\n\n }\n\n\n", "file_path": "src/parser/choice.rs", "rank": 9, "score": 271489.2330264711 }, { "content": "/// Succeeds only if `parser` fails.\n\n/// Never consumes any input.\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use combine::*;\n\n/// # use combine::parser::char::{alpha_num, string};\n\n/// # fn main() {\n\n/// let result = string(\"let\")\n\n/// .skip(not_followed_by(alpha_num()))\n\n/// .parse(\"letx\")\n\n/// .map(|x| x.0);\n\n/// assert!(result.is_err());\n\n///\n\n/// # }\n\n/// ```\n\npub fn not_followed_by<Input, P>(parser: P) -> NotFollowedBy<P>\n\nwhere\n\n Input: Stream,\n\n P: Parser<Input>,\n\n P::Output: Into<Info<<Input as StreamOnce>::Token, <Input as StreamOnce>::Range, &'static str>>,\n\n{\n\n NotFollowedBy(parser)\n\n}\n\n\n\n/*\n\n * TODO :: Rename `Try` to `Attempt`\n\n * Because this is public, it's name cannot be changed without also making a breaking change.\n\n */\n\n#[derive(Copy, Clone)]\n\npub struct Try<P>(P);\n\nimpl<Input, O, P> Parser<Input> for Try<P>\n\nwhere\n\n Input: Stream,\n\n P: Parser<Input, Output = O>,\n\n{\n", "file_path": "src/parser/combinator.rs", "rank": 10, "score": 271484.7841989738 }, { "content": "/// Takes a parser that outputs a string like value (`&str`, `String`, `&[u8]` or `Vec<u8>`) and parses it\n\n/// using `std::str::FromStr`. 
Errors if the output of `parser` is not UTF-8 or if\n\n/// `FromStr::from_str` returns an error.\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use combine::parser::range;\n\n/// # use combine::parser::repeat::many1;\n\n/// # use combine::parser::combinator::from_str;\n\n/// # use combine::char;\n\n/// # use combine::*;\n\n/// # fn main() {\n\n/// let mut parser = from_str(many1::<String, _, _>(char::digit()));\n\n/// let result = parser.parse(\"12345\\r\\n\");\n\n/// assert_eq!(result, Ok((12345i32, \"\\r\\n\")));\n\n///\n\n/// // Range parsers work as well\n\n/// let mut parser = from_str(range::take_while1(|c: char| c.is_digit(10)));\n\n/// let result = parser.parse(\"12345\\r\\n\");\n\n/// assert_eq!(result, Ok((12345i32, \"\\r\\n\")));\n\n///\n\n/// // As do parsers that work with bytes\n\n/// let digits = || range::take_while1(|b: u8| b >= b'0' && b <= b'9');\n\n/// let mut parser = from_str(range::recognize((\n\n/// digits(),\n\n/// byte::byte(b'.'),\n\n/// digits(),\n\n/// )));\n\n/// let result = parser.parse(&b\"123.45\\r\\n\"[..]);\n\n/// assert_eq!(result, Ok((123.45f64, &b\"\\r\\n\"[..])));\n\n/// # }\n\n/// ```\n\npub fn from_str[Input, O, P](parser: P)(Input) -> O\n\nwhere [\n\n P: Parser<Input>,\n\n P::Output: StrLike,\n\n O: str::FromStr,\n\n O::Err: fmt::Display,\n\n]\n\n{\n\n parser.and_then(|r| {\n\n r.from_utf8()\n\n .map_err(|_| StreamErrorFor::<Input>::expected_static_message(\"UTF-8\"))\n\n .and_then(|s| s.parse().map_err(StreamErrorFor::<Input>::message_format))\n\n })\n\n}\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct Opaque<F, Input, O, S>(F, PhantomData<fn(&mut Input, &mut S) -> O>);\n\nimpl<Input, F, O, S> Parser<Input> for Opaque<F, Input, O, S>\n\nwhere\n", "file_path": "src/parser/combinator.rs", "rank": 11, "score": 269851.90672897594 }, { "content": "/// Skips over [`space`] zero or more times\n\n///\n\n/// [`space`]: fn.space.html\n\n///\n\n/// ```\n\n/// use combine::Parser;\n\n/// use 
combine::parser::byte::spaces;\n\n/// assert_eq!(spaces().parse(&b\"\"[..]), Ok(((), &b\"\"[..])));\n\n/// assert_eq!(spaces().parse(&b\" \"[..]), Ok(((), &b\"\"[..])));\n\n/// ```\n\npub fn spaces<Input>() -> impl Parser<Input, Output = ()>\n\nwhere\n\n Input: Stream<Token = u8>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n skip_many(space()).expected(\"whitespaces\")\n\n}\n\n\n", "file_path": "src/parser/byte.rs", "rank": 12, "score": 269471.3939891887 }, { "content": "/// Skips over zero or more spaces according to [`std::char::is_whitespace`].\n\n///\n\n/// This includes space characters, tabs and newlines.\n\n///\n\n/// [`std::char::is_whitespace`]: https://doc.rust-lang.org/std/primitive.char.html#method.is_whitespace\n\n///\n\n/// ```\n\n/// use combine::Parser;\n\n/// use combine::parser::char::spaces;\n\n/// assert_eq!(spaces().parse(\"\"), Ok(((), \"\")));\n\n/// assert_eq!(spaces().parse(\" \"), Ok(((), \"\")));\n\n/// ```\n\npub fn spaces<Input>() -> impl Parser<Input, Output = ()>\n\nwhere\n\n Input: Stream<Token = char>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n skip_many(space()).expected(\"whitespaces\")\n\n}\n\n\n", "file_path": "src/parser/char.rs", "rank": 13, "score": 269466.91394365736 }, { "content": "/// Parses a `b' '`, `b'\\t'`, `b'\\n'` or `'b\\'r'`.\n\n///\n\n/// ```\n\n/// use combine::Parser;\n\n/// use combine::parser::byte::space;\n\n/// assert_eq!(space().parse(&b\" \"[..]), Ok((b' ', &b\"\"[..])));\n\n/// assert_eq!(space().parse(&b\" \"[..]), Ok((b' ', &b\" \"[..])));\n\n/// assert!(space().parse(&b\"!\"[..]).is_err());\n\n/// assert!(space().parse(&b\"\"[..]).is_err());\n\n/// ```\n\npub fn space<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>\n\nwhere\n\n Input: Stream<Token = u8>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n byte_parser!(space, Space, is_whitespace)\n\n}\n\n\n", "file_path": 
"src/parser/byte.rs", "rank": 14, "score": 268875.2517927574 }, { "content": "/// Parses carriage return and newline (`\"\\r\\n\"`), returning the newline character.\n\n///\n\n/// ```\n\n/// use combine::Parser;\n\n/// use combine::parser::char::crlf;\n\n/// assert_eq!(crlf().parse(\"\\r\\n\"), Ok(('\\n', \"\")));\n\n/// assert!(crlf().parse(\"\\r\").is_err());\n\n/// assert!(crlf().parse(\"\\n\").is_err());\n\n/// ```\n\npub fn crlf<Input>() -> impl Parser<Input, Output = char, PartialState = ()>\n\nwhere\n\n Input: Stream<Token = char>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n no_partial(satisfy(|ch: char| ch == '\\r').with(newline())).expected(\"crlf newline\")\n\n}\n\n\n", "file_path": "src/parser/char.rs", "rank": 15, "score": 268875.15526776225 }, { "content": "/// Parses carriage return and newline (`&b\"\\r\\n\"`), returning the newline byte.\n\n///\n\n/// ```\n\n/// use combine::Parser;\n\n/// use combine::parser::byte::crlf;\n\n/// assert_eq!(crlf().parse(&b\"\\r\\n\"[..]), Ok((b'\\n', &b\"\"[..])));\n\n/// assert!(crlf().parse(&b\"\\r\"[..]).is_err());\n\n/// assert!(crlf().parse(&b\"\\n\"[..]).is_err());\n\n/// ```\n\npub fn crlf<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>\n\nwhere\n\n Input: Stream<Token = u8>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n no_partial(satisfy(|ch: u8| ch == b'\\r').with(newline())).expected(\"crlf newline\")\n\n}\n\n\n", "file_path": "src/parser/byte.rs", "rank": 16, "score": 268875.1552677623 }, { "content": "/// Parses a newline byte (`b'\\n'`).\n\n///\n\n/// ```\n\n/// use combine::Parser;\n\n/// use combine::parser::byte::newline;\n\n/// assert_eq!(newline().parse(&b\"\\n\"[..]), Ok((b'\\n', &b\"\"[..])));\n\n/// assert!(newline().parse(&b\"\\r\"[..]).is_err());\n\n/// ```\n\npub fn newline<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>\n\nwhere\n\n Input: Stream<Token = u8>,\n\n Input::Error: 
ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n satisfy(|ch: u8| ch == b'\\n').expected(\"lf newline\")\n\n}\n\n\n", "file_path": "src/parser/byte.rs", "rank": 17, "score": 268875.00111372594 }, { "content": "/// Parses a tab character (`'\\t'`).\n\n///\n\n/// ```\n\n/// use combine::Parser;\n\n/// use combine::parser::char::tab;\n\n/// assert_eq!(tab().parse(\"\\t\"), Ok(('\\t', \"\")));\n\n/// assert!(tab().parse(\" \").is_err());\n\n/// ```\n\npub fn tab<Input>() -> impl Parser<Input, Output = char, PartialState = ()>\n\nwhere\n\n Input: Stream<Token = char>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n satisfy(|ch: char| ch == '\\t').expected(\"tab\")\n\n}\n\n\n", "file_path": "src/parser/char.rs", "rank": 18, "score": 268875.00111372594 }, { "content": "/// Parses a newline character (`'\\n'`).\n\n///\n\n/// ```\n\n/// use combine::Parser;\n\n/// use combine::parser::char::newline;\n\n/// assert_eq!(newline().parse(\"\\n\"), Ok(('\\n', \"\")));\n\n/// assert!(newline().parse(\"\\r\").is_err());\n\n/// ```\n\npub fn newline<Input>() -> impl Parser<Input, Output = char, PartialState = ()>\n\nwhere\n\n Input: Stream<Token = char>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n satisfy(|ch: char| ch == '\\n').expected(\"lf newline\")\n\n}\n\n\n", "file_path": "src/parser/char.rs", "rank": 19, "score": 268875.00111372594 }, { "content": "/// Parses a tab byte (`b'\\t'`).\n\n///\n\n/// ```\n\n/// use combine::Parser;\n\n/// use combine::parser::byte::tab;\n\n/// assert_eq!(tab().parse(&b\"\\t\"[..]), Ok((b'\\t', &b\"\"[..])));\n\n/// assert!(tab().parse(&b\" \"[..]).is_err());\n\n/// ```\n\npub fn tab<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>\n\nwhere\n\n Input: Stream<Token = u8>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n satisfy(|ch| ch == b'\\t').expected(\"tab\")\n\n}\n\n\n", "file_path": 
"src/parser/byte.rs", "rank": 20, "score": 268875.00111372594 }, { "content": "/// Parses a base-10 digit (0–9).\n\n///\n\n/// ```\n\n/// use combine::Parser;\n\n/// use combine::parser::byte::digit;\n\n/// assert_eq!(digit().parse(&b\"9\"[..]), Ok((b'9', &b\"\"[..])));\n\n/// assert!(digit().parse(&b\"A\"[..]).is_err());\n\n/// ```\n\npub fn digit<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>\n\nwhere\n\n Input: Stream<Token = u8>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n byte_parser!(digit, Digit, is_digit(10))\n\n}\n\n\n", "file_path": "src/parser/byte.rs", "rank": 21, "score": 268875.00111372594 }, { "content": "/// Parses an lowercase ASCII letter (a–z).\n\n///\n\n/// ```\n\n/// use combine::Parser;\n\n/// use combine::parser::byte::lower;\n\n/// assert_eq!(lower().parse(&b\"a\"[..]), Ok((b'a', &b\"\"[..])));\n\n/// assert!(lower().parse(&b\"A\"[..]).is_err());\n\n/// ```\n\npub fn lower<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>\n\nwhere\n\n Input: Stream<Token = u8>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n byte_parser!(lower, Lower, is_lowercase)\n\n}\n\n\n", "file_path": "src/parser/byte.rs", "rank": 22, "score": 268874.7709021389 }, { "content": "/// Parses an uppercase ASCII letter (A–Z).\n\n///\n\n/// ```\n\n/// use combine::Parser;\n\n/// use combine::parser::byte::upper;\n\n/// assert_eq!(upper().parse(&b\"A\"[..]), Ok((b'A', &b\"\"[..])));\n\n/// assert!(upper().parse(&b\"a\"[..]).is_err());\n\n/// ```\n\npub fn upper<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>\n\nwhere\n\n Input: Stream<Token = u8>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n byte_parser!(upper, Upper, is_uppercase)\n\n}\n\n\n", "file_path": "src/parser/byte.rs", "rank": 23, "score": 268874.7709021389 }, { "content": "/// Parses an ASCII alphabet letter (a–z, A–Z).\n\n///\n\n/// ```\n\n/// use 
combine::Parser;\n\n/// use combine::parser::byte::letter;\n\n/// assert_eq!(letter().parse(&b\"a\"[..]), Ok((b'a', &b\"\"[..])));\n\n/// assert_eq!(letter().parse(&b\"A\"[..]), Ok((b'A', &b\"\"[..])));\n\n/// assert!(letter().parse(&b\"9\"[..]).is_err());\n\n/// ```\n\npub fn letter<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>\n\nwhere\n\n Input: Stream<Token = u8>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n byte_parser!(letter, Letter, is_alphabetic)\n\n}\n\n\n", "file_path": "src/parser/byte.rs", "rank": 24, "score": 268874.13100816094 }, { "content": "/// Parses an lowercase letter according to [`std::char::is_lowercase`].\n\n///\n\n/// [`std::char::is_lowercase`]: https://doc.rust-lang.org/std/primitive.char.html#method.is_lowercase\n\n///\n\n/// ```\n\n/// use combine::Parser;\n\n/// use combine::parser::char::lower;\n\n/// assert_eq!(lower().parse(\"a\"), Ok(('a', \"\")));\n\n/// assert!(lower().parse(\"A\").is_err());\n\n/// ```\n\npub fn lower<Input>() -> impl Parser<Input, Output = char, PartialState = ()>\n\nwhere\n\n Input: Stream<Token = char>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n satisfy(|ch: char| ch.is_lowercase()).expected(\"lowercase letter\")\n\n}\n\n\n", "file_path": "src/parser/char.rs", "rank": 25, "score": 268873.20710209315 }, { "content": "/// Parse a single whitespace according to [`std::char::is_whitespace`].\n\n///\n\n/// This includes space characters, tabs and newlines.\n\n///\n\n/// [`std::char::is_whitespace`]: https://doc.rust-lang.org/std/primitive.char.html#method.is_whitespace\n\n///\n\n/// ```\n\n/// use combine::Parser;\n\n/// use combine::parser::char::space;\n\n/// assert_eq!(space().parse(\" \"), Ok((' ', \"\")));\n\n/// assert_eq!(space().parse(\" \"), Ok((' ', \" \")));\n\n/// assert!(space().parse(\"!\").is_err());\n\n/// assert!(space().parse(\"\").is_err());\n\n/// ```\n\npub fn space<Input>() -> impl Parser<Input, 
Output = char, PartialState = ()>\n\nwhere\n\n Input: Stream<Token = char>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n let f: fn(char) -> bool = char::is_whitespace;\n\n satisfy(f).expected(\"whitespace\")\n\n}\n\n\n", "file_path": "src/parser/char.rs", "rank": 26, "score": 268873.21290910814 }, { "content": "/// Parses an uppercase letter according to [`std::char::is_uppercase`].\n\n///\n\n/// [`std::char::is_uppercase`]: https://doc.rust-lang.org/std/primitive.char.html#method.is_uppercase\n\n///\n\n/// ```\n\n/// use combine::Parser;\n\n/// use combine::parser::char::upper;\n\n/// assert_eq!(upper().parse(\"A\"), Ok(('A', \"\")));\n\n/// assert!(upper().parse(\"a\").is_err());\n\n/// ```\n\npub fn upper<Input>() -> impl Parser<Input, Output = char, PartialState = ()>\n\nwhere\n\n Input: Stream<Token = char>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n satisfy(|ch: char| ch.is_uppercase()).expected(\"uppercase letter\")\n\n}\n\n\n", "file_path": "src/parser/char.rs", "rank": 27, "score": 268873.20710209315 }, { "content": "/// Parses an alphabet letter according to [`std::char::is_alphabetic`].\n\n///\n\n/// [`std::char::is_alphabetic`]: https://doc.rust-lang.org/std/primitive.char.html#method.is_alphabetic\n\n///\n\n/// ```\n\n/// use combine::Parser;\n\n/// use combine::parser::char::letter;\n\n/// assert_eq!(letter().parse(\"a\"), Ok(('a', \"\")));\n\n/// assert_eq!(letter().parse(\"A\"), Ok(('A', \"\")));\n\n/// assert!(letter().parse(\"9\").is_err());\n\n/// ```\n\npub fn letter<Input>() -> impl Parser<Input, Output = char, PartialState = ()>\n\nwhere\n\n Input: Stream<Token = char>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n satisfy(|ch: char| ch.is_alphabetic()).expected(\"letter\")\n\n}\n\n\n", "file_path": "src/parser/char.rs", "rank": 28, "score": 268872.80034750805 }, { "content": "#[cfg(feature = \"std\")]\n\npub fn 
any_send_partial_state<Input, P>(p: P) -> AnySendPartialStateParser<P>\n\nwhere\n\n Input: Stream,\n\n P: Parser<Input>,\n\n P::PartialState: Send + 'static,\n\n{\n\n AnySendPartialStateParser(p)\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct Lazy<P>(P);\n\nimpl<Input, O, P, R> Parser<Input> for Lazy<P>\n\nwhere\n\n Input: Stream,\n\n P: FnMut() -> R,\n\n R: Parser<Input, Output = O>,\n\n{\n\n type Output = O;\n\n type PartialState = R::PartialState;\n\n\n", "file_path": "src/parser/combinator.rs", "rank": 29, "score": 268491.4035223451 }, { "content": "#[inline]\n\npub fn uncons<Input>(input: &mut Input) -> ParseResult<Input::Token, Input::Error>\n\nwhere\n\n Input: ?Sized + Stream,\n\n{\n\n match input.uncons() {\n\n Ok(x) => ConsumedOk(x),\n\n Err(err) => wrap_stream_error(input, err),\n\n }\n\n}\n\n\n", "file_path": "src/stream/mod.rs", "rank": 30, "score": 267850.1526653256 }, { "content": "/// Parses an octal digit.\n\n///\n\n/// ```\n\n/// use combine::Parser;\n\n/// use combine::parser::char::oct_digit;\n\n/// assert_eq!(oct_digit().parse(\"7\"), Ok(('7', \"\")));\n\n/// assert!(oct_digit().parse(\"8\").is_err());\n\n/// ```\n\npub fn oct_digit<Input>() -> impl Parser<Input, Output = char, PartialState = ()>\n\nwhere\n\n Input: Stream<Token = char>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n satisfy(|ch: char| ch.is_digit(8)).expected(\"octal digit\")\n\n}\n\n\n", "file_path": "src/parser/char.rs", "rank": 31, "score": 263939.7857838867 }, { "content": "/// Parses an octal digit.\n\n///\n\n/// ```\n\n/// use combine::Parser;\n\n/// use combine::parser::byte::oct_digit;\n\n/// assert_eq!(oct_digit().parse(&b\"7\"[..]), Ok((b'7', &b\"\"[..])));\n\n/// assert!(oct_digit().parse(&b\"8\"[..]).is_err());\n\n/// ```\n\npub fn oct_digit<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>\n\nwhere\n\n Input: Stream<Token = u8>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n 
satisfy(|ch| ch >= b'0' && ch <= b'7').expected(\"octal digit\")\n\n}\n\n\n", "file_path": "src/parser/byte.rs", "rank": 32, "score": 263939.7857838867 }, { "content": "/// Parses a hexdecimal digit with uppercase and lowercase.\n\n///\n\n/// ```\n\n/// use combine::Parser;\n\n/// use combine::parser::char::hex_digit;\n\n/// assert_eq!(hex_digit().parse(\"F\"), Ok(('F', \"\")));\n\n/// assert!(hex_digit().parse(\"H\").is_err());\n\n/// ```\n\npub fn hex_digit<Input>() -> impl Parser<Input, Output = char, PartialState = ()>\n\nwhere\n\n Input: Stream<Token = char>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n satisfy(|ch: char| ch.is_digit(0x10)).expected(\"hexadecimal digit\")\n\n}\n\n\n", "file_path": "src/parser/char.rs", "rank": 33, "score": 263939.2576643069 }, { "content": "/// Parses an ASCII hexdecimal digit (accepts both uppercase and lowercase).\n\n///\n\n/// ```\n\n/// use combine::Parser;\n\n/// use combine::parser::byte::hex_digit;\n\n/// assert_eq!(hex_digit().parse(&b\"F\"[..]), Ok((b'F', &b\"\"[..])));\n\n/// assert!(hex_digit().parse(&b\"H\"[..]).is_err());\n\n/// ```\n\npub fn hex_digit<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>\n\nwhere\n\n Input: Stream<Token = u8>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n byte_parser!(hex_digit, HexDigit, is_digit(16))\n\n}\n\n\n\nparser! 
{\n", "file_path": "src/parser/byte.rs", "rank": 34, "score": 263939.0596895659 }, { "content": "/// Parses either an ASCII alphabet letter or digit (a–z, A–Z, 0–9).\n\n///\n\n/// ```\n\n/// use combine::Parser;\n\n/// use combine::parser::byte::alpha_num;\n\n/// assert_eq!(alpha_num().parse(&b\"A\"[..]), Ok((b'A', &b\"\"[..])));\n\n/// assert_eq!(alpha_num().parse(&b\"1\"[..]), Ok((b'1', &b\"\"[..])));\n\n/// assert!(alpha_num().parse(&b\"!\"[..]).is_err());\n\n/// ```\n\npub fn alpha_num<Input>() -> impl Parser<Input, Output = u8, PartialState = ()>\n\nwhere\n\n Input: Stream<Token = u8>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n byte_parser!(alpha_num, AlphaNum, is_alphanumeric)\n\n}\n\n\n", "file_path": "src/parser/byte.rs", "rank": 35, "score": 263938.68423943094 }, { "content": "/// Parses either an alphabet letter or digit according to [`std::char::is_alphanumeric`].\n\n///\n\n/// [`std::char::is_alphanumeric`]: https://doc.rust-lang.org/std/primitive.char.html#method.is_alphanumeric\n\n///\n\n/// ```\n\n/// use combine::Parser;\n\n/// use combine::parser::char::alpha_num;\n\n/// assert_eq!(alpha_num().parse(\"A\"), Ok(('A', \"\")));\n\n/// assert_eq!(alpha_num().parse(\"1\"), Ok(('1', \"\")));\n\n/// assert!(alpha_num().parse(\"!\").is_err());\n\n/// ```\n\npub fn alpha_num<Input>() -> impl Parser<Input, Output = char, PartialState = ()>\n\nwhere\n\n Input: Stream<Token = char>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n satisfy(|ch: char| ch.is_alphanumeric()).expected(\"letter or digit\")\n\n}\n\n\n", "file_path": "src/parser/char.rs", "rank": 36, "score": 263937.4794816456 }, { "content": "/// Zero-copy parser which returns a pair: (consumed input range, parsed value).\n\n///\n\n///\n\n/// [`combinator::recognize_with_value`][] is a non-`RangeStream` alternative.\n\n///\n\n/// [`combinator::recognize_with_value`]: ../../parser/combinator/fn.recognize_with_value.html\n\n/// 
```\n\n/// # extern crate combine;\n\n/// # use combine::parser::range::recognize_with_value;\n\n/// # use combine::parser::char::{digit, char};\n\n/// # use combine::*;\n\n/// # fn main() {\n\n/// let mut parser = recognize_with_value((\n\n/// skip_many1(digit()),\n\n/// optional((attempt(char('.')), skip_many1(digit()))),\n\n/// ).map(|(_, opt)| opt.is_some()));\n\n///\n\n/// assert_eq!(parser.parse(\"1234!\"), Ok(((\"1234\", false), \"!\")));\n\n/// assert_eq!(parser.parse(\"1234.0001!\"), Ok(((\"1234.0001\", true), \"!\")));\n\n/// assert!(parser.parse(\"!\").is_err());\n\n/// assert!(parser.parse(\"1234.\").is_err());\n\n/// # }\n\n/// ```\n\npub fn recognize_with_value<Input, P>(parser: P) -> RecognizeWithValue<P>\n\nwhere\n\n P: Parser<Input>,\n\n Input: RangeStream,\n\n <Input as StreamOnce>::Range: crate::stream::Range,\n\n{\n\n RecognizeWithValue(parser)\n\n}\n\n\n", "file_path": "src/parser/range.rs", "rank": 37, "score": 262559.54321333603 }, { "content": "/// Parses `p` zero or more times ignoring the result.\n\n///\n\n/// NOTE: If `p` can succeed without consuming any input this may hang forever as `skip_many` will\n\n/// repeatedly use `p` to parse the same location in the input every time\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use combine::*;\n\n/// # use combine::parser::char::digit;\n\n/// # fn main() {\n\n/// let result = skip_many(digit())\n\n/// .parse(\"A\");\n\n/// assert_eq!(result, Ok(((), \"A\")));\n\n/// # }\n\n/// ```\n\npub fn skip_many[Input, P](p: P)(Input) -> ()\n\nwhere [\n\n P: Parser<Input>,\n\n]\n\n{\n\n ignore(many::<Sink, _, _>(ignore(p)))\n\n}\n\n}\n\n\n\nparser! 
{\n\n pub struct SkipMany1;\n\n type PartialState = <Ignore<Many1<Sink, Ignore<P>>> as Parser<Input>>::PartialState;\n", "file_path": "src/parser/repeat.rs", "rank": 38, "score": 260783.77176875947 }, { "content": "/// Parses `p` one or more times ignoring the result.\n\n///\n\n/// NOTE: If `p` can succeed without consuming any input this may hang forever as `skip_many1` will\n\n/// repeatedly use `p` to parse the same location in the input every time\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use combine::*;\n\n/// # use combine::parser::char::digit;\n\n/// # fn main() {\n\n/// let result = skip_many1(digit())\n\n/// .parse(\"123A\");\n\n/// assert_eq!(result, Ok(((), \"A\")));\n\n/// # }\n\n/// ```\n\npub fn skip_many1[Input, P](p: P)(Input) -> ()\n\nwhere [\n\n P: Parser<Input>,\n\n]\n\n{\n\n ignore(many1::<Sink, _, _>(ignore(p)))\n\n}\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct SepBy<F, P, S> {\n\n parser: P,\n\n separator: S,\n\n _marker: PhantomData<fn() -> F>,\n\n}\n\nimpl<F, Input, P, S> Parser<Input> for SepBy<F, P, S>\n\nwhere\n\n Input: Stream,\n\n F: Extend<P::Output> + Default,\n\n P: Parser<Input>,\n", "file_path": "src/parser/repeat.rs", "rank": 39, "score": 260783.7717687594 }, { "content": "/// Equivalent to [`p.and_then(f)`].\n\n///\n\n/// [`p.and_then(f)`]: ../trait.Parser.html#method.and_then\n\npub fn and_then<Input, P, F, O, E>(p: P, f: F) -> AndThen<P, F>\n\nwhere\n\n P: Parser<Input>,\n\n F: FnMut(P::Output) -> Result<O, E>,\n\n Input: Stream,\n\n E: Into<<Input::Error as ParseError<Input::Token, Input::Range, Input::Position>>::StreamError>,\n\n{\n\n AndThen(p, f)\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct Recognize<F, P>(P, PhantomData<fn() -> F>);\n\n\n\nimpl<F, P> Recognize<F, P> {\n\n #[inline]\n\n fn recognize_result<Input>(\n\n elements: &mut F,\n\n before: <Input as ResetStream>::Checkpoint,\n\n input: &mut Input,\n\n result: ParseResult<P::Output, <Input as StreamOnce>::Error>,\n", "file_path": 
"src/parser/combinator.rs", "rank": 40, "score": 256972.47155833698 }, { "content": "/// Constructs a parser which returns the tokens parsed by `parser` accumulated in\n\n/// `F: Extend<Input::Token>` instead of `P::Output`.\n\n///\n\n/// ```\n\n/// use combine::Parser;\n\n/// use combine::combinator::{skip_many1, token, recognize};\n\n/// use combine::parser::char::digit;\n\n///\n\n/// let mut parser = recognize((skip_many1(digit()), token('.'), skip_many1(digit())));\n\n/// assert_eq!(parser.parse(\"123.45\"), Ok((\"123.45\".to_string(), \"\")));\n\n/// assert_eq!(parser.parse(\"123.45\"), Ok((\"123.45\".to_string(), \"\")));\n\n/// ```\n\npub fn recognize<F, Input, P>(parser: P) -> Recognize<F, P>\n\nwhere\n\n Input: Stream,\n\n P: Parser<Input>,\n\n F: Default + Extend<<Input as StreamOnce>::Token>,\n\n{\n\n Recognize(parser, PhantomData)\n\n}\n\n\n\nimpl<Input, L, R> Parser<Input> for Either<L, R>\n\nwhere\n\n Input: Stream,\n\n L: Parser<Input>,\n\n R: Parser<Input, Output = L::Output>,\n\n{\n\n type Output = L::Output;\n\n type PartialState = Option<Either<L::PartialState, R::PartialState>>;\n\n\n\n #[inline]\n\n fn parse_lazy(\n", "file_path": "src/parser/combinator.rs", "rank": 41, "score": 253277.22826659703 }, { "content": "/// Parses `parser` from zero up to `count` times.\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use combine::*;\n\n/// # use combine::error::Info;\n\n/// # use combine::stream::easy::Error;\n\n/// # fn main() {\n\n/// let mut parser = count(2, token(b'a'));\n\n///\n\n/// let result = parser.parse(&b\"aaab\"[..]);\n\n/// assert_eq!(result, Ok((b\"aa\"[..].to_owned(), &b\"ab\"[..])));\n\n/// # }\n\n/// ```\n\npub fn count[F, Input, P](count: usize, parser: P)(Input) -> F\n\nwhere [\n\n Input: Stream,\n\n P: Parser<Input>,\n\n F: Extend<P::Output> + Default,\n\n]\n\n{\n\n count_min_max(0, *count, parser)\n\n}\n\n}\n\n\n\nparser! 
{\n\n pub struct SkipCount;\n\n type PartialState = <With<Count<Sink, Input, P>, Value<Input, ()>> as Parser<Input>>::PartialState;\n\n /// Parses `parser` from zero up to `count` times skipping the output of `parser`.\n\n ///\n\n /// ```\n\n /// # extern crate combine;\n\n /// # use combine::*;\n\n /// # use combine::stream::easy::{Error, Info};\n", "file_path": "src/parser/repeat.rs", "rank": 42, "score": 252460.4809758364 }, { "content": "/// Decodes `input` using `parser`.\n\n///\n\n/// Return `Ok(Some(token), consumed_data)` if there was enough data to finish parsing using\n\n/// `parser`.\n\n/// Returns `Ok(None, consumed_data)` if `input` did not contain enough data to finish parsing\n\n/// using `parser`.\n\n///\n\n/// See `examples/async.rs` for example usage in a `tokio_io::codec::Decoder`\n\npub fn decode<Input, P>(\n\n mut parser: P,\n\n mut input: &mut Input,\n\n partial_state: &mut P::PartialState,\n\n) -> Result<(Option<P::Output>, usize), <Input as StreamOnce>::Error>\n\nwhere\n\n P: Parser<Input>,\n\n Input: RangeStream,\n\n{\n\n let start = input.checkpoint();\n\n match parser.parse_with_state(&mut input, partial_state) {\n\n Ok(message) => Ok((Some(message), input.distance(&start))),\n\n Err(err) => {\n\n if input.is_partial() && err.is_unexpected_end_of_input() {\n\n Ok((None, input.distance(&start)))\n\n } else {\n\n Err(err)\n\n }\n\n }\n\n }\n", "file_path": "src/stream/mod.rs", "rank": 43, "score": 252344.10065195488 }, { "content": "/// Parses a token and succeeds depending on the result of `predicate`.\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use combine::*;\n\n/// # fn main() {\n\n/// let mut parser = satisfy(|c| c == '!' 
|| c == '?');\n\n/// assert_eq!(parser.parse(\"!\").map(|x| x.0), Ok('!'));\n\n/// assert_eq!(parser.parse(\"?\").map(|x| x.0), Ok('?'));\n\n/// # }\n\n/// ```\n\npub fn satisfy<Input, P>(predicate: P) -> Satisfy<Input, P>\n\nwhere\n\n Input: Stream,\n\n P: FnMut(Input::Token) -> bool,\n\n{\n\n Satisfy {\n\n predicate: predicate,\n\n _marker: PhantomData,\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct SatisfyMap<Input, P> {\n\n predicate: P,\n\n _marker: PhantomData<Input>,\n\n}\n\n\n\nimpl<Input, P, R> Parser<Input> for SatisfyMap<Input, P>\n\nwhere\n\n Input: Stream,\n\n P: FnMut(Input::Token) -> Option<R>,\n\n{\n\n type Output = R;\n\n type PartialState = ();\n\n #[inline]\n\n fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<Self::Output, Input::Error> {\n\n satisfy_impl(input, &mut self.predicate)\n\n }\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 44, "score": 249757.87632879603 }, { "content": "#[doc(hidden)]\n\npub fn input_at_eof<Input>(input: &mut Input) -> bool\n\nwhere\n\n Input: ?Sized + Stream,\n\n{\n\n let before = input.checkpoint();\n\n let x = input\n\n .uncons()\n\n .err()\n\n .map_or(false, |err| err.is_unexpected_end_of_input());\n\n input.reset(before).is_ok() && x\n\n}\n\n\n\n/// Removes items from the input while `predicate` returns `true`.\n", "file_path": "src/stream/mod.rs", "rank": 45, "score": 248110.67091978545 }, { "content": "/// `attempt(p)` behaves as `p` except it acts as if the parser hadn't consumed any input if `p` fails\n\n/// after consuming input.\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use combine::*;\n\n/// # use combine::parser::char::string;\n\n/// # fn main() {\n\n/// let mut p = attempt(string(\"let\"))\n\n/// .or(string(\"lex\"));\n\n/// let result = p.parse(\"lex\").map(|x| x.0);\n\n/// assert_eq!(result, Ok(\"lex\"));\n\n/// let result = p.parse(\"aet\").map(|x| x.0);\n\n/// assert!(result.is_err());\n\n/// # }\n\n/// ```\n\npub fn attempt<Input, P>(p: P) -> 
Try<P>\n\nwhere\n\n Input: Stream,\n\n P: Parser<Input>,\n\n{\n\n Try(p)\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct LookAhead<P>(P);\n\n\n\nimpl<Input, O, P> Parser<Input> for LookAhead<P>\n\nwhere\n\n Input: Stream,\n\n P: Parser<Input, Output = O>,\n\n{\n\n type Output = O;\n\n type PartialState = ();\n\n\n\n #[inline]\n", "file_path": "src/parser/combinator.rs", "rank": 46, "score": 246684.25246231118 }, { "content": "#[doc(hidden)]\n\npub fn ignore<Input, P>(p: P) -> Ignore<P>\n\nwhere\n\n Input: Stream,\n\n P: Parser<Input>,\n\n{\n\n Ignore(p)\n\n}\n\n\n\n#[cfg(feature = \"std\")]\n\n#[derive(Default)]\n\npub struct AnyPartialState(Option<Box<dyn std::any::Any>>);\n\n\n\n#[cfg(feature = \"std\")]\n\npub struct AnyPartialStateParser<P>(P);\n\n\n\n#[cfg(feature = \"std\")]\n\nimpl<Input, P> Parser<Input> for AnyPartialStateParser<P>\n\nwhere\n\n Input: Stream,\n\n P: Parser<Input>,\n", "file_path": "src/parser/combinator.rs", "rank": 47, "score": 246662.98991450225 }, { "content": "pub fn no_partial<Input, P>(p: P) -> NoPartial<P>\n\nwhere\n\n Input: Stream,\n\n P: Parser<Input>,\n\n{\n\n NoPartial(p)\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct Ignore<P>(P);\n\nimpl<Input, P> Parser<Input> for Ignore<P>\n\nwhere\n\n Input: Stream,\n\n P: Parser<Input>,\n\n{\n\n type Output = ();\n\n type PartialState = P::PartialState;\n\n\n\n #[inline]\n\n fn parse_lazy(\n", "file_path": "src/parser/combinator.rs", "rank": 48, "score": 246662.9899145023 }, { "content": "/// Constructs a parser out of an environment and a function which needs the given environment to\n\n/// do the parsing. 
This is commonly useful to allow multiple parsers to share some environment\n\n/// while still allowing the parsers to be written in separate functions.\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use std::collections::HashMap;\n\n/// # use combine::*;\n\n/// # use combine::char::letter;\n\n/// # fn main() {\n\n/// struct Interner(HashMap<String, u32>);\n\n/// impl Interner {\n\n/// fn string<Input>(&self, input: &mut Input) -> StdParseResult<u32, Input>\n\n/// where Input: Stream<Token = char>,\n\n/// Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n/// {\n\n/// many(letter())\n\n/// .map(|s: String| self.0.get(&s).cloned().unwrap_or(0))\n\n/// .parse_stream(input)\n\n/// .into_result()\n\n/// }\n\n/// }\n\n///\n\n/// let mut map = HashMap::new();\n\n/// map.insert(\"hello\".into(), 1);\n\n/// map.insert(\"test\".into(), 2);\n\n///\n\n/// let env = Interner(map);\n\n/// let mut parser = env_parser(&env, Interner::string);\n\n///\n\n/// let result = parser.parse(\"hello\");\n\n/// assert_eq!(result, Ok((1, \"\")));\n\n///\n\n/// let result = parser.parse(\"world\");\n\n/// assert_eq!(result, Ok((0, \"\")));\n\n/// # }\n\n/// ```\n\npub fn env_parser<E, Input, O>(\n\n env: E,\n\n parser: fn(E, &mut Input) -> StdParseResult<O, Input>,\n\n) -> EnvParser<E, Input, O>\n\nwhere\n\n E: Clone,\n\n Input: Stream,\n\n{\n\n EnvParser { env, parser }\n\n}\n", "file_path": "src/parser/function.rs", "rank": 49, "score": 242653.21450299217 }, { "content": "/// Takes a tuple, a slice or an array of parsers and tries to apply them each in order.\n\n/// Fails if all the parsers fails or if an applied parser consumes input before failing.\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use combine::*;\n\n/// # use combine::char::{digit, string};\n\n/// # fn main() {\n\n/// // `choice` is overloaded on tuples so that different types of parsers can be used\n\n/// // (each parser must still have the same input and output types)\n\n/// let 
mut parser = choice((\n\n/// string(\"Apple\").map(|s| s.to_string()),\n\n/// many1(digit()),\n\n/// string(\"Orange\").map(|s| s.to_string()),\n\n/// ));\n\n/// assert_eq!(parser.parse(\"1234\"), Ok((\"1234\".to_string(), \"\")));\n\n/// assert_eq!(parser.parse(\"Orangexx\"), Ok((\"Orange\".to_string(), \"xx\")));\n\n/// assert!(parser.parse(\"Appl\").is_err());\n\n/// assert!(parser.parse(\"Pear\").is_err());\n\n///\n\n/// // If arrays or slices are used then all parsers must have the same type\n\n/// // (`string` in this case)\n\n/// let mut parser2 = choice([string(\"one\"), string(\"two\"), string(\"three\")]);\n\n/// // Fails as the parser for \"two\" consumes the first 't' before failing\n\n/// assert!(parser2.parse(\"three\").is_err());\n\n///\n\n/// // Use 'attempt' to make failing parsers always act as if they have not consumed any input\n\n/// let mut parser3 = choice([attempt(string(\"one\")), attempt(string(\"two\")), attempt(string(\"three\"))]);\n\n/// assert_eq!(parser3.parse(\"three\"), Ok((\"three\", \"\")));\n\n/// # }\n\n/// ```\n\npub fn choice<Input, P>(ps: P) -> Choice<P>\n\nwhere\n\n Input: Stream,\n\n P: ChoiceParser<Input>,\n\n{\n\n Choice(ps)\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct Or<P1, P2>(Choice<(P1, P2)>);\n\nimpl<Input, O, P1, P2> Parser<Input> for Or<P1, P2>\n\nwhere\n\n Input: Stream,\n\n P1: Parser<Input, Output = O>,\n\n P2: Parser<Input, Output = O>,\n\n{\n\n type Output = O;\n\n type PartialState = <Choice<(P1, P2)> as Parser<Input>>::PartialState;\n\n\n\n parse_mode!(Input);\n", "file_path": "src/parser/choice.rs", "rank": 50, "score": 238703.33096874552 }, { "content": "/// Constructs the parser lazily on each `parse_*` call. Can be used to effectively reduce the\n\n/// size of deeply nested parsers as only the function producing the parser is stored.\n\n///\n\n/// NOTE: Expects that the parser returned is always the same one, if that is not the case the\n\n/// reported error may be wrong. 
If different parsers may be returned, use the [`factory`][] parser\n\n/// instead.\n\n///\n\n/// [`factory`]: fn.factory.html\n\npub fn lazy<Input, P, R>(p: P) -> Lazy<P>\n\nwhere\n\n Input: Stream,\n\n P: FnMut() -> R,\n\n R: Parser<Input>,\n\n{\n\n Lazy(p)\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct Factory<P, R>(P, Option<R>);\n\n\n\nimpl<P, R> Factory<P, R> {\n\n fn parser<Input>(&mut self, input: &mut Input) -> &mut R\n\n where\n\n P: FnMut(&mut Input) -> R,\n\n {\n\n if let Some(ref mut r) = self.1 {\n\n return r;\n\n }\n", "file_path": "src/parser/combinator.rs", "rank": 51, "score": 238121.0672018811 }, { "content": "/// Parses `p` 1 or more times separated by `op`. The value returned is the one produced by the\n\n/// left associative application of the function returned by the parser `op`.\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use combine::*;\n\n/// # use combine::parser::char::digit;\n\n/// # fn main() {\n\n/// let number = digit().map(|c: char| c.to_digit(10).unwrap());\n\n/// let sub = token('-').map(|_| |l: u32, r: u32| l - r);\n\n/// let mut parser = chainl1(number, sub);\n\n/// assert_eq!(parser.parse(\"9-3-5\"), Ok((1, \"\")));\n\n/// # }\n\n/// ```\n\npub fn chainl1<Input, P, Op>(parser: P, op: Op) -> Chainl1<P, Op>\n\nwhere\n\n Input: Stream,\n\n P: Parser<Input>,\n\n Op: Parser<Input>,\n\n Op::Output: FnOnce(P::Output, P::Output) -> P::Output,\n\n{\n\n Chainl1(parser, op)\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct Chainr1<P, Op>(P, Op);\n\nimpl<Input, P, Op> Parser<Input> for Chainr1<P, Op>\n\nwhere\n\n Input: Stream,\n\n P: Parser<Input>,\n\n Op: Parser<Input>,\n\n Op::Output: FnOnce(P::Output, P::Output) -> P::Output,\n\n{\n\n type Output = P::Output;\n", "file_path": "src/parser/repeat.rs", "rank": 52, "score": 237501.31627932168 }, { "content": "/// Parses `p` one or more times separated by `op`. 
The value returned is the one produced by the\n\n/// right associative application of the function returned by `op`.\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use combine::*;\n\n/// # use combine::parser::char::digit;\n\n/// # fn main() {\n\n/// let number = digit().map(|c: char| c.to_digit(10).unwrap());\n\n/// let pow = token('^').map(|_| |l: u32, r: u32| l.pow(r));\n\n/// let mut parser = chainr1(number, pow);\n\n/// assert_eq!(parser.parse(\"2^3^2\"), Ok((512, \"\")));\n\n/// }\n\n/// ```\n\npub fn chainr1<Input, P, Op>(parser: P, op: Op) -> Chainr1<P, Op>\n\nwhere\n\n Input: Stream,\n\n P: Parser<Input>,\n\n Op: Parser<Input>,\n\n Op::Output: FnOnce(P::Output, P::Output) -> P::Output,\n\n{\n\n Chainr1(parser, op)\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct TakeUntil<F, P> {\n\n end: P,\n\n _marker: PhantomData<fn() -> F>,\n\n}\n\nimpl<F, Input, P> Parser<Input> for TakeUntil<F, P>\n\nwhere\n\n Input: Stream,\n\n F: Extend<<Input as StreamOnce>::Token> + Default,\n\n P: Parser<Input>,\n", "file_path": "src/parser/repeat.rs", "rank": 53, "score": 237500.8538410177 }, { "content": "/// Parses an escaped string by first applying `parser` which accept the normal characters which do\n\n/// not need escaping. Once `parser` can not consume any more input it checks if the next token\n\n/// is `escape`. If it is then `escape_parser` is used to parse the escaped character and then\n\n/// resumes parsing using `parser`. 
If `escape` was not found then the parser finishes\n\n/// successfully.\n\n///\n\n/// This returns `()` since there isn't a good way to collect the output of the parsers so it is\n\n/// best paired with one of the `recognize` parsers.\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use combine::*;\n\n/// # use combine::parser::repeat::escaped;\n\n/// # use combine::parser::char;\n\n/// # use combine::parser::range::{recognize, take_while1};\n\n/// # fn main() {\n\n/// let mut parser = recognize(\n\n/// escaped(take_while1(|c| c != '\"' && c != '\\\\'), '\\\\', one_of(r#\"nr\"\\\"#.chars()))\n\n/// );\n\n/// assert_eq!(parser.parse(r#\"ab\\\"12\\n\\rc\"\"#), Ok((r#\"ab\\\"12\\n\\rc\"#, r#\"\"\"#)));\n\n/// assert!(parser.parse(r#\"\\\"#).is_err());\n\n/// assert!(parser.parse(r#\"\\a\"#).is_err());\n\n/// }\n\n/// ```\n\npub fn escaped<Input, P, Q>(\n\n parser: P,\n\n escape: <Input as StreamOnce>::Token,\n\n escape_parser: Q,\n\n) -> Escaped<P, Q, Input::Token>\n\nwhere\n\n Input: Stream,\n\n P: Parser<Input>,\n\n <Input as StreamOnce>::Token: PartialEq,\n\n Q: Parser<Input>,\n\n{\n\n Escaped {\n\n parser,\n\n escape,\n\n escape_parser,\n\n }\n\n}\n\n\n\npub struct Iterate<F, I, P> {\n\n parser: P,\n", "file_path": "src/parser/repeat.rs", "rank": 54, "score": 237234.3938891145 }, { "content": "/// `look_ahead(p)` acts as `p` but doesn't consume input on success.\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use combine::*;\n\n/// # use combine::parser::char::string;\n\n/// # fn main() {\n\n/// let mut p = look_ahead(string(\"test\"));\n\n///\n\n/// let result = p.parse(\"test str\");\n\n/// assert_eq!(result, Ok((\"test\", \"test str\")));\n\n///\n\n/// let result = p.parse(\"aet\");\n\n/// assert!(result.is_err());\n\n/// # }\n\n/// ```\n\npub fn look_ahead<Input, P>(p: P) -> LookAhead<P>\n\nwhere\n\n Input: Stream,\n\n P: Parser<Input>,\n\n{\n\n LookAhead(p)\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct Map<P, F>(P, F);\n\nimpl<Input, A, 
B, P, F> Parser<Input> for Map<P, F>\n\nwhere\n\n Input: Stream,\n\n P: Parser<Input, Output = A>,\n\n F: FnMut(A) -> B,\n\n{\n\n type Output = B;\n\n type PartialState = P::PartialState;\n\n\n\n parse_mode!(Input);\n", "file_path": "src/parser/combinator.rs", "rank": 55, "score": 237230.47522554488 }, { "content": "/// By implementing the `Parser` trait a type says that it can be used to parse an input stream\n\n/// into the type `Output`.\n\n///\n\n/// All methods have a default implementation but there needs to be at least an implementation of\n\n/// [`parse_stream`], [`parse_stream`], or [`parse_lazy`]. If the last is implemented, an\n\n/// implementation of [`add_error`] may also be required. See the documentation for\n\n/// [`parse_lazy`] for details.\n\n///\n\n/// [`parse_stream`]: trait.Parser.html#method.parse_stream\n\n/// [`parse_stream`]: trait.Parser.html#method.parse_stream\n\n/// [`parse_lazy`]: trait.Parser.html#method.parse_lazy\n\n/// [`add_error`]: trait.Parser.html#method.add_error\n\npub trait Parser<Input: Stream> {\n\n /// The type which is returned if the parser is successful.\n\n type Output;\n\n\n\n /// Determines the state necessary to resume parsing after more input is supplied.\n\n ///\n\n /// If partial parsing is not supported this can be set to `()`.\n\n type PartialState: Default;\n\n\n\n /// Entry point of the parser. Takes some input and tries to parse it.\n\n ///\n\n /// Returns the parsed result and the remaining input if the parser succeeds, or a\n\n /// error otherwise.\n\n ///\n\n /// This is the most straightforward entry point to a parser. Since it does not decorate the\n\n /// input in any way you may find the error messages a hard to read. 
If that is the case you\n\n /// may want to try wrapping your input with an [`easy::Stream`][] or call [`easy_parse`][]\n\n /// instead.\n\n ///\n\n /// [`easy::Stream`]: ../easy/struct.Stream.html\n", "file_path": "src/parser/mod.rs", "rank": 56, "score": 235660.3300296187 }, { "content": "/// Parses a token and passes it to `predicate`. If `predicate` returns `Some` the parser succeeds\n\n/// and returns the value inside the `Option`. If `predicate` returns `None` the parser fails\n\n/// without consuming any input.\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use combine::*;\n\n/// # fn main() {\n\n/// #[derive(Debug, PartialEq)]\n\n/// enum YesNo {\n\n/// Yes,\n\n/// No,\n\n/// }\n\n/// let mut parser = satisfy_map(|c| {\n\n/// match c {\n\n/// 'Y' => Some(YesNo::Yes),\n\n/// 'N' => Some(YesNo::No),\n\n/// _ => None,\n\n/// }\n\n/// });\n\n/// assert_eq!(parser.parse(\"Y\").map(|x| x.0), Ok(YesNo::Yes));\n\n/// assert!(parser.parse(\"A\").map(|x| x.0).is_err());\n\n/// # }\n\n/// ```\n\npub fn satisfy_map<Input, P, R>(predicate: P) -> SatisfyMap<Input, P>\n\nwhere\n\n Input: Stream,\n\n P: FnMut(Input::Token) -> Option<R>,\n\n{\n\n SatisfyMap {\n\n predicate: predicate,\n\n _marker: PhantomData,\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct Token<Input>\n\nwhere\n\n Input: Stream,\n\n Input::Token: PartialEq,\n\n{\n\n c: Input::Token,\n\n _marker: PhantomData<Input>,\n\n}\n", "file_path": "src/parser/token.rs", "rank": 57, "score": 234035.03777271748 }, { "content": "pub fn input_converter<Input, InputInner, P, C>(\n\n parser: P,\n\n converter: C,\n\n) -> InputConverter<InputInner, P, C>\n\nwhere\n\n Input: Stream,\n\n InputInner: Stream,\n\n P: Parser<InputInner>,\n\n for<'c> C: Converter<'c, Input, InputInner = InputInner>,\n\n{\n\n InputConverter {\n\n parser,\n\n converter,\n\n _marker: PhantomData,\n\n }\n\n}\n", "file_path": "src/parser/combinator.rs", "rank": 58, "score": 233273.3969483509 }, { "content": "/// Constructs the parser 
lazily on each `parse_*` call. This is similar to [`lazy`][] but it\n\n/// takes `Input` as an argument and allows different parsers to be returned on each call to\n\n/// `p` while still reporting the correct errors.\n\n///\n\n/// [`lazy`]: fn.lazy.html\n\n///\n\n/// ```\n\n/// # use combine::*;\n\n/// # use combine::parser::char::{digit, letter};\n\n/// # use combine::parser::combinator::{FnOpaque, opaque, factory};\n\n///\n\n/// let mut parsers: Vec<FnOpaque<_, _>> = vec![opaque(|f| f(&mut digit())), opaque(|f| f(&mut letter()))];\n\n/// let mut iter = parsers.into_iter().cycle();\n\n/// let mut parser = many(factory(move |_| iter.next().unwrap()));\n\n/// assert_eq!(parser.parse(\"1a2b3cd\"), Ok((\"1a2b3c\".to_string(), \"d\")));\n\n/// ```\n\npub fn factory<Input, P, R>(p: P) -> Factory<P, R>\n\nwhere\n\n Input: Stream,\n\n P: FnMut(&mut Input) -> R,\n\n R: Parser<Input>,\n\n{\n\n Factory(p, None)\n\n}\n\n\n\nmod internal {\n", "file_path": "src/parser/combinator.rs", "rank": 59, "score": 230190.50249530707 }, { "content": "/// Parses `p` one or more times returning a collection with the values from `p`.\n\n///\n\n/// If the returned collection cannot be inferred type annotations must be supplied, either by\n\n/// annotating the resulting type binding `let collection: Vec<_> = ...` or by specializing when\n\n/// calling many1 `many1::<Vec<_>, _>(...)`.\n\n///\n\n/// NOTE: If `p` can succeed without consuming any input this may hang forever as `many1` will\n\n/// repeatedly use `p` to parse the same location in the input every time\n\n///\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use combine::*;\n\n/// # use combine::parser::char::digit;\n\n/// # fn main() {\n\n/// let result = many1::<Vec<_>, _, _>(digit())\n\n/// .parse(\"A123\");\n\n/// assert!(result.is_err());\n\n/// # }\n\n/// ```\n\npub fn many1<F, Input, P>(p: P) -> Many1<F, P>\n\nwhere\n\n Input: Stream,\n\n F: Extend<P::Output> + Default,\n\n P: Parser<Input>,\n\n{\n\n Many1(p, 
PhantomData)\n\n}\n\n\n\n#[derive(Clone)]\n\n#[doc(hidden)]\n\n// FIXME Should not be public\n\npub struct Sink;\n\n\n\nimpl Default for Sink {\n\n fn default() -> Self {\n\n Sink\n\n }\n\n}\n\n\n", "file_path": "src/parser/repeat.rs", "rank": 60, "score": 230189.43380342808 }, { "content": "/// Parses `p` zero or more times returning a collection with the values from `p`.\n\n///\n\n/// If the returned collection cannot be inferred type annotations must be supplied, either by\n\n/// annotating the resulting type binding `let collection: Vec<_> = ...` or by specializing when\n\n/// calling many, `many::<Vec<_>, _, _>(...)`.\n\n///\n\n/// NOTE: If `p` can succeed without consuming any input this may hang forever as `many` will\n\n/// repeatedly use `p` to parse the same location in the input every time\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use combine::*;\n\n/// # use combine::parser::char::digit;\n\n/// # fn main() {\n\n/// let result = many(digit())\n\n/// .parse(\"123A\")\n\n/// .map(|x| x.0);\n\n/// assert_eq!(result, Ok(vec!['1', '2', '3']));\n\n/// # }\n\n/// ```\n\npub fn many<F, Input, P>(p: P) -> Many<F, P>\n\nwhere\n\n Input: Stream,\n\n P: Parser<Input>,\n\n F: Extend<P::Output> + Default,\n\n{\n\n Many(p, PhantomData)\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct Many1<F, P>(P, PhantomData<fn() -> F>);\n\nimpl<F, Input, P> Parser<Input> for Many1<F, P>\n\nwhere\n\n Input: Stream,\n\n F: Extend<P::Output> + Default,\n\n P: Parser<Input>,\n\n{\n\n type Output = F;\n\n type PartialState = (bool, bool, F, P::PartialState);\n\n\n", "file_path": "src/parser/repeat.rs", "rank": 61, "score": 230187.07848257228 }, { "content": "/// Matches `regex` on the input returning the entire input if it matches.\n\n/// Never consumes any input.\n\n///\n\n/// ```\n\n/// extern crate regex;\n\n/// extern crate combine;\n\n/// use regex::Regex;\n\n/// use combine::Parser;\n\n/// use combine::parser::regex::match_;\n\n///\n\n/// fn main() {\n\n/// let 
regex = Regex::new(\"[:alpha:]+\").unwrap();\n\n/// assert_eq!(\n\n/// match_(&regex).parse(\"abc123\"),\n\n/// Ok((\"abc123\", \"abc123\"))\n\n/// );\n\n/// }\n\n/// ```\n\npub fn match_<R, Input>(regex: R) -> Match<R, Input>\n\nwhere\n\n R: Regex<Input::Range>,\n\n Input: RangeStream,\n\n{\n\n Match(regex, PhantomData)\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct Find<R, Input>(R, PhantomData<fn() -> Input>);\n\n\n\nimpl<'a, Input, R> Parser<Input> for Find<R, Input>\n\nwhere\n\n R: Regex<Input::Range>,\n\n Input: RangeStream,\n\n Input::Range: crate::stream::Range,\n\n{\n\n type Output = Input::Range;\n\n type PartialState = ();\n\n\n", "file_path": "src/parser/regex.rs", "rank": 62, "score": 230107.87940750772 }, { "content": "/// Parses the string `s`.\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use combine::*;\n\n/// # use combine::parser::char::string;\n\n/// # fn main() {\n\n/// let result = string(\"rust\")\n\n/// .parse(\"rust\")\n\n/// .map(|x| x.0);\n\n/// assert_eq!(result, Ok(\"rust\"));\n\n/// # }\n\n/// ```\n\npub fn string<'a, Input>(s: &'static str) -> impl Parser<Input, Output = &'a str>\n\nwhere\n\n Input: Stream<Token = char>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n string_cmp(s, |l, r| l == r)\n\n}\n\n\n", "file_path": "src/parser/char.rs", "rank": 63, "score": 228901.86619821144 }, { "content": "/// Parses `parser` zero or more time separated by `separator`, returning a collection with the\n\n/// values from `p`.\n\n///\n\n/// If the returned collection cannot be inferred type annotations must be supplied, either by\n\n/// annotating the resulting type binding `let collection: Vec<_> = ...` or by specializing when\n\n/// calling `sep_by`, `sep_by::<Vec<_>, _, _>(...)`.\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use combine::*;\n\n/// # use combine::parser::char::digit;\n\n/// # fn main() {\n\n/// let mut parser = sep_by(digit(), token(','));\n\n/// let result_ok = 
parser.parse(\"1,2,3\");\n\n/// assert_eq!(result_ok, Ok((vec!['1', '2', '3'], \"\")));\n\n/// let result_ok2 = parser.parse(\"\");\n\n/// assert_eq!(result_ok2, Ok((vec![], \"\")));\n\n/// # }\n\n/// ```\n\npub fn sep_by<F, Input, P, S>(parser: P, separator: S) -> SepBy<F, P, S>\n\nwhere\n\n Input: Stream,\n\n F: Extend<P::Output> + Default,\n\n P: Parser<Input>,\n\n S: Parser<Input>,\n\n{\n\n SepBy {\n\n parser,\n\n separator,\n\n _marker: PhantomData,\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct SepBy1<F, P, S> {\n\n parser: P,\n\n separator: S,\n\n _marker: PhantomData<fn() -> F>,\n\n}\n", "file_path": "src/parser/repeat.rs", "rank": 64, "score": 223686.57706525692 }, { "content": "/// Equivalent to [`p.map_input(f)`].\n\n///\n\n/// [`p.map_input(f)`]: ../trait.Parser.html#method.map_input\n\npub fn map_input<Input, P, F, B>(p: P, f: F) -> MapInput<P, F>\n\nwhere\n\n Input: Stream,\n\n P: Parser<Input>,\n\n F: FnMut(P::Output, &mut Input) -> B,\n\n{\n\n MapInput(p, f)\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct FlatMap<P, F>(P, F);\n\nimpl<Input, A, B, P, F> Parser<Input> for FlatMap<P, F>\n\nwhere\n\n Input: Stream,\n\n P: Parser<Input, Output = A>,\n\n F: FnMut(A) -> Result<B, Input::Error>,\n\n{\n\n type Output = B;\n\n type PartialState = P::PartialState;\n\n\n", "file_path": "src/parser/combinator.rs", "rank": 65, "score": 223314.87003046556 }, { "content": "/// Takes input until `end` is encountered or `end` indicates that it has consumed input before\n\n/// failing (`attempt` can be used to make it look like it has not consumed any input)\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use combine::*;\n\n/// # use combine::parser::char;\n\n/// # use combine::parser::byte;\n\n/// # use combine::parser::combinator::attempt;\n\n/// # use combine::parser::repeat::take_until;\n\n/// # fn main() {\n\n/// let mut char_parser = take_until(char::digit());\n\n/// assert_eq!(char_parser.parse(\"abc123\"), Ok((\"abc\".to_string(), 
\"123\")));\n\n///\n\n/// let mut byte_parser = take_until(byte::bytes(&b\"TAG\"[..]));\n\n/// assert_eq!(byte_parser.parse(&b\"123TAG\"[..]), Ok((b\"123\".to_vec(), &b\"TAG\"[..])));\n\n/// assert!(byte_parser.parse(&b\"123TATAG\"[..]).is_err());\n\n///\n\n/// // `attempt` must be used if the `end` should be consume input before failing\n\n/// let mut byte_parser = take_until(attempt(byte::bytes(&b\"TAG\"[..])));\n\n/// assert_eq!(byte_parser.parse(&b\"123TATAG\"[..]), Ok((b\"123TA\".to_vec(), &b\"TAG\"[..])));\n\n/// }\n\n/// ```\n\npub fn take_until<F, Input, P>(end: P) -> TakeUntil<F, P>\n\nwhere\n\n Input: Stream,\n\n F: Extend<<Input as StreamOnce>::Token> + Default,\n\n P: Parser<Input>,\n\n{\n\n TakeUntil {\n\n end,\n\n _marker: PhantomData,\n\n }\n\n}\n\n\n\nparser! {\n\n pub struct SkipUntil;\n\n type PartialState = <With<TakeUntil<Sink, P>, Value<Input, ()>> as Parser<Input>>::PartialState;\n\n /// Skips input until `end` is encountered or `end` indicates that it has consumed input before\n\n /// failing (`attempt` can be used to make it look like it has not consumed any input)\n\n ///\n\n /// ```\n\n /// # extern crate combine;\n", "file_path": "src/parser/repeat.rs", "rank": 66, "score": 222629.7629057754 }, { "content": "#[doc(hidden)]\n\npub fn wrap_stream_error<T, Input>(\n\n input: &Input,\n\n err: <Input::Error as ParseError<Input::Token, Input::Range, Input::Position>>::StreamError,\n\n) -> ParseResult<T, <Input as StreamOnce>::Error>\n\nwhere\n\n Input: ?Sized + StreamOnce + Positioned,\n\n{\n\n let err = Input::Error::from_error(input.position(), err);\n\n if input.is_partial() {\n\n ConsumedErr(err)\n\n } else {\n\n EmptyErr(err.into())\n\n }\n\n}\n\n\n", "file_path": "src/stream/mod.rs", "rank": 67, "score": 222334.8101189952 }, { "content": "/// Equivalent to [`p.then(f)`].\n\n///\n\n/// [`p.then(f)`]: ../trait.Parser.html#method.then\n\npub fn then<Input, P, F, N>(p: P, f: F) -> Then<P, F>\n\nwhere\n\n Input: Stream,\n\n F: FnMut(P::Output) 
-> N,\n\n P: Parser<Input>,\n\n N: Parser<Input>,\n\n{\n\n Then(p, f)\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct ThenPartial<P, F>(P, F);\n\nimpl<Input, P, N, F> Parser<Input> for ThenPartial<P, F>\n\nwhere\n\n Input: Stream,\n\n F: FnMut(&mut P::Output) -> N,\n\n P: Parser<Input>,\n\n N: Parser<Input>,\n\n{\n\n type Output = N::Output;\n", "file_path": "src/parser/sequence.rs", "rank": 68, "score": 221447.64635989873 }, { "content": "/// Parses `parser` one or more time separated by `separator`, returning a collection with the\n\n/// values from `p`.\n\n///\n\n/// If the returned collection cannot be inferred type annotations must be supplied, either by\n\n/// annotating the resulting type binding `let collection: Vec<_> = ...` or by specializing when\n\n/// calling `sep_by`, `sep_by1::<Vec<_>, _, _>(...)`.\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use combine::*;\n\n/// # use combine::parser::char::digit;\n\n/// # use combine::stream::easy;\n\n/// # use combine::stream::position::{self, SourcePosition};\n\n/// # fn main() {\n\n/// let mut parser = sep_by1(digit(), token(','));\n\n/// let result_ok = parser.easy_parse(position::Stream::new(\"1,2,3\"))\n\n/// .map(|(vec, state)| (vec, state.input));\n\n/// assert_eq!(result_ok, Ok((vec!['1', '2', '3'], \"\")));\n\n/// let result_err = parser.easy_parse(position::Stream::new(\"\"));\n\n/// assert_eq!(result_err, Err(easy::Errors {\n\n/// position: SourcePosition::default(),\n\n/// errors: vec![\n\n/// easy::Error::end_of_input(),\n\n/// easy::Error::Expected(\"digit\".into())\n\n/// ]\n\n/// }));\n\n/// # }\n\n/// ```\n\npub fn sep_by1<F, Input, P, S>(parser: P, separator: S) -> SepBy1<F, P, S>\n\nwhere\n\n Input: Stream,\n\n F: Extend<P::Output> + Default,\n\n P: Parser<Input>,\n\n S: Parser<Input>,\n\n{\n\n SepBy1 {\n\n parser,\n\n separator,\n\n _marker: PhantomData,\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct SepEndBy<F, P, S> {\n\n parser: P,\n\n separator: S,\n\n _marker: 
PhantomData<fn() -> F>,\n\n}\n", "file_path": "src/parser/repeat.rs", "rank": 69, "score": 217394.87901257724 }, { "content": "/// Parses `parser` zero or more times separated and ended by `separator`, returning a collection\n\n/// with the values from `p`.\n\n///\n\n/// If the returned collection cannot be inferred type annotations must be supplied, either by\n\n/// annotating the resulting type binding `let collection: Vec<_> = ...` or by specializing when\n\n/// calling `sep_by`, `sep_by::<Vec<_>, _, _>(...)`\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use combine::*;\n\n/// # use combine::parser::char::digit;\n\n/// # fn main() {\n\n/// let mut parser = sep_end_by(digit(), token(';'));\n\n/// let result_ok = parser.parse(\"1;2;3;\");\n\n/// assert_eq!(result_ok, Ok((vec!['1', '2', '3'], \"\")));\n\n/// let result_ok2 = parser.parse(\"1;2;3\");\n\n/// assert_eq!(result_ok2, Ok((vec!['1', '2', '3'], \"\")));\n\n/// # }\n\n/// ```\n\npub fn sep_end_by<F, Input, P, S>(parser: P, separator: S) -> SepEndBy<F, P, S>\n\nwhere\n\n Input: Stream,\n\n F: Extend<P::Output> + Default,\n\n P: Parser<Input>,\n\n S: Parser<Input>,\n\n{\n\n SepEndBy {\n\n parser,\n\n separator,\n\n _marker: PhantomData,\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct SepEndBy1<F, P, S> {\n\n parser: P,\n\n separator: S,\n\n _marker: PhantomData<fn() -> F>,\n\n}\n", "file_path": "src/parser/repeat.rs", "rank": 70, "score": 217380.17216452872 }, { "content": "fn number<Input>() -> impl Parser<Input, Output = f64>\n\nwhere\n\n Input: Stream<Token = char>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n let i = char('0').map(|_| 0.0).or(integer().map(|x| x as f64));\n\n let fractional = many(digit()).map(|digits: String| {\n\n let mut magnitude = 1.0;\n\n digits.chars().fold(0.0, |acc, d| {\n\n magnitude /= 10.0;\n\n match d.to_digit(10) {\n\n Some(d) => acc + (d as f64) * magnitude,\n\n None => panic!(\"Not a digit\"),\n\n }\n\n })\n\n 
});\n\n\n\n let exp = satisfy(|c| c == 'e' || c == 'E').with(optional(char('-')).and(integer()));\n\n lex(optional(char('-'))\n\n .and(i)\n", "file_path": "benches/json.rs", "rank": 71, "score": 215402.28795348312 }, { "content": "/// Parses a time\n\n/// 12:30:02\n\nfn time<Input>() -> impl Parser<Input, Output = Time>\n\nwhere\n\n Input: Stream<Token = char>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n (\n\n two_digits(),\n\n char(':'),\n\n two_digits(),\n\n char(':'),\n\n two_digits(),\n\n time_zone(),\n\n )\n\n .map(|(hour, _, minute, _, second, time_zone)| {\n\n // Its ok to just unwrap since we only parsed digits\n\n Time {\n\n hour: hour,\n\n minute: minute,\n\n second: second,\n\n time_zone: time_zone,\n\n }\n\n })\n\n}\n\n\n", "file_path": "examples/date.rs", "rank": 72, "score": 215402.28795348312 }, { "content": "fn integer<Input>() -> impl Parser<Input, Output = i64>\n\nwhere\n\n Input: Stream<Token = char>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n lex(many1(digit()))\n\n .map(|s: String| {\n\n let mut n = 0;\n\n for c in s.chars() {\n\n n = n * 10 + (c as i64 - '0' as i64);\n\n }\n\n n\n\n })\n\n .expected(\"integer\")\n\n}\n\n\n", "file_path": "benches/json.rs", "rank": 73, "score": 215402.28795348312 }, { "content": "/// Parses a date\n\n/// 2010-01-30\n\nfn date<Input>() -> impl Parser<Input, Output = Date>\n\nwhere\n\n Input: Stream<Token = char>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n (\n\n many::<String, _, _>(digit()),\n\n char('-'),\n\n two_digits(),\n\n char('-'),\n\n two_digits(),\n\n )\n\n .map(|(year, _, month, _, day)| {\n\n // Its ok to just unwrap since we only parsed digits\n\n Date {\n\n year: year.parse().unwrap(),\n\n month: month,\n\n day: day,\n\n }\n\n })\n\n}\n\n\n", "file_path": "examples/date.rs", "rank": 74, "score": 215402.28795348312 }, { "content": "fn ini<Input>() -> impl Parser<Input, Output = 
Ini>\n\nwhere\n\n Input: Stream<Token = char>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n (whitespace(), properties(), many(section())).map(|(_, global, sections)| Ini {\n\n global: global,\n\n sections: sections,\n\n })\n\n}\n\n\n", "file_path": "examples/ini.rs", "rank": 75, "score": 215402.28795348312 }, { "content": "fn object<Input>() -> impl Parser<Input, Output = Value>\n\nwhere\n\n Input: Stream<Token = char>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n let field = (json_string(), lex(char(':')), json_value()).map(|t| (t.0, t.2));\n\n let fields = sep_by(field, lex(char(',')));\n\n between(lex(char('{')), lex(char('}')), fields)\n\n .map(Value::Object)\n\n .expected(\"object\")\n\n}\n\n\n", "file_path": "benches/json.rs", "rank": 76, "score": 215402.28795348312 }, { "content": "/// Equivalent to [`p.then_partial(f)`].\n\n///\n\n/// [`p.then_partial(f)`]: ../trait.Parser.html#method.then_partial\n\npub fn then_partial<Input, P, F, N>(p: P, f: F) -> ThenPartial<P, F>\n\nwhere\n\n Input: Stream,\n\n F: FnMut(&mut P::Output) -> N,\n\n P: Parser<Input>,\n\n N: Parser<Input>,\n\n{\n\n ThenPartial(p, f)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n\n\n use crate::parser::{token::any, EasyParser};\n\n\n\n #[test]\n\n fn sequence_single_parser() {\n\n assert!((any(),).easy_parse(\"a\").is_ok());\n\n }\n\n}\n", "file_path": "src/parser/sequence.rs", "rank": 77, "score": 213193.75514314554 }, { "content": "/// Equivalent to [`p.map(f)`].\n\n///\n\n/// [`p.map(f)`]: ../trait.Parser.html#method.map\n\npub fn map<Input, P, F, B>(p: P, f: F) -> Map<P, F>\n\nwhere\n\n Input: Stream,\n\n P: Parser<Input>,\n\n F: FnMut(P::Output) -> B,\n\n{\n\n Map(p, f)\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct MapInput<P, F>(P, F);\n\nimpl<Input, A, B, P, F> Parser<Input> for MapInput<P, F>\n\nwhere\n\n Input: Stream,\n\n P: Parser<Input, Output = A>,\n\n F: FnMut(A, &mut Input) -> B,\n\n{\n\n type Output 
= B;\n\n type PartialState = P::PartialState;\n\n\n", "file_path": "src/parser/combinator.rs", "rank": 78, "score": 213193.75514314554 }, { "content": "/// Wraps a function, turning it into a parser.\n\n///\n\n/// Mainly needed to turn closures into parsers as function types can be casted to function pointers\n\n/// to make them usable as a parser.\n\n///\n\n/// ```\n\n/// extern crate combine;\n\n/// # use combine::*;\n\n/// # use combine::parser::char::digit;\n\n/// # use combine::error::{Consumed, StreamError};\n\n/// # use combine::stream::easy;\n\n/// # fn main() {\n\n/// let mut even_digit = parser(|input| {\n\n/// // Help type inference out\n\n/// let _: &mut easy::Stream<&str> = input;\n\n/// let position = input.position();\n\n/// let (char_digit, consumed) = digit().parse_stream(input).into_result()?;\n\n/// let d = (char_digit as i32) - ('0' as i32);\n\n/// if d % 2 == 0 {\n\n/// Ok((d, consumed))\n\n/// }\n\n/// else {\n\n/// //Return an empty error since we only tested the first token of the stream\n\n/// let errors = easy::Errors::new(\n\n/// position,\n\n/// StreamError::expected(\"even number\")\n\n/// );\n\n/// Err(Consumed::Empty(errors.into()))\n\n/// }\n\n/// });\n\n/// let result = even_digit\n\n/// .easy_parse(\"8\")\n\n/// .map(|x| x.0);\n\n/// assert_eq!(result, Ok(8));\n\n/// # }\n\n/// ```\n\npub fn parser<Input, O, F>(f: F) -> FnParser<Input, F>\n\nwhere\n\n Input: Stream,\n\n F: FnMut(&mut Input) -> StdParseResult<O, Input>,\n\n{\n\n FnParser(f, PhantomData)\n\n}\n\n\n\nimpl<Input, O, F> Parser<Input> for FnParser<Input, F>\n\nwhere\n\n Input: Stream,\n\n F: FnMut(&mut Input) -> StdParseResult<O, Input>,\n\n{\n\n type Output = O;\n\n type PartialState = ();\n\n\n\n #[inline]\n\n fn parse_lazy(&mut self, input: &mut Input) -> ParseResult<O, Input::Error> {\n\n (self.0)(input).into()\n\n }\n", "file_path": "src/parser/function.rs", "rank": 79, "score": 212473.68948733265 }, { "content": "fn slice_parse_mode<Input, P, M>(\n\n self_: &mut 
[P],\n\n mode: M,\n\n input: &mut Input,\n\n state: &mut (usize, P::PartialState),\n\n) -> ParseResult<P::Output, <Input as StreamOnce>::Error>\n\nwhere\n\n P: Parser<Input>,\n\n Input: Stream,\n\n M: ParseMode,\n\n{\n\n let mut prev_err = None;\n\n let mut last_parser_having_non_1_offset = 0;\n\n let before = input.checkpoint();\n\n\n\n let (ref mut index_state, ref mut child_state) = *state;\n\n if !mode.is_first() && *index_state != 0 {\n\n return self_[*index_state - 1]\n\n .parse_partial(input, child_state)\n\n .map(|x| {\n", "file_path": "src/parser/choice.rs", "rank": 80, "score": 212015.64749884448 }, { "content": "/// Parses `parser` one or more times separated and ended by `separator`, returning a collection\n\n/// with the values from `p`.\n\n///\n\n/// If the returned collection cannot be inferred type annotations must be\n\n/// supplied, either by annotating the resulting type binding `let collection: Vec<_> = ...` or by\n\n/// specializing when calling `sep_by`, `sep_by1::<Vec<_>, _, _>(...)`.\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use combine::*;\n\n/// # use combine::parser::char::digit;\n\n/// # use combine::stream::easy;\n\n/// # use combine::stream::position::{self, SourcePosition};\n\n/// # fn main() {\n\n/// let mut parser = sep_end_by1(digit(), token(';'));\n\n/// let result_ok = parser.easy_parse(position::Stream::new(\"1;2;3;\"))\n\n/// .map(|(vec, state)| (vec, state.input));\n\n/// assert_eq!(result_ok, Ok((vec!['1', '2', '3'], \"\")));\n\n/// let result_err = parser.easy_parse(position::Stream::new(\"\"));\n\n/// assert_eq!(result_err, Err(easy::Errors {\n\n/// position: SourcePosition::default(),\n\n/// errors: vec![\n\n/// easy::Error::end_of_input(),\n\n/// easy::Error::Expected(\"digit\".into())\n\n/// ]\n\n/// }));\n\n/// # }\n\n/// ```\n\npub fn sep_end_by1<F, Input, P, S>(parser: P, separator: S) -> SepEndBy1<F, P, S>\n\nwhere\n\n Input: Stream,\n\n F: Extend<P::Output> + Default,\n\n P: Parser<Input>,\n\n S: 
Parser<Input>,\n\n{\n\n SepEndBy1 {\n\n parser,\n\n separator,\n\n _marker: PhantomData,\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct Chainl1<P, Op>(P, Op);\n\nimpl<Input, P, Op> Parser<Input> for Chainl1<P, Op>\n\nwhere\n\n Input: Stream,\n\n P: Parser<Input>,\n", "file_path": "src/parser/repeat.rs", "rank": 81, "score": 211702.32680044047 }, { "content": "/// Parses `parser` from `min` to `max` times (including `min` and `max`).\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use combine::*;\n\n/// # use combine::stream::easy::{Error, Info};\n\n/// # fn main() {\n\n/// let mut parser = count_min_max(2, 2, token(b'a'));\n\n///\n\n/// let result = parser.parse(&b\"aaab\"[..]);\n\n/// assert_eq!(result, Ok((b\"aa\"[..].to_owned(), &b\"ab\"[..])));\n\n/// let result = parser.parse(&b\"ab\"[..]);\n\n/// assert!(result.is_err());\n\n/// # }\n\n/// ```\n\n///\n\n/// # Panics\n\n///\n\n/// If `min` > `max`.\n\npub fn count_min_max<F, Input, P>(min: usize, max: usize, parser: P) -> CountMinMax<F, P>\n\nwhere\n\n Input: Stream,\n\n P: Parser<Input>,\n\n F: Extend<P::Output> + Default,\n\n{\n\n assert!(min <= max);\n\n\n\n CountMinMax {\n\n parser,\n\n min: min,\n\n max: max,\n\n _marker: PhantomData,\n\n }\n\n}\n\n\n\nparser! {\n\n pub struct SkipCountMinMax;\n\n type PartialState = <With<CountMinMax<Sink, P>, Value<Input, ()>> as Parser<Input>>::PartialState;\n\n /// Parses `parser` from `min` to `max` times (including `min` and `max`)\n", "file_path": "src/parser/repeat.rs", "rank": 82, "score": 211696.19148600008 }, { "content": "#[inline]\n\nfn json_value<Input>() -> impl Parser<Input, Output = Value>\n\nwhere\n\n Input: Stream<Token = char>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n json_value_()\n\n}\n\n\n\n// We need to use `parser!` to break the recursive use of `value` to prevent the returned parser\n\n// from containing itself\n\nparser! 
{\n\n #[inline]\n\n fn json_value_[Input]()(Input) -> Value\n\n where [ Input: Stream<Token = char> ]\n\n {\n\n let array = between(\n\n lex(char('[')),\n\n lex(char(']')),\n\n sep_by(json_value(), lex(char(','))),\n\n ).map(Value::Array);\n", "file_path": "benches/json.rs", "rank": 83, "score": 211126.46232653368 }, { "content": "fn two_digits<Input>() -> impl Parser<Input, Output = i32>\n\nwhere\n\n Input: Stream<Token = char>,\n\n // Necessary due to rust-lang/rust#24159\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n (digit(), digit()).map(|(x, y): (char, char)| {\n\n let x = x.to_digit(10).expect(\"digit\");\n\n let y = y.to_digit(10).expect(\"digit\");\n\n (x * 10 + y) as i32\n\n })\n\n}\n\n\n", "file_path": "examples/date.rs", "rank": 84, "score": 211121.61987516348 }, { "content": "fn json_char<Input>() -> impl Parser<Input, Output = char>\n\nwhere\n\n Input: Stream<Token = char>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n parser(|input: &mut Input| {\n\n let (c, consumed) = any().parse_lazy(input).into_result()?;\n\n let mut back_slash_char = satisfy_map(|c| {\n\n Some(match c {\n\n '\"' => '\"',\n\n '\\\\' => '\\\\',\n\n '/' => '/',\n\n 'b' => '\\u{0008}',\n\n 'f' => '\\u{000c}',\n\n 'n' => '\\n',\n\n 'r' => '\\r',\n\n 't' => '\\t',\n\n _ => return None,\n\n })\n\n });\n\n match c {\n\n '\\\\' => consumed.combine(|_| back_slash_char.parse_stream(input).into_result()),\n\n '\"' => Err(Consumed::Empty(\n\n Input::Error::empty(input.position()).into(),\n\n )),\n\n _ => Ok((c, consumed)),\n\n }\n\n })\n\n}\n\n\n", "file_path": "benches/json.rs", "rank": 85, "score": 211121.61987516348 }, { "content": "/// Parses a time zone\n\n/// +0012\n\n/// -06:30\n\n/// -01\n\n/// Z\n\nfn time_zone<Input>() -> impl Parser<Input, Output = i32>\n\nwhere\n\n Input: Stream<Token = char>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n let utc = char('Z').map(|_| 
0);\n\n let offset = (\n\n choice([char('-'), char('+')]),\n\n two_digits(),\n\n optional(optional(char(':')).with(two_digits())),\n\n )\n\n .map(|(sign, hour, minute)| {\n\n let offset = hour * 60 + minute.unwrap_or(0);\n\n if sign == '-' {\n\n -offset\n\n } else {\n\n offset\n\n }\n\n });\n\n\n\n utc.or(offset)\n\n}\n\n\n", "file_path": "examples/date.rs", "rank": 86, "score": 211121.61987516348 }, { "content": "fn json_string<Input>() -> impl Parser<Input, Output = String>\n\nwhere\n\n Input: Stream<Token = char>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n between(char('\"'), lex(char('\"')), many(json_char())).expected(\"string\")\n\n}\n\n\n", "file_path": "benches/json.rs", "rank": 87, "score": 211121.61987516348 }, { "content": "/// Parses any token.\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use combine::*;\n\n/// # fn main() {\n\n/// let mut char_parser = any();\n\n/// assert_eq!(char_parser.parse(\"!\").map(|x| x.0), Ok('!'));\n\n/// assert!(char_parser.parse(\"\").is_err());\n\n/// let mut byte_parser = any();\n\n/// assert_eq!(byte_parser.parse(&b\"!\"[..]).map(|x| x.0), Ok(b'!'));\n\n/// assert!(byte_parser.parse(&b\"\"[..]).is_err());\n\n/// # }\n\n/// ```\n\npub fn any<Input>() -> Any<Input>\n\nwhere\n\n Input: Stream,\n\n{\n\n Any(PhantomData)\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct Satisfy<Input, P> {\n\n predicate: P,\n\n _marker: PhantomData<Input>,\n\n}\n\n\n", "file_path": "src/parser/token.rs", "rank": 88, "score": 210930.72497864196 }, { "content": "/// Parses a date time according to ISO8601\n\n/// 2015-08-02T18:54:42+02\n\nfn date_time<Input>() -> impl Parser<Input, Output = DateTime>\n\nwhere\n\n Input: Stream<Token = char>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n (date(), char('T'), time()).map(|(date, _, time)| DateTime {\n\n date: date,\n\n time: time,\n\n })\n\n}\n\n\n", "file_path": "examples/date.rs", "rank": 89, "score": 
207147.1672120681 }, { "content": "fn property<Input>() -> impl Parser<Input, Output = (String, String)>\n\nwhere\n\n Input: Stream<Token = char>,\n\n // Necessary due to rust-lang/rust#24159\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n (\n\n many1(satisfy(|c| c != '=' && c != '[' && c != ';')),\n\n token('='),\n\n many1(satisfy(|c| c != '\\n' && c != ';')),\n\n )\n\n .map(|(key, _, value)| (key, value))\n\n .message(\"while parsing property\")\n\n}\n\n\n", "file_path": "examples/ini.rs", "rank": 90, "score": 206766.1476454692 }, { "content": "fn end_of_line<'a, Input>() -> impl Parser<Input, Output = u8>\n\nwhere\n\n Input: RangeStream<Token = u8, Range = &'a [u8]>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n (token(b'\\r'), token(b'\\n')).map(|_| b'\\r').or(token(b'\\n'))\n\n}\n\n\n", "file_path": "benches/http.rs", "rank": 91, "score": 206766.1476454692 }, { "content": "/// Equivalent to [`p.flat_map(f)`].\n\n///\n\n/// [`p.flat_map(f)`]: ../trait.Parser.html#method.flat_map\n\npub fn flat_map<Input, P, F, B>(p: P, f: F) -> FlatMap<P, F>\n\nwhere\n\n Input: Stream,\n\n P: Parser<Input>,\n\n F: FnMut(P::Output) -> Result<B, <Input as StreamOnce>::Error>,\n\n{\n\n FlatMap(p, f)\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct AndThen<P, F>(P, F);\n\nimpl<Input, P, F, O, E> Parser<Input> for AndThen<P, F>\n\nwhere\n\n Input: Stream,\n\n P: Parser<Input>,\n\n F: FnMut(P::Output) -> Result<O, E>,\n\n E: Into<<Input::Error as ParseError<Input::Token, Input::Range, Input::Position>>::StreamError>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n type Output = O;\n", "file_path": "src/parser/combinator.rs", "rank": 92, "score": 205918.10886853456 }, { "content": "/// Parses the string `s`, using `cmp` to compare each character.\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use combine::*;\n\n/// # use combine::parser::char::string_cmp;\n\n/// use 
std::ascii::AsciiExt;\n\n/// # fn main() {\n\n/// let result = string_cmp(\"rust\", |l, r| l.eq_ignore_ascii_case(&r))\n\n/// .parse(\"RusT\")\n\n/// .map(|x| x.0);\n\n/// assert_eq!(result, Ok(\"rust\"));\n\n/// # }\n\n/// ```\n\npub fn string_cmp<'a, C, Input>(s: &'static str, cmp: C) -> impl Parser<Input, Output = &'a str>\n\nwhere\n\n C: FnMut(char, char) -> bool,\n\n Input: Stream<Token = char>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n tokens_cmp(s.chars(), cmp).map(move |_| s).expected(s)\n\n}\n\n\n\n#[cfg(all(feature = \"std\", test))]\n\nmod tests {\n\n\n\n use crate::{\n\n parser::EasyParser,\n\n stream::{\n\n easy::{Error, Errors},\n\n position::{self, SourcePosition},\n\n },\n\n };\n\n\n", "file_path": "src/parser/char.rs", "rank": 93, "score": 204614.10020233045 }, { "content": "fn message_header<'a, Input>() -> impl Parser<Input, Output = Header<'a>>\n\nwhere\n\n Input: RangeStream<Token = u8, Range = &'a [u8]>,\n\n Input::Error: ParseError<Input::Token, Input::Range, Input::Position>,\n\n{\n\n let message_header_line = (\n\n take_while1(is_horizontal_space),\n\n take_while1(|c| c != b'\\r' && c != b'\\n'),\n\n end_of_line(),\n\n )\n\n .map(|(_, line, _)| line);\n\n\n\n struct_parser!(Header {\n\n name: take_while1(is_token),\n\n _: token(b':'),\n\n value: many1(message_header_line),\n\n })\n\n}\n\n\n", "file_path": "benches/http.rs", "rank": 94, "score": 202790.9006093298 }, { "content": "/// `ChoiceParser` represents a parser which may parse one of several different choices depending\n\n/// on the input.\n\n///\n\n/// This is an internal trait used to overload the `choice` function.\n\npub trait ChoiceParser<Input: Stream> {\n\n type Output;\n\n type PartialState: Default;\n\n\n\n fn parse_first(\n\n &mut self,\n\n input: &mut Input,\n\n state: &mut Self::PartialState,\n\n ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>;\n\n\n\n fn parse_partial(\n\n &mut self,\n\n input: &mut Input,\n\n state: 
&mut Self::PartialState,\n\n ) -> ParseResult<Self::Output, <Input as StreamOnce>::Error>;\n\n\n\n fn parse_mode_choice<M>(\n\n &mut self,\n\n mode: M,\n\n input: &mut Input,\n", "file_path": "src/parser/choice.rs", "rank": 95, "score": 202414.387502386 }, { "content": "/// Succeeds only if the stream is at end of input, fails otherwise.\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use combine::*;\n\n/// # use combine::stream::easy;\n\n/// # use combine::stream::position::{self, SourcePosition};\n\n/// # fn main() {\n\n/// let mut parser = eof();\n\n/// assert_eq!(parser.easy_parse(position::Stream::new(\"\")), Ok(((), position::Stream::new(\"\"))));\n\n/// assert_eq!(parser.easy_parse(position::Stream::new(\"x\")), Err(easy::Errors {\n\n/// position: SourcePosition::default(),\n\n/// errors: vec![\n\n/// easy::Error::Unexpected('x'.into()),\n\n/// easy::Error::Expected(\"end of input\".into())\n\n/// ]\n\n/// }));\n\n/// # }\n\n/// ```\n\npub fn eof<Input>() -> Eof<Input>\n\nwhere\n\n Input: Stream,\n\n{\n\n Eof(PhantomData)\n\n}\n", "file_path": "src/parser/token.rs", "rank": 96, "score": 200522.58600475136 }, { "content": "/// Parser which just returns the current position in the stream.\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use combine::*;\n\n/// # use combine::stream::position::{self, SourcePosition};\n\n/// # fn main() {\n\n/// let result = (position(), token('!'), position())\n\n/// .parse(position::Stream::new(\"!\"))\n\n/// .map(|x| x.0);\n\n/// assert_eq!(result, Ok((SourcePosition { line: 1, column: 1 },\n\n/// '!',\n\n/// SourcePosition { line: 1, column: 2 })));\n\n/// # }\n\n/// ```\n\npub fn position<Input>() -> Position<Input>\n\nwhere\n\n Input: Stream,\n\n{\n\n Position {\n\n _marker: PhantomData,\n\n }\n\n}\n\n\n\n#[derive(Copy, Clone)]\n\npub struct OneOf<T, Input>\n\nwhere\n\n Input: Stream,\n\n{\n\n tokens: T,\n\n _marker: PhantomData<Input>,\n\n}\n\n\n\nimpl<Input, T> Parser<Input> for OneOf<T, 
Input>\n\nwhere\n", "file_path": "src/parser/token.rs", "rank": 97, "score": 200511.00543051344 }, { "content": "/// Parses multiple tokens.\n\n///\n\n/// Consumes items from the input and compares them to the values from `tokens` using the\n\n/// comparison function `cmp`. Succeeds if all the items from `tokens` are matched in the input\n\n/// stream and fails otherwise with `expected` used as part of the error.\n\n///\n\n/// ```\n\n/// # extern crate combine;\n\n/// # use combine::*;\n\n/// # use combine::error;\n\n/// # fn main() {\n\n/// let result = tokens(|l, r| l.eq_ignore_ascii_case(&r), \"abc\", \"abc\".chars())\n\n/// .parse(\"AbC\")\n\n/// .map(|x| x.0.as_str());\n\n/// assert_eq!(result, Ok(\"abc\"));\n\n/// let result = tokens(\n\n/// |&l, r| (if l < r { r - l } else { l - r }) <= 2,\n\n/// error::Range(&b\"025\"[..]),\n\n/// &b\"025\"[..]\n\n/// )\n\n/// .parse(&b\"123\"[..])\n\n/// .map(|x| x.0);\n\n/// assert_eq!(result, Ok(&b\"025\"[..]));\n\n/// # }\n\n/// ```\n\npub fn tokens<C, E, T, Input>(cmp: C, expected: E, tokens: T) -> Tokens<C, E, T, Input>\n\nwhere\n\n C: FnMut(T::Item, Input::Token) -> bool,\n\n T: Clone + IntoIterator,\n\n Input: Stream,\n\n{\n\n Tokens {\n\n cmp,\n\n expected,\n\n tokens,\n\n _marker: PhantomData,\n\n }\n\n}\n\n\n\n#[derive(Clone)]\n\npub struct TokensCmp<C, T, Input>\n\nwhere\n\n Input: Stream,\n\n{\n\n cmp: C,\n", "file_path": "src/parser/token.rs", "rank": 98, "score": 200169.97650165638 }, { "content": "#[inline]\n\npub fn uncons_range<Input>(\n\n input: &mut Input,\n\n size: usize,\n\n) -> ParseResult<Input::Range, <Input as StreamOnce>::Error>\n\nwhere\n\n Input: ?Sized + RangeStream,\n\n{\n\n match input.uncons_range(size) {\n\n Err(err) => wrap_stream_error(input, err),\n\n Ok(x) => {\n\n if size == 0 {\n\n EmptyOk(x)\n\n } else {\n\n ConsumedOk(x)\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/stream/mod.rs", "rank": 99, "score": 198303.38983789482 } ]
Rust
providers/nitro/nitro-helper/src/command/nitro_enclave.rs
chatchai-hub/tmkms-light
972a739277002704308bfc4e75a0cfa79f62bbb6
use crate::command::check_vsock_proxy; use crate::config::{EnclaveOpt, VSockProxyOpt}; use crate::enclave_log_server::LogServer; use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; use std::process::{Command, Output}; use std::sync::mpsc::Receiver; #[derive(Clone, Serialize, Deserialize)] pub struct EnclaveDescribeInfo { #[serde(rename = "EnclaveID")] pub enclave_id: String, #[serde(rename = "ProcessID")] pub process_id: u32, #[serde(rename = "EnclaveCID")] pub enclave_cid: u64, #[serde(rename = "NumberOfCPUs")] pub cpu_count: u64, #[serde(rename = "CPUIDs")] pub cpu_ids: Vec<u32>, #[serde(rename = "MemoryMiB")] pub memory_mib: u64, #[serde(rename = "State")] pub state: String, #[serde(rename = "Flags")] pub flags: String, } #[derive(Clone, Serialize, Deserialize)] pub struct EnclaveRunInfo { #[serde(rename = "EnclaveID")] pub enclave_id: String, #[serde(rename = "ProcessID")] pub process_id: u32, #[serde(rename = "EnclaveCID")] pub enclave_cid: u64, #[serde(rename = "NumberOfCPUs")] pub cpu_count: usize, #[serde(rename = "CPUIDs")] pub cpu_ids: Vec<u32>, #[serde(rename = "MemoryMiB")] pub memory_mib: u64, } #[derive(Clone, Serialize, Deserialize)] pub struct EnclaveTerminateInfo { #[serde(rename = "EnclaveID")] pub enclave_id: String, #[serde(rename = "Terminated")] pub terminated: bool, } fn parse_output<T: DeserializeOwned>(output: Output) -> Result<T, String> { if !output.status.success() { return Err(format!( "{}, status code: {:?}", String::from_utf8_lossy(output.stderr.as_slice()), output.status.code(), )); } serde_json::from_slice(output.stdout.as_slice()) .map_err(|_| "command invalid output".to_string()) } fn run_enclave_daemon( image_path: &str, cpu_count: usize, memory_mib: u64, cid: Option<u64>, ) -> Result<EnclaveRunInfo, String> { let mut cmd = Command::new("nitro-cli"); cmd.arg("run-enclave") .args(&["--eif-path", image_path]) .args(&["--cpu-count", &format!("{}", cpu_count)]) .args(&["--memory", &format!("{}", memory_mib)]); if 
let Some(cid) = cid { cmd.args(&["--cid", &cid.to_string()]); } let output = cmd .output() .map_err(|e| format!("execute nitro-cli error: {}", e))?; parse_output(output) } pub fn run_enclave(opt: &EnclaveOpt, stop_receiver: Receiver<()>) -> Result<(), String> { let enclave_info = describe_enclave()?; if !enclave_info.is_empty() { let info = serde_json::to_string_pretty(&enclave_info).expect("get invalid enclave info"); return Err(format!( "the following enclave is already active, please stop and try again:\n{:?}", info )); } tracing::info!("start enclave log server at port {}", opt.log_server_port); let enclave_log_server = LogServer::new( opt.log_server_port, opt.log_to_console, opt.log_file.clone(), ) .map_err(|e| format!("{:?}", e))?; enclave_log_server.launch(); let info = run_enclave_daemon( &opt.eif_path, opt.cpu_count, opt.memory_mib, opt.enclave_cid, )?; let s = serde_json::to_string_pretty(&info).unwrap(); tracing::info!("run enclave success:\n{}", s); let _ = stop_receiver.recv(); let _ = stop_enclave(Some(info.enclave_id)); Ok(()) } pub fn stop_enclave(cid: Option<String>) -> Result<EnclaveTerminateInfo, String> { let mut cmd = Command::new("nitro-cli"); cmd.arg("terminate-enclave"); if let Some(id) = cid { cmd.args(&["--enclave-id", &id]); } else { cmd.arg("--all"); } let output = cmd .output() .map_err(|e| format!("execute nitro-cli error: {:?}", e))?; parse_output(output) } pub fn describe_enclave() -> Result<Vec<EnclaveDescribeInfo>, String> { let output = Command::new("nitro-cli") .arg("describe-enclaves") .output() .map_err(|e| format!("execute nitro-cli error: {:?}", e))?; parse_output(output) } pub fn run_vsock_proxy(opt: &VSockProxyOpt, stop_receiver: Receiver<()>) -> Result<(), String> { tracing::debug!("run vsock proxy with config: {:?}", opt); if check_vsock_proxy() { tracing::warn!("vsock proxy is already running, ignore this start"); return Ok(()); } let mut child = Command::new("vsock-proxy") .args(&["--num_workers", &format!("{}", 
opt.num_workers)]) .args(&["--config", &opt.config_file]) .arg(opt.local_port.to_string()) .arg(&opt.remote_addr) .arg(opt.remote_port.to_string()) .spawn() .map_err(|e| format!("spawn vsock proxy error: {:?}", e))?; let _ = stop_receiver.recv(); if child.kill().is_ok() { tracing::info!("vsock proxy stopped"); } Ok(()) }
use crate::command::check_vsock_proxy; use crate::config::{EnclaveOpt, VSockProxyOpt}; use crate::enclave_log_server::LogServer; use serde::de::DeserializeOwned; use serde::{Deserialize, Serialize}; use std::process::{Command, Output}; use std::sync::mpsc::Receiver; #[derive(Clone, Serialize, Deserialize)] pub struct EnclaveDescribeInfo { #[serde(rename = "EnclaveID")] pub enclave_id: String, #[serde(rename = "ProcessID")] pub process_id: u32, #[serde(rename = "EnclaveCID")] pub enclave_cid: u64, #[serde(rename = "NumberOfCPUs")] pub cpu_count: u64, #[serde(rename = "CPUIDs")] pub cpu_ids: Vec<u32>, #[serde(rename = "MemoryMiB")] pub memory_mib: u64, #[serde(rename = "State")] pub state: String, #[serde(rename = "Flags")] pub flags: String, } #[derive(Clone, Serialize, Deserialize)] pub struct EnclaveRunInfo { #[serde(rename = "EnclaveID")] pub enclave_id: String, #[serde(rename = "ProcessID")] pub process_id: u32, #[serde(rename = "EnclaveCID")] pub enclave_cid: u64, #[serde(rename = "NumberOfCPUs")] pub cpu_count: usize, #[serde(rename = "CPUIDs")] pub cpu_ids: Vec<u32>, #[serde(rename = "MemoryMiB")] pub memory_mib: u64, } #[derive(Clone, Serialize, Deserialize)] pub struct EnclaveTerminateInfo { #[serde(rename = "EnclaveID")] pub enclave_id: String, #[serde(rename = "Terminated")] pub terminated: bool, } fn parse_output<T: DeserializeOwned>(output: Output) -> Result<T, String> { if !output.status.success() { return
; } serde_json::from_slice(output.stdout.as_slice()) .map_err(|_| "command invalid output".to_string()) } fn run_enclave_daemon( image_path: &str, cpu_count: usize, memory_mib: u64, cid: Option<u64>, ) -> Result<EnclaveRunInfo, String> { let mut cmd = Command::new("nitro-cli"); cmd.arg("run-enclave") .args(&["--eif-path", image_path]) .args(&["--cpu-count", &format!("{}", cpu_count)]) .args(&["--memory", &format!("{}", memory_mib)]); if let Some(cid) = cid { cmd.args(&["--cid", &cid.to_string()]); } let output = cmd .output() .map_err(|e| format!("execute nitro-cli error: {}", e))?; parse_output(output) } pub fn run_enclave(opt: &EnclaveOpt, stop_receiver: Receiver<()>) -> Result<(), String> { let enclave_info = describe_enclave()?; if !enclave_info.is_empty() { let info = serde_json::to_string_pretty(&enclave_info).expect("get invalid enclave info"); return Err(format!( "the following enclave is already active, please stop and try again:\n{:?}", info )); } tracing::info!("start enclave log server at port {}", opt.log_server_port); let enclave_log_server = LogServer::new( opt.log_server_port, opt.log_to_console, opt.log_file.clone(), ) .map_err(|e| format!("{:?}", e))?; enclave_log_server.launch(); let info = run_enclave_daemon( &opt.eif_path, opt.cpu_count, opt.memory_mib, opt.enclave_cid, )?; let s = serde_json::to_string_pretty(&info).unwrap(); tracing::info!("run enclave success:\n{}", s); let _ = stop_receiver.recv(); let _ = stop_enclave(Some(info.enclave_id)); Ok(()) } pub fn stop_enclave(cid: Option<String>) -> Result<EnclaveTerminateInfo, String> { let mut cmd = Command::new("nitro-cli"); cmd.arg("terminate-enclave"); if let Some(id) = cid { cmd.args(&["--enclave-id", &id]); } else { cmd.arg("--all"); } let output = cmd .output() .map_err(|e| format!("execute nitro-cli error: {:?}", e))?; parse_output(output) } pub fn describe_enclave() -> Result<Vec<EnclaveDescribeInfo>, String> { let output = Command::new("nitro-cli") .arg("describe-enclaves") .output() 
.map_err(|e| format!("execute nitro-cli error: {:?}", e))?; parse_output(output) } pub fn run_vsock_proxy(opt: &VSockProxyOpt, stop_receiver: Receiver<()>) -> Result<(), String> { tracing::debug!("run vsock proxy with config: {:?}", opt); if check_vsock_proxy() { tracing::warn!("vsock proxy is already running, ignore this start"); return Ok(()); } let mut child = Command::new("vsock-proxy") .args(&["--num_workers", &format!("{}", opt.num_workers)]) .args(&["--config", &opt.config_file]) .arg(opt.local_port.to_string()) .arg(&opt.remote_addr) .arg(opt.remote_port.to_string()) .spawn() .map_err(|e| format!("spawn vsock proxy error: {:?}", e))?; let _ = stop_receiver.recv(); if child.kill().is_ok() { tracing::info!("vsock proxy stopped"); } Ok(()) }
Err(format!( "{}, status code: {:?}", String::from_utf8_lossy(output.stderr.as_slice()), output.status.code(), ))
call_expression
[]
Rust
src/main.rs
mverleg/next_semver
c9c9833f41e4fb768354479a05f32f6440ff2325
use ::std::fmt; use ::rocket::get; use ::rocket::launch; use ::rocket::request::FromParam; use ::rocket::response::status; use ::rocket::routes; use ::rocket::Build; use ::rocket::Rocket; use ::semver::Version; use ::next_semver::bump; use ::next_semver::Part; #[cfg(feature = "jemalloc")] #[global_allocator] static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc; #[derive(Debug, Clone, Copy)] pub enum BumpPart { Major, Minor, Patch, } impl From<BumpPart> for Part { fn from(part: BumpPart) -> Self { match part { BumpPart::Major => Part::Major, BumpPart::Minor => Part::Minor, BumpPart::Patch => Part::Patch, } } } impl<'a> FromParam<'a> for BumpPart { type Error = (); fn from_param(param: &'a str) -> Result<Self, Self::Error> { Ok(match param { "ma" | "major" | "breaking" => BumpPart::Major, "mi" | "minor" | "feature" => BumpPart::Minor, "pa" | "patch" | "fix" => BumpPart::Patch, _ => return Err(()), }) } } #[derive(Debug, Clone)] pub struct BumpVersion { version: Version, } impl From<BumpVersion> for Version { fn from(version: BumpVersion) -> Self { version.version } } impl fmt::Display for BumpVersion { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.version) } } impl<'a> FromParam<'a> for BumpVersion { type Error = (); fn from_param(param: &'a str) -> Result<Self, Self::Error> { Ok(BumpVersion { version: Version::parse(param).map_err(|_| ())?, }) } } #[derive(Debug, Clone)] pub struct PrefixBumpVersion { version: Version, } impl From<PrefixBumpVersion> for Version { fn from(version: PrefixBumpVersion) -> Self { version.version } } impl<'a> FromParam<'a> for PrefixBumpVersion { type Error = (); fn from_param(param: &'a str) -> Result<Self, Self::Error> { if !param.starts_with('v') { return Err(()); }; Ok(PrefixBumpVersion { version: Version::parse(&param[1..]).map_err(|_| ())?, }) } } #[get("/<part>/<version>", rank = 1)] fn next(part: BumpPart, version: BumpVersion) -> String { bump(&version.into(), part.into()).to_string() } 
#[get("/<part>/<version>", rank = 2)] fn next_prefix(part: BumpPart, version: PrefixBumpVersion) -> String { bump(&version.into(), part.into()).to_string() } #[get("/<part>/<_>", rank = 3)] fn part_err(part: &str) -> status::BadRequest<String> { status::BadRequest(Some(format!( "cannot parse part (first part of path): '{}' \ should be one of 'major', 'minor' or 'patch'", part ))) } #[get("/<_>/<version>", rank = 4)] fn version_err(version: &str) -> status::BadRequest<String> { status::BadRequest(Some(format!( "cannot parse version (second part of path): '{}' \ should be a semver, e.g. '1.2.4'", version ))) } #[get("/<_>/<_>/<_>")] fn three_parts() -> status::BadRequest<String> { status::BadRequest(Some( "path too long, expected two parts, e.g. /major/1.2.4 or /patch/0.2.0".to_owned(), )) } #[get("/<_>/<_>/<_>/<_>")] fn four_parts() -> status::BadRequest<String> { status::BadRequest(Some( "path too long, expected two parts, e.g. /major/1.2.4 or /patch/0.2.0".to_owned(), )) } #[get("/<param>")] fn missing_part(param: &str) -> status::BadRequest<String> { status::BadRequest(Some(format!( "found only one path part ('{}'), expected two \ parts, e.g. /major/1.2.4 or /patch/0.2.0", param ))) } #[get("/")] fn fallback() -> status::BadRequest<String> { status::BadRequest(Some( ("Welcome to next_semver! This service gives you \ bumped version numbers. Are you on version 1.2.5 and have a new feature? Request \ /minor/1.2.5 and you get your next version: 1.3.0. It is extremely simple. First path \ part is major, minor or patch, second part is the current semantic version.") .to_owned(), )) } #[launch] fn rocket() -> Rocket<Build> { rocket::build().mount( "/", routes![ next, next_prefix, part_err, version_err, three_parts, four_parts, missing_part, fallback, ], ) }
use ::std::fmt; use ::rocket::get; use ::rocket::launch; use ::rocket::request::FromParam; use ::rocket::response::status; use ::rocket::routes; use ::rocket::Build; use ::rocket::Rocket; use ::semver::Version; use ::next_semver::bump; use ::next_semver::Part; #[cfg(feature = "jemalloc")] #[global_allocator] static ALLOC: jemallocator::Jemalloc = jemallocator::Jemalloc; #[derive(Debug, Clone, Copy)] pub enum BumpPart { Major, Minor, Patch, } impl From<BumpPart> for Part { fn from(part: BumpPart) -> Self { match part { BumpPart::Major => Part::Major, BumpPart::Minor => Part::Minor, BumpPart::Patch => Part::Patch, } } } impl<'a> FromParam<'a> for BumpPart { type Error = (); fn from_param(param: &'a str) -> Result<Self, Self::Error> { Ok(
) } } #[derive(Debug, Clone)] pub struct BumpVersion { version: Version, } impl From<BumpVersion> for Version { fn from(version: BumpVersion) -> Self { version.version } } impl fmt::Display for BumpVersion { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.version) } } impl<'a> FromParam<'a> for BumpVersion { type Error = (); fn from_param(param: &'a str) -> Result<Self, Self::Error> { Ok(BumpVersion { version: Version::parse(param).map_err(|_| ())?, }) } } #[derive(Debug, Clone)] pub struct PrefixBumpVersion { version: Version, } impl From<PrefixBumpVersion> for Version { fn from(version: PrefixBumpVersion) -> Self { version.version } } impl<'a> FromParam<'a> for PrefixBumpVersion { type Error = (); fn from_param(param: &'a str) -> Result<Self, Self::Error> { if !param.starts_with('v') { return Err(()); }; Ok(PrefixBumpVersion { version: Version::parse(&param[1..]).map_err(|_| ())?, }) } } #[get("/<part>/<version>", rank = 1)] fn next(part: BumpPart, version: BumpVersion) -> String { bump(&version.into(), part.into()).to_string() } #[get("/<part>/<version>", rank = 2)] fn next_prefix(part: BumpPart, version: PrefixBumpVersion) -> String { bump(&version.into(), part.into()).to_string() } #[get("/<part>/<_>", rank = 3)] fn part_err(part: &str) -> status::BadRequest<String> { status::BadRequest(Some(format!( "cannot parse part (first part of path): '{}' \ should be one of 'major', 'minor' or 'patch'", part ))) } #[get("/<_>/<version>", rank = 4)] fn version_err(version: &str) -> status::BadRequest<String> { status::BadRequest(Some(format!( "cannot parse version (second part of path): '{}' \ should be a semver, e.g. '1.2.4'", version ))) } #[get("/<_>/<_>/<_>")] fn three_parts() -> status::BadRequest<String> { status::BadRequest(Some( "path too long, expected two parts, e.g. 
/major/1.2.4 or /patch/0.2.0".to_owned(), )) } #[get("/<_>/<_>/<_>/<_>")] fn four_parts() -> status::BadRequest<String> { status::BadRequest(Some( "path too long, expected two parts, e.g. /major/1.2.4 or /patch/0.2.0".to_owned(), )) } #[get("/<param>")] fn missing_part(param: &str) -> status::BadRequest<String> { status::BadRequest(Some(format!( "found only one path part ('{}'), expected two \ parts, e.g. /major/1.2.4 or /patch/0.2.0", param ))) } #[get("/")] fn fallback() -> status::BadRequest<String> { status::BadRequest(Some( ("Welcome to next_semver! This service gives you \ bumped version numbers. Are you on version 1.2.5 and have a new feature? Request \ /minor/1.2.5 and you get your next version: 1.3.0. It is extremely simple. First path \ part is major, minor or patch, second part is the current semantic version.") .to_owned(), )) } #[launch] fn rocket() -> Rocket<Build> { rocket::build().mount( "/", routes![ next, next_prefix, part_err, version_err, three_parts, four_parts, missing_part, fallback, ], ) }
match param { "ma" | "major" | "breaking" => BumpPart::Major, "mi" | "minor" | "feature" => BumpPart::Minor, "pa" | "patch" | "fix" => BumpPart::Patch, _ => return Err(()), }
if_condition
[ { "content": "pub fn bump(version: impl Borrow<Version>, part: Part) -> Version {\n\n let version = version.borrow();\n\n match part {\n\n Part::Major => Version {\n\n major: version.major + 1,\n\n minor: 0,\n\n patch: 0,\n\n pre: version.pre.clone(),\n\n build: BuildMetadata::EMPTY,\n\n },\n\n Part::Minor => Version {\n\n major: version.major,\n\n minor: version.minor + 1,\n\n patch: 0,\n\n pre: version.pre.clone(),\n\n build: BuildMetadata::EMPTY,\n\n },\n\n Part::Patch => Version {\n\n major: version.major,\n\n minor: version.minor,\n", "file_path": "src/lib.rs", "rank": 0, "score": 75907.5665841834 }, { "content": "use ::std::borrow::Borrow;\n\n\n\nuse ::semver::{BuildMetadata, Version};\n\n\n\n#[derive(Clone, Copy)]\n\npub enum Part {\n\n Major,\n\n Minor,\n\n Patch,\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 11, "score": 10.001670501369805 }, { "content": " patch: version.patch + 1,\n\n pre: version.pre.clone(),\n\n build: BuildMetadata::EMPTY,\n\n },\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn bump_major() {\n\n assert_eq!(v(\"1.0.0\"), bump(v(\"0.1.2\"), Part::Major));\n\n assert_eq!(v(\"3.0.0\"), bump(v(\"2.2.2\"), Part::Major));\n\n }\n\n\n\n #[test]\n\n fn bump_minor() {\n\n assert_eq!(v(\"0.2.0\"), bump(v(\"0.1.2\"), Part::Minor));\n", "file_path": "src/lib.rs", "rank": 14, "score": 6.7300396759270384 }, { "content": " assert_eq!(v(\"2.3.0\"), bump(v(\"2.2.2\"), Part::Minor));\n\n }\n\n\n\n #[test]\n\n fn bump_patch() {\n\n assert_eq!(v(\"0.1.3\"), bump(v(\"0.1.2\"), Part::Patch));\n\n assert_eq!(v(\"2.2.3\"), bump(v(\"2.2.2\"), Part::Patch));\n\n }\n\n\n\n #[test]\n\n fn borrow_and_owned() {\n\n bump(v(\"0.2.2\"), Part::Minor);\n\n bump(&v(\"0.2.2\"), Part::Minor);\n\n }\n\n\n\n #[test]\n\n fn keep_pre() {\n\n assert_eq!(v(\"1.3.0-alpha\").to_string(), next_minor(\"1.2.5-alpha\"))\n\n }\n\n\n", "file_path": "src/lib.rs", "rank": 17, "score": 4.656722987385506 }, { "content": " #[test]\n\n fn 
strip_build() {\n\n assert_eq!(v(\"1.3.0\").to_string(), next_minor(\"1.2.5+567\"))\n\n }\n\n\n\n #[test]\n\n fn pre_and_build() {\n\n assert_eq!(v(\"1.3.0-alpha\").to_string(), next_minor(\"1.2.5-alpha+567\"))\n\n }\n\n\n\n fn v(version: &str) -> Version {\n\n Version::parse(version).unwrap()\n\n }\n\n\n\n fn next_minor(version: &str) -> String {\n\n bump(v(version), Part::Minor).to_string()\n\n }\n\n}\n", "file_path": "src/lib.rs", "rank": 18, "score": 3.647728051344191 }, { "content": "# Next semver\n\n\n\nThis is an extremely simple service. You send it your current version and the type of bump you want, and you get back the new version.\n\n\n\n* `/minor/1.2.4` -> `1.3.0`\n\n* `/patch/0.3.7-alpha` -> `0.3.8-alpha`\n\n\n\nI have this as a http endpoint to keep my CI pipelines code-free.\n\n\n\n## Webservice\n\n\n\nNo guarantees about uptime, but I intend to have the service mostly available at [next.tryin.top](https://next.tryin.top). Example:\n\n\n\n curl -f https://next.tryin.top/minor/v1.2.4\n\n # 1.3.0\n\n\n\n## Run locally\n\n\n\nYou can run with just Docker ([the image](https://hub.docker.com/repository/docker/mverleg/next_semver) is 2MB):\n\n\n\n docker run -p8080:8080 -it mverleg/next_semver:latest\n\n\n\nOr you can build it yourself with Cargo, after checking out the code:\n\n\n\n ROCKET_ADDRESS=\"0.0.0.0\" ROCKET_PORT=8080 cargo run --features=web --bin next_semver \n\n\n\n## Crate\n\n\n\nThe crate is available as [next_semver](https://crates.io/crates/next_semver) and can be used as a library (without web dependencies).\n\n\n", "file_path": "README.md", "rank": 19, "score": 1.764832614062533 } ]
Rust
plotters/src/coord/ranged2d/cartesian.rs
facorread/plotters
f86adaec5236551d9be3adf5c631549a1bc1c977
/*! The 2-dimensional cartesian coordinate system. This module provides the 2D cartesian coordinate system, which is composed by two independent ranged 1D coordinate sepcification. This types of coordinate system is used by the chart constructed with [ChartBuilder::build_cartesian_2d](../../chart/ChartBuilder.html#method.build_cartesian_2d). */ use crate::coord::ranged1d::{KeyPointHint, Ranged, ReversibleRanged}; use crate::coord::{CoordTranslate, ReverseCoordTranslate}; use crate::style::ShapeStyle; use plotters_backend::{BackendCoord, DrawingBackend, DrawingErrorKind}; use std::ops::Range; #[derive(Clone)] pub struct Cartesian2d<X: Ranged, Y: Ranged> { logic_x: X, logic_y: Y, back_x: (i32, i32), back_y: (i32, i32), } impl<X: Ranged, Y: Ranged> Cartesian2d<X, Y> { pub fn new<IntoX: Into<X>, IntoY: Into<Y>>( logic_x: IntoX, logic_y: IntoY, actual: (Range<i32>, Range<i32>), ) -> Self { Self { logic_x: logic_x.into(), logic_y: logic_y.into(), back_x: (actual.0.start, actual.0.end), back_y: (actual.1.start, actual.1.end), } } pub fn draw_mesh< E, DrawMesh: FnMut(MeshLine<X, Y>) -> Result<(), E>, XH: KeyPointHint, YH: KeyPointHint, >( &self, h_limit: YH, v_limit: XH, mut draw_mesh: DrawMesh, ) -> Result<(), E> { let (xkp, ykp) = ( self.logic_x.key_points(v_limit), self.logic_y.key_points(h_limit), ); for logic_x in xkp { let x = self.logic_x.map(&logic_x, self.back_x); draw_mesh(MeshLine::XMesh( (x, self.back_y.0), (x, self.back_y.1), &logic_x, ))?; } for logic_y in ykp { let y = self.logic_y.map(&logic_y, self.back_y); draw_mesh(MeshLine::YMesh( (self.back_x.0, y), (self.back_x.1, y), &logic_y, ))?; } Ok(()) } pub fn get_x_range(&self) -> Range<X::ValueType> { self.logic_x.range() } pub fn get_y_range(&self) -> Range<Y::ValueType> { self.logic_y.range() } pub fn get_x_axis_pixel_range(&self) -> Range<i32> { self.logic_x.axis_pixel_range(self.back_x) } pub fn get_y_axis_pixel_range(&self) -> Range<i32> { self.logic_y.axis_pixel_range(self.back_y) } pub fn x_spec(&self) 
-> &X { &self.logic_x } pub fn y_spec(&self) -> &Y { &self.logic_y } } impl<X: Ranged, Y: Ranged> CoordTranslate for Cartesian2d<X, Y> { type From = (X::ValueType, Y::ValueType); fn translate(&self, from: &Self::From) -> BackendCoord { ( self.logic_x.map(&from.0, self.back_x), self.logic_y.map(&from.1, self.back_y), ) } } impl<X: ReversibleRanged, Y: ReversibleRanged> ReverseCoordTranslate for Cartesian2d<X, Y> { fn reverse_translate(&self, input: BackendCoord) -> Option<Self::From> { Some(( self.logic_x.unmap(input.0, self.back_x)?, self.logic_y.unmap(input.1, self.back_y)?, )) } } pub enum MeshLine<'a, X: Ranged, Y: Ranged> { XMesh(BackendCoord, BackendCoord, &'a X::ValueType), YMesh(BackendCoord, BackendCoord, &'a Y::ValueType), } impl<'a, X: Ranged, Y: Ranged> MeshLine<'a, X, Y> { pub fn draw<DB: DrawingBackend>( &self, backend: &mut DB, style: &ShapeStyle, ) -> Result<(), DrawingErrorKind<DB::ErrorType>> { let (&left, &right) = match self { MeshLine::XMesh(a, b, _) => (a, b), MeshLine::YMesh(a, b, _) => (a, b), }; backend.draw_line(left, right, style) } }
/*! The 2-dimensional cartesian coordinate system. This module provides the 2D cartesian coordinate system, which is composed by two independent ranged 1D coordinate sepcification. This types of coordinate system is used by the chart constructed with [ChartBuilder::build_cartesian_2d](../../chart/ChartBuilder.html#method.build_cartesian_2d). */ use crate::coord::ranged1d::{KeyPointHint, Ranged, ReversibleRanged}; use crate::coord::{CoordTranslate, ReverseCoordTranslate}; use crate::style::ShapeStyle; use plotters_backend::{BackendCoord, DrawingBackend, DrawingErrorKind}; use std::ops::Range; #[derive(Clone)] pub struct Cartesian2d<X: Ranged, Y: Ranged> { logic_x: X, logic_y: Y, back_x: (i32, i32), back_y: (i32, i32), } impl<X: Ranged, Y: Ranged> Cartesian2d<X, Y> { pub fn new<IntoX: Into<X>, IntoY: Into<Y>>( logic_x: IntoX, logic_y: IntoY, actual: (Range<i32>, Range<i32>), ) -> Self { Self { logic_x: logic_x.into(), logic_y: logic_y.into(), back_x: (actual.0.start, actual.0.end), back_y: (actual.1.start, actual.1.end), } } pub fn draw_mesh< E, DrawMesh: FnMut(MeshLine<X, Y>) -> Result<(), E>, XH: KeyPointHint, YH: KeyPointHint, >( &self, h_limit: YH, v_limit: XH, mut draw_mesh: DrawMesh, ) -> Result<(), E> { let (xkp, ykp) = ( self.logic_x.key_points(v_limit), self.logic_y.key_points(h_limit), ); for logic_x in xkp { let x = self.logic_x.map(&logic_x, self.back_x); draw_mesh(MeshLine::XMesh( (x, self.back_y.0), (x, self.back_y.1), &logic_x, ))?; } for logic_y in ykp { let y = self.logic_y.map(&logic_y, self.back_y);
?; } Ok(()) } pub fn get_x_range(&self) -> Range<X::ValueType> { self.logic_x.range() } pub fn get_y_range(&self) -> Range<Y::ValueType> { self.logic_y.range() } pub fn get_x_axis_pixel_range(&self) -> Range<i32> { self.logic_x.axis_pixel_range(self.back_x) } pub fn get_y_axis_pixel_range(&self) -> Range<i32> { self.logic_y.axis_pixel_range(self.back_y) } pub fn x_spec(&self) -> &X { &self.logic_x } pub fn y_spec(&self) -> &Y { &self.logic_y } } impl<X: Ranged, Y: Ranged> CoordTranslate for Cartesian2d<X, Y> { type From = (X::ValueType, Y::ValueType); fn translate(&self, from: &Self::From) -> BackendCoord { ( self.logic_x.map(&from.0, self.back_x), self.logic_y.map(&from.1, self.back_y), ) } } impl<X: ReversibleRanged, Y: ReversibleRanged> ReverseCoordTranslate for Cartesian2d<X, Y> { fn reverse_translate(&self, input: BackendCoord) -> Option<Self::From> { Some(( self.logic_x.unmap(input.0, self.back_x)?, self.logic_y.unmap(input.1, self.back_y)?, )) } } pub enum MeshLine<'a, X: Ranged, Y: Ranged> { XMesh(BackendCoord, BackendCoord, &'a X::ValueType), YMesh(BackendCoord, BackendCoord, &'a Y::ValueType), } impl<'a, X: Ranged, Y: Ranged> MeshLine<'a, X, Y> { pub fn draw<DB: DrawingBackend>( &self, backend: &mut DB, style: &ShapeStyle, ) -> Result<(), DrawingErrorKind<DB::ErrorType>> { let (&left, &right) = match self { MeshLine::XMesh(a, b, _) => (a, b), MeshLine::YMesh(a, b, _) => (a, b), }; backend.draw_line(left, right, style) } }
draw_mesh(MeshLine::YMesh( (self.back_x.0, y), (self.back_x.1, y), &logic_y, ))
call_expression
[ { "content": "/// Draw power function f(x) = x^power.\n\npub fn draw(canvas_id: &str, power: i32) -> DrawResult<impl Fn((i32, i32)) -> Option<(f32, f32)>> {\n\n let backend = CanvasBackend::new(canvas_id).expect(\"cannot find canvas\");\n\n let root = backend.into_drawing_area();\n\n let font: FontDesc = (\"sans-serif\", 20.0).into();\n\n\n\n root.fill(&WHITE)?;\n\n\n\n let mut chart = ChartBuilder::on(&root)\n\n .margin(20u32)\n\n .caption(format!(\"y=x^{}\", power), font)\n\n .x_label_area_size(30u32)\n\n .y_label_area_size(30u32)\n\n .build_cartesian_2d(-1f32..1f32, -1.2f32..1.2f32)?;\n\n\n\n chart.configure_mesh().x_labels(3).y_labels(3).draw()?;\n\n\n\n chart.draw_series(LineSeries::new(\n\n (-50..=50)\n\n .map(|x| x as f32 / 50.0)\n\n .map(|x| (x, x.powf(power as f32))),\n\n &RED,\n\n ))?;\n\n\n\n root.present()?;\n\n return Ok(chart.into_coord_trans());\n\n}\n", "file_path": "plotters/examples/wasm-demo/src/func_plot.rs", "rank": 0, "score": 225844.66691183197 }, { "content": "/// Draw Mandelbrot set\n\npub fn draw(element: HtmlCanvasElement) -> DrawResult<impl Fn((i32, i32)) -> Option<(f64, f64)>> {\n\n let backend = CanvasBackend::with_canvas_object(element).unwrap();\n\n\n\n let root = backend.into_drawing_area();\n\n root.fill(&WHITE)?;\n\n\n\n let mut chart = ChartBuilder::on(&root)\n\n .margin(20)\n\n .x_label_area_size(10)\n\n .y_label_area_size(10)\n\n .build_cartesian_2d(-2.1..0.6, -1.2..1.2)?;\n\n\n\n chart\n\n .configure_mesh()\n\n .disable_x_mesh()\n\n .disable_y_mesh()\n\n .draw()?;\n\n\n\n let plotting_area = chart.plotting_area();\n\n\n", "file_path": "plotters/examples/wasm-demo/src/mandelbrot.rs", "rank": 1, "score": 222568.9622248168 }, { "content": "pub fn create_mocked_drawing_area<F: FnOnce(&mut MockedBackend)>(\n\n width: u32,\n\n height: u32,\n\n setup: F,\n\n) -> DrawingArea<MockedBackend, Shift> {\n\n let mut backend = MockedBackend::new(width, height);\n\n setup(&mut backend);\n\n backend.into_drawing_area()\n\n}\n", "file_path": 
"plotters/src/drawing/backend_impl/mocked.rs", "rank": 2, "score": 188502.06236800028 }, { "content": "pub fn drawable<F>(application: &gtk::Application, width: i32, height: i32, draw_fn: F)\n\nwhere\n\n F: Fn(&DrawingArea, &Context) -> Inhibit + 'static,\n\n{\n\n let window = gtk::ApplicationWindow::new(application);\n\n let drawing_area = Box::new(DrawingArea::new)();\n\n\n\n drawing_area.connect_draw(draw_fn);\n\n\n\n window.set_default_size(width, height);\n\n\n\n window.add(&drawing_area);\n\n window.show_all();\n\n}\n", "file_path": "plotters/examples/gtk-demo/src/main.rs", "rank": 3, "score": 186912.55145698317 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let root_area = BitMapBackend::new(OUT_FILE_NAME, (1024, 768)).into_drawing_area();\n\n\n\n root_area.fill(&WHITE)?;\n\n\n\n let root_area = root_area.titled(\"Image Title\", (\"sans-serif\", 60))?;\n\n\n\n let (upper, lower) = root_area.split_vertically(512);\n\n\n\n let x_axis = (-3.4f32..3.4).step(0.1);\n\n\n\n let mut cc = ChartBuilder::on(&upper)\n\n .margin(5)\n\n .set_all_label_area_size(50)\n\n .caption(\"Sine and Cosine\", (\"sans-serif\", 40))\n\n .build_cartesian_2d(-3.4f32..3.4, -1.2f32..1.2f32)?;\n\n\n\n cc.configure_mesh()\n\n .x_labels(20)\n\n .y_labels(10)\n", "file_path": "plotters/examples/chart.rs", "rank": 4, "score": 162745.0898207244 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let root = BitMapBackend::new(OUT_FILE_NAME, (1024, 768)).into_drawing_area();\n\n root.fill(&WHITE)?;\n\n\n\n let mut chart = ChartBuilder::on(&root)\n\n .x_label_area_size(35)\n\n .y_label_area_size(40)\n\n .right_y_label_area_size(40)\n\n .margin(5)\n\n .caption(\"Dual Y-Axis Example\", (\"sans-serif\", 50.0).into_font())\n\n .build_cartesian_2d(0f32..10f32, (0.1f32..1e10f32).log_scale())?\n\n .set_secondary_coord(0f32..10f32, -1.0f32..1.0f32);\n\n\n\n chart\n\n .configure_mesh()\n\n .disable_x_mesh()\n\n .disable_y_mesh()\n\n .y_desc(\"Log 
Scale\")\n\n .y_label_formatter(&|x| format!(\"{:e}\", x))\n\n .draw()?;\n", "file_path": "plotters/examples/two-scales.rs", "rank": 5, "score": 160813.30377030102 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let root = BitMapBackend::new(\"plotters-doc-data/5.png\", (640, 480)).into_drawing_area();\n\n root.fill(&WHITE);\n\n let root = root.margin(10, 10, 10, 10);\n\n // After this point, we should be able to draw construct a chart context\n\n let mut chart = ChartBuilder::on(&root)\n\n // Set the caption of the chart\n\n .caption(\"This is our first plot\", (\"sans-serif\", 40).into_font())\n\n // Set the size of the label region\n\n .x_label_area_size(20)\n\n .y_label_area_size(40)\n\n // Finally attach a coordinate on the drawing area and make a chart context\n\n .build_cartesian_2d(0f32..10f32, 0f32..10f32)?;\n\n\n\n // Then we can draw a mesh\n\n chart\n\n .configure_mesh()\n\n // We can customize the maximum number of labels allowed for each axis\n\n .x_labels(5)\n\n .y_labels(5)\n", "file_path": "doc-template/examples/chart.rs", "rank": 6, "score": 160052.72436028766 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let data: Vec<_> = {\n\n let norm_dist = Normal::new(500.0, 100.0).unwrap();\n\n let mut x_rand = XorShiftRng::from_seed(*b\"MyFragileSeed123\");\n\n let x_iter = norm_dist.sample_iter(&mut x_rand);\n\n x_iter\n\n .filter(|x| *x < 1500.0)\n\n .take(100)\n\n .zip(0..)\n\n .map(|(x, b)| x + (b as f64).powf(1.2))\n\n .collect()\n\n };\n\n\n\n let root = BitMapBackend::new(OUT_FILE_NAME, (1024, 768)).into_drawing_area();\n\n\n\n root.fill(&WHITE)?;\n\n\n\n let mut chart = ChartBuilder::on(&root)\n\n .set_label_area_size(LabelAreaPosition::Left, 60)\n\n .set_label_area_size(LabelAreaPosition::Bottom, 60)\n", "file_path": "plotters/examples/area-chart.rs", "rank": 7, "score": 160052.72436028766 }, { "content": "/// Build a range that fits the data\n\n///\n\n/// - `iter`: the iterator over the 
data\n\n/// - **returns** The resulting range\n\n///\n\n/// ```rust\n\n/// use plotters::data::fitting_range;\n\n///\n\n/// let data = [4, 14, -2, 2, 5];\n\n/// let range = fitting_range(&data);\n\n/// assert_eq!(range, std::ops::Range { start: -2, end: 14 });\n\n/// ```\n\npub fn fitting_range<'a, T: 'a, I: IntoIterator<Item = &'a T>>(iter: I) -> Range<T>\n\nwhere\n\n T: Zero + One + PartialOrd + Clone,\n\n{\n\n let (mut lb, mut ub) = (None, None);\n\n\n\n for value in iter.into_iter() {\n\n if let Some(Ordering::Greater) = lb\n\n .as_ref()\n\n .map_or(Some(Ordering::Greater), |lbv: &T| lbv.partial_cmp(value))\n\n {\n\n lb = Some(value.clone());\n\n }\n\n\n\n if let Some(Ordering::Less) = ub\n\n .as_ref()\n\n .map_or(Some(Ordering::Less), |ubv: &T| ubv.partial_cmp(value))\n\n {\n\n ub = Some(value.clone());\n\n }\n\n }\n\n\n\n lb.unwrap_or_else(Zero::zero)..ub.unwrap_or_else(One::one)\n\n}\n", "file_path": "plotters/src/data/data_range.rs", "rank": 8, "score": 157167.29527619385 }, { "content": "type SeriesAnnoDrawFn<'a, DB> = dyn Fn(BackendCoord) -> DynElement<'a, DB, BackendCoord> + 'a;\n\n\n\n/// The annotations (such as the label of the series, the legend element, etc)\n\n/// When a series is drawn onto a drawing area, an series annotation object\n\n/// is created and a mutable reference is returned.\n\npub struct SeriesAnno<'a, DB: DrawingBackend> {\n\n label: Option<String>,\n\n draw_func: Option<Box<SeriesAnnoDrawFn<'a, DB>>>,\n\n}\n\n\n\nimpl<'a, DB: DrawingBackend> SeriesAnno<'a, DB> {\n\n #[allow(clippy::option_as_ref_deref)]\n\n pub(crate) fn get_label(&self) -> &str {\n\n // TODO: Change this when we bump the MSRV\n\n self.label.as_ref().map(|x| x.as_str()).unwrap_or(\"\")\n\n }\n\n\n\n pub(crate) fn get_draw_func(&self) -> Option<&SeriesAnnoDrawFn<'a, DB>> {\n\n self.draw_func.as_ref().map(|x| x.as_ref())\n\n }\n", "file_path": "plotters/src/chart/series.rs", "rank": 9, "score": 149326.39703607128 }, { "content": "fn theta_to_ordinal_coord(radius: 
f64, theta: f64, ordinal_offset: &(i32, i32)) -> (i32, i32) {\n\n // polar coordinates are (r, theta)\n\n // convert to (x, y) coord, with center as offset\n\n\n\n let (sin, cos) = theta.sin_cos();\n\n (\n\n // casting f64 to discrete i32 pixels coordinates is inevitably going to lose precision\n\n // if plotters can support float coordinates, this place would surely benefit, especially for small sizes.\n\n // so far, the result isn't so bad though\n\n (radius * cos + ordinal_offset.0 as f64).round() as i32, // x\n\n (radius * sin + ordinal_offset.1 as f64).round() as i32, // y\n\n )\n\n}\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n // use crate::prelude::*;\n\n\n\n #[test]\n\n fn polar_coord_to_cartestian_coord() {\n", "file_path": "plotters/src/element/pie.rs", "rank": 10, "score": 147343.89113180182 }, { "content": "/// Start drawing an evcxr figure\n\npub fn evcxr_figure<\n\n Draw: FnOnce(DrawingArea<SVGBackend, Shift>) -> Result<(), Box<dyn std::error::Error>>,\n\n>(\n\n size: (u32, u32),\n\n draw: Draw,\n\n) -> SVGWrapper {\n\n let mut buffer = \"\".to_string();\n\n let root = SVGBackend::with_string(&mut buffer, size).into_drawing_area();\n\n draw(root).expect(\"Drawing failure\");\n\n SVGWrapper(buffer, \"\".to_string())\n\n}\n", "file_path": "plotters/src/evcxr.rs", "rank": 11, "score": 145678.70941226708 }, { "content": "pub fn sierpinski_carpet(\n\n depth: u32,\n\n drawing_area: &DrawingArea<BitMapBackend, Shift>,\n\n) -> Result<(), Box<dyn std::error::Error>> {\n\n if depth > 0 {\n\n let sub_areas = drawing_area.split_evenly((3, 3));\n\n for (idx, sub_area) in (0..).zip(sub_areas.iter()) {\n\n if idx != 4 {\n\n sub_area.fill(&BLUE)?;\n\n sierpinski_carpet(depth - 1, sub_area)?;\n\n } else {\n\n sub_area.fill(&WHITE)?;\n\n }\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n\nconst OUT_FILE_NAME: &'static str = \"plotters-doc-data/sierpinski.png\";\n", "file_path": "plotters/examples/sierpinski.rs", "rank": 12, "score": 145678.70941226708 }, { "content": "pub fn 
fill_polygon<DB: DrawingBackend, S: BackendStyle>(\n\n back: &mut DB,\n\n vertices: &[BackendCoord],\n\n style: &S,\n\n) -> Result<(), DrawingErrorKind<DB::ErrorType>> {\n\n if let Some((x_span, y_span)) =\n\n vertices\n\n .iter()\n\n .fold(None, |res: Option<((i32, i32), (i32, i32))>, (x, y)| {\n\n Some(\n\n res.map(|((min_x, max_x), (min_y, max_y))| {\n\n (\n\n (min_x.min(*x), max_x.max(*x)),\n\n (min_y.min(*y), max_y.max(*y)),\n\n )\n\n })\n\n .unwrap_or(((*x, *x), (*y, *y))),\n\n )\n\n })\n\n {\n", "file_path": "plotters-backend/src/rasterizer/polygon.rs", "rank": 13, "score": 144431.37156711554 }, { "content": "pub fn draw_line<DB: DrawingBackend, S: BackendStyle>(\n\n back: &mut DB,\n\n mut from: BackendCoord,\n\n mut to: BackendCoord,\n\n style: &S,\n\n) -> Result<(), DrawingErrorKind<DB::ErrorType>> {\n\n if style.color().alpha == 0.0 || style.stroke_width() == 0 {\n\n return Ok(());\n\n }\n\n\n\n if style.stroke_width() != 1 {\n\n // If the line is wider than 1px, then we need to make it a polygon\n\n let v = (i64::from(to.0 - from.0), i64::from(to.1 - from.1));\n\n let l = ((v.0 * v.0 + v.1 * v.1) as f64).sqrt();\n\n\n\n if l < 1e-5 {\n\n return Ok(());\n\n }\n\n\n\n let v = (v.0 as f64 / l, v.1 as f64 / l);\n", "file_path": "plotters-backend/src/rasterizer/line.rs", "rank": 14, "score": 144431.37156711554 }, { "content": "pub fn draw_rect<B: DrawingBackend, S: BackendStyle>(\n\n b: &mut B,\n\n upper_left: BackendCoord,\n\n bottom_right: BackendCoord,\n\n style: &S,\n\n fill: bool,\n\n) -> Result<(), DrawingErrorKind<B::ErrorType>> {\n\n if style.color().alpha == 0.0 {\n\n return Ok(());\n\n }\n\n let (upper_left, bottom_right) = (\n\n (\n\n upper_left.0.min(bottom_right.0),\n\n upper_left.1.min(bottom_right.1),\n\n ),\n\n (\n\n upper_left.0.max(bottom_right.0),\n\n upper_left.1.max(bottom_right.1),\n\n ),\n\n );\n", "file_path": "plotters-backend/src/rasterizer/rect.rs", "rank": 15, "score": 144431.37156711554 }, { "content": "pub fn draw_circle<B: 
DrawingBackend, S: BackendStyle>(\n\n b: &mut B,\n\n center: BackendCoord,\n\n mut radius: u32,\n\n style: &S,\n\n mut fill: bool,\n\n) -> Result<(), DrawingErrorKind<B::ErrorType>> {\n\n if style.color().alpha == 0.0 {\n\n return Ok(());\n\n }\n\n\n\n if !fill && style.stroke_width() != 1 {\n\n let inner_radius = radius - (style.stroke_width() / 2).min(radius);\n\n radius += style.stroke_width() / 2;\n\n if inner_radius > 0 {\n\n return draw_annulus(b, center, (radius, inner_radius), style);\n\n } else {\n\n fill = true;\n\n }\n\n }\n", "file_path": "plotters-backend/src/rasterizer/circle.rs", "rank": 16, "score": 144431.37156711554 }, { "content": "fn draw_chart<B: DrawingBackend>(root: &DrawingArea<B, Shift>) -> DrawResult<(), B> {\n\n let mut chart = ChartBuilder::on(root)\n\n .caption(\n\n \"Relative Size Example\",\n\n (\"sans-serif\", (5).percent_height()),\n\n )\n\n .x_label_area_size((10).percent_height())\n\n .y_label_area_size((10).percent_width())\n\n .margin(5)\n\n .build_cartesian_2d(-5.0..5.0, -1.0..1.0)?;\n\n\n\n chart\n\n .configure_mesh()\n\n .disable_x_mesh()\n\n .disable_y_mesh()\n\n .label_style((\"sans-serif\", (3).percent_height()))\n\n .draw()?;\n\n\n\n chart.draw_series(LineSeries::new(\n\n (0..1000)\n\n .map(|x| x as f64 / 100.0 - 5.0)\n\n .map(|x| (x, x.sin())),\n\n &RED,\n\n ))?;\n\n Ok(())\n\n}\n\n\n\nconst OUT_FILE_NAME: &'static str = \"plotters-doc-data/relative_size.png\";\n", "file_path": "plotters/examples/relative_size.rs", "rank": 17, "score": 142657.45861477085 }, { "content": "type FontResult<T> = Result<T, FontError>;\n\n\n\n#[derive(Debug, Clone)]\n\npub enum FontError {\n\n LockError,\n\n NoSuchFont(String, String),\n\n FontLoadError(Arc<FontLoadingError>),\n\n GlyphError(Arc<GlyphLoadingError>),\n\n}\n\n\n\nimpl std::fmt::Display for FontError {\n\n fn fmt(&self, fmt: &mut std::fmt::Formatter) -> Result<(), std::fmt::Error> {\n\n match self {\n\n FontError::LockError => write!(fmt, \"Could not lock mutex\"),\n\n 
FontError::NoSuchFont(family, style) => {\n\n write!(fmt, \"No such font: {} {}\", family, style)\n\n }\n\n FontError::FontLoadError(e) => write!(fmt, \"Font loading error {}\", e),\n\n FontError::GlyphError(e) => write!(fmt, \"Glyph error {}\", e),\n\n }\n", "file_path": "plotters/src/style/font/ttf.rs", "rank": 18, "score": 141931.6722372101 }, { "content": "pub fn draw(canvas: HtmlCanvasElement, pitch: f64, yaw: f64) -> DrawResult<()> {\n\n let area = CanvasBackend::with_canvas_object(canvas)\n\n .unwrap()\n\n .into_drawing_area();\n\n area.fill(&WHITE)?;\n\n\n\n let x_axis = (-3.0..3.0).step(0.1);\n\n let z_axis = (-3.0..3.0).step(0.1);\n\n\n\n let mut chart =\n\n ChartBuilder::on(&area).build_cartesian_3d(x_axis.clone(), -3.0..3.0, z_axis.clone())?;\n\n\n\n chart.with_projection(|mut pb| {\n\n pb.yaw = yaw;\n\n pb.pitch = pitch;\n\n pb.scale = 0.7;\n\n pb.into_matrix()\n\n });\n\n\n\n chart.configure_axes().draw()?;\n", "file_path": "plotters/examples/wasm-demo/src/plot3d.rs", "rank": 19, "score": 140079.58111929233 }, { "content": "fn draw_chart<DB: DrawingBackend>(\n\n b: DrawingArea<DB, plotters::coord::Shift>,\n\n) -> Result<(), Box<dyn Error>>\n\nwhere\n\n DB::ErrorType: 'static,\n\n{\n\n let mut chart = ChartBuilder::on(&b)\n\n .margin(1)\n\n .caption(\"Sine and Cosine\", (\"sans-serif\", (10).percent_height()))\n\n .set_label_area_size(LabelAreaPosition::Left, (5i32).percent_width())\n\n .set_label_area_size(LabelAreaPosition::Bottom, (10i32).percent_height())\n\n .build_cartesian_2d(-std::f64::consts::PI..std::f64::consts::PI, -1.2..1.2)?;\n\n\n\n chart\n\n .configure_mesh()\n\n .disable_x_mesh()\n\n .disable_y_mesh()\n\n .draw()?;\n\n\n\n chart.draw_series(LineSeries::new(\n", "file_path": "plotters/examples/console.rs", "rank": 20, "score": 138620.51094758644 }, { "content": "fn fill_background(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"rasterizer::fill_background\");\n\n\n\n group.bench_function(\"rgb\", |b| {\n\n let mut buffer = 
vec![0; (W * H * 3) as usize];\n\n b.iter(|| {\n\n let root = BitMapBackend::with_buffer(&mut buffer, (W, H)).into_drawing_area();\n\n root.fill(&WHITE).unwrap();\n\n })\n\n });\n\n\n\n group.bench_function(\"bgrx\", |b| {\n\n let mut buffer = vec![0; (W * H * 4) as usize];\n\n b.iter(|| {\n\n let root = BitMapBackend::<BGRXPixel>::with_buffer_and_format(&mut buffer, (W, H))\n\n .unwrap()\n\n .into_drawing_area();\n\n root.fill(&WHITE).unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "plotters/benches/benches/rasterizer.rs", "rank": 21, "score": 136141.7592665102 }, { "content": "fn fill_circle(c: &mut Criterion) {\n\n let mut g = c.benchmark_group(\"rasterizer::fill_circle\");\n\n\n\n g.bench_function(\"rgb\", |b| {\n\n let mut buffer = vec![0; (W * H * 3) as usize];\n\n b.iter(|| {\n\n let mut root = BitMapBackend::with_buffer(&mut buffer, (W, H));\n\n root.draw_circle((W as i32 / 2, H as i32 / 2), W / 2, &WHITE.to_rgba(), true)\n\n .unwrap();\n\n })\n\n });\n\n\n\n g.bench_function(\"bgrx\", |b| {\n\n let mut buffer = vec![0; (W * H * 4) as usize];\n\n b.iter(|| {\n\n let mut root =\n\n BitMapBackend::<BGRXPixel>::with_buffer_and_format(&mut buffer, (W, H)).unwrap();\n\n root.draw_circle((W as i32 / 2, H as i32 / 2), W / 2, &WHITE.to_rgba(), true)\n\n .unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "plotters/benches/benches/rasterizer.rs", "rank": 22, "score": 136141.7592665102 }, { "content": "fn draw_pixel(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"rasterizer::draw_pixel\");\n\n\n\n group.bench_function(\"rgb\", |b| {\n\n let mut buffer = vec![0; (W * H * 3) as usize];\n\n b.iter(|| {\n\n let mut root = BitMapBackend::with_buffer(&mut buffer, (W, H));\n\n for x in 0..W / 10 {\n\n for y in 0..H / 10 {\n\n root.draw_pixel((x as i32, y as i32), RGBColor(255, 0, 234).color())\n\n .unwrap();\n\n }\n\n }\n\n })\n\n });\n\n\n\n group.bench_function(\"xbgr\", |b| {\n\n let mut buffer = vec![0; (W * H * 4) as usize];\n\n b.iter(|| {\n\n let mut 
root =\n", "file_path": "plotters/benches/benches/rasterizer.rs", "rank": 23, "score": 136141.7592665102 }, { "content": "fn quartiles_calc(c: &mut Criterion) {\n\n let src: Vec<u32> = Lcg::new().take(100000).collect();\n\n c.bench_function(\"data::quartiles_calc\", |b| {\n\n b.iter(|| {\n\n Quartiles::new(&src);\n\n })\n\n });\n\n}\n\n\n\ncriterion_group! {\n\n name = quartiles_group;\n\n config = Criterion::default().sample_size(10);\n\n targets = quartiles_calc\n\n}\n", "file_path": "plotters/benches/benches/data.rs", "rank": 24, "score": 136141.7592665102 }, { "content": "fn fill_hexagon(c: &mut Criterion) {\n\n let mut g = c.benchmark_group(\"rasterizer::fill_hexagon\");\n\n let mut vert = vec![];\n\n\n\n for i in 0..6 {\n\n let x = (W as f64 / 5.0 * (std::f64::consts::PI * i as f64 / 3.0).cos()).ceil() as i32\n\n + W as i32 / 2;\n\n let y = (W as f64 / 5.0 * (std::f64::consts::PI * i as f64 / 3.0).sin()).ceil() as i32\n\n + W as i32 / 2;\n\n vert.push((x, y));\n\n }\n\n\n\n g.bench_function(\"rgb\", |b| {\n\n let mut buffer = vec![0; (W * H * 3) as usize];\n\n b.iter(|| {\n\n let mut root = BitMapBackend::with_buffer(&mut buffer, (W, H));\n\n root.fill_polygon(vert.clone(), &RED).unwrap();\n\n })\n\n });\n\n\n", "file_path": "plotters/benches/benches/rasterizer.rs", "rank": 25, "score": 136141.7592665102 }, { "content": "fn draw_line(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"rasterizer::draw_line\");\n\n\n\n group.bench_function(\"rgb\", |b| {\n\n let mut buffer = vec![0; (W * H * 3) as usize];\n\n b.iter(|| {\n\n let mut root = BitMapBackend::with_buffer(&mut buffer, (W, H));\n\n for y in 0..10 {\n\n root.draw_line(\n\n (0, 0),\n\n ((W / 2) as i32, (y * 100) as i32),\n\n &RGBColor(255, 0, 234).to_rgba(),\n\n )\n\n .unwrap();\n\n }\n\n })\n\n });\n\n\n\n group.bench_function(\"bgrx\", |b| {\n\n let mut buffer = vec![0; (W * H * 4) as usize];\n", "file_path": "plotters/benches/benches/rasterizer.rs", "rank": 26, "score": 136141.7592665102 }, 
{ "content": "fn blend_background(c: &mut Criterion) {\n\n let mut g = c.benchmark_group(\"rasterizer::blend_background\");\n\n\n\n g.bench_function(\"rgb\", |b| {\n\n let mut buffer = vec![0; (W * H * 3) as usize];\n\n b.iter(|| {\n\n let root = BitMapBackend::with_buffer(&mut buffer, (W, H)).into_drawing_area();\n\n root.fill(&WHITE.mix(0.1)).unwrap();\n\n })\n\n });\n\n\n\n g.bench_function(\"bgrx\", |b| {\n\n let mut buffer = vec![0; (W * H * 4) as usize];\n\n b.iter(|| {\n\n let root = BitMapBackend::<BGRXPixel>::with_buffer_and_format(&mut buffer, (W, H))\n\n .unwrap()\n\n .into_drawing_area();\n\n root.fill(&WHITE.mix(0.1)).unwrap();\n\n })\n\n });\n\n}\n\n\n", "file_path": "plotters/benches/benches/rasterizer.rs", "rank": 27, "score": 136141.7592665102 }, { "content": "/// Any type that describe a surface orientation\n\npub trait Direction<X, Y, Z> {\n\n /// The type for the first input argument\n\n type Input1Type;\n\n /// The type for the second input argument\n\n type Input2Type;\n\n /// The output of the surface function\n\n type OutputType;\n\n\n\n /// The function that maps a point on surface into the coordinate system\n\n fn make_coord(\n\n free_vars: (Self::Input1Type, Self::Input2Type),\n\n result: Self::OutputType,\n\n ) -> (X, Y, Z);\n\n}\n\n\n\nmacro_rules! 
define_panel_descriptor {\n\n ($name: ident, $var1: ident, $var2: ident, $out: ident, ($first: ident, $second:ident) -> $result: ident = $output: expr) => {\n\n #[allow(clippy::upper_case_acronyms)]\n\n pub struct $name;\n\n impl<X, Y, Z> Direction<X, Y, Z> for $name {\n", "file_path": "plotters/src/series/surface.rs", "rank": 28, "score": 135392.7591568827 }, { "content": "fn draw_func_4x4(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"draw_func_4x4\");\n\n\n\n for size in SIZES {\n\n group\n\n .bench_with_input(BenchmarkId::new(\"sequential\", size), size, |b, &s| {\n\n let mut buffer = vec![0; (s * s * 3) as usize];\n\n b.iter(|| {\n\n let root = BitMapBackend::with_buffer(&mut buffer, (s, s)).into_drawing_area();\n\n let areas = root.split_evenly((4, 4));\n\n areas.iter().for_each(|area| draw_plot(&area, 2.0));\n\n })\n\n })\n\n .bench_with_input(BenchmarkId::new(\"blit\", size), size, |b, &s| {\n\n let mut buffer = vec![0; (s * s * 3) as usize];\n\n let mut element_buffer = vec![vec![0; (s * s / 4 * 3) as usize]; 4];\n\n b.iter(|| {\n\n let root = BitMapBackend::with_buffer(&mut buffer, (s, s)).into_drawing_area();\n\n let areas = root.split_evenly((4, 4));\n\n let elements: Vec<_> = element_buffer\n", "file_path": "plotters/benches/benches/parallel.rs", "rank": 29, "score": 134159.56339192 }, { "content": "fn fill_background_red(c: &mut Criterion) {\n\n let mut g = c.benchmark_group(\"rasterizer::fill_background_red\");\n\n\n\n g.bench_function(\"rgb\", |b| {\n\n let mut buffer = vec![0; (W * H * 3) as usize];\n\n b.iter(|| {\n\n let root = BitMapBackend::with_buffer(&mut buffer, (W, H)).into_drawing_area();\n\n root.fill(&RED).unwrap();\n\n })\n\n });\n\n\n\n g.bench_function(\"bgrx\", |b| {\n\n let mut buffer = vec![0; (W * H * 4) as usize];\n\n b.iter(|| {\n\n let root = BitMapBackend::<BGRXPixel>::with_buffer_and_format(&mut buffer, (W, H))\n\n .unwrap()\n\n .into_drawing_area();\n\n root.fill(&RED).unwrap();\n\n })\n\n });\n\n}\n\n\n", 
"file_path": "plotters/benches/benches/rasterizer.rs", "rank": 30, "score": 134159.56339192 }, { "content": "fn draw_func_2x1(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"draw_func_2x1\");\n\n\n\n for size in SIZES {\n\n group\n\n .bench_with_input(BenchmarkId::new(\"blit\", size), size, |b, &s| {\n\n let mut buffer = vec![0; (s * s * 3) as usize];\n\n let mut element_buffer = vec![vec![0; (s * s / 2 * 3) as usize]; 2];\n\n b.iter(|| {\n\n let root = BitMapBackend::with_buffer(&mut buffer, (s, s)).into_drawing_area();\n\n let areas = root.split_evenly((2, 1));\n\n let elements: Vec<_> = element_buffer\n\n .par_iter_mut()\n\n .map(|buf| {\n\n let mut element =\n\n BitMapElement::with_mut((0, 0), (s, s / 2), buf).unwrap();\n\n draw_plot(&element.as_bitmap_backend().into_drawing_area(), 2.0);\n\n element\n\n })\n\n .collect();\n", "file_path": "plotters/benches/benches/parallel.rs", "rank": 31, "score": 134159.56339192 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n draw_chart(TextDrawingBackend(vec![PixelState::Empty; 5000]).into_drawing_area())?;\n\n let b = BitMapBackend::new(OUT_FILE_NAME, (1024, 768)).into_drawing_area();\n\n b.fill(&WHITE)?;\n\n draw_chart(b)?;\n\n\n\n println!(\"Image result has been saved to {}\", OUT_FILE_NAME);\n\n\n\n Ok(())\n\n}\n", "file_path": "plotters/examples/console.rs", "rank": 32, "score": 132357.32584920377 }, { "content": "fn draw_func_1x1_seq(c: &mut Criterion) {\n\n let mut group = c.benchmark_group(\"draw_func_1x1\");\n\n\n\n for size in SIZES {\n\n group.bench_with_input(BenchmarkId::new(\"sequential\", size), size, |b, &s| {\n\n let mut buffer = vec![0; (s * s * 3) as usize];\n\n b.iter(|| {\n\n let root = BitMapBackend::with_buffer(&mut buffer, (s, s)).into_drawing_area();\n\n root.fill(&WHITE).unwrap();\n\n draw_plot(&root, 2.0);\n\n })\n\n });\n\n }\n\n}\n\n\n", "file_path": "plotters/benches/benches/parallel.rs", "rank": 33, "score": 132269.6308909396 }, { "content": "/// Make a partial 
axis based on the percentage of visible portion.\n\n/// We can use `into_partial_axis` to create a partial axis range specification.\n\n/// But sometimes, we want to directly specify the percentage visible to the user.\n\n///\n\n/// - `axis_range`: The range specification\n\n/// - `part`: The visible part of the axis. Each value is from [0.0, 1.0]\n\n/// - **returns**: The partial axis created from the input, or `None` when not possible\n\npub fn make_partial_axis<T>(\n\n axis_range: Range<T>,\n\n part: Range<f64>,\n\n) -> Option<PartialAxis<<Range<T> as AsRangedCoord>::CoordDescType>>\n\nwhere\n\n Range<T>: AsRangedCoord,\n\n T: num_traits::NumCast + Clone,\n\n{\n\n let left: f64 = num_traits::cast(axis_range.start.clone())?;\n\n let right: f64 = num_traits::cast(axis_range.end.clone())?;\n\n\n\n let full_range_size = (right - left) / (part.end - part.start);\n\n\n\n let full_left = left - full_range_size * part.start;\n\n let full_right = right + full_range_size * (1.0 - part.end);\n\n\n\n let full_range: Range<T> = num_traits::cast(full_left)?..num_traits::cast(full_right)?;\n\n\n\n let axis_range: <Range<T> as AsRangedCoord>::CoordDescType = axis_range.into();\n\n\n", "file_path": "plotters/src/coord/ranged1d/combinators/partial_axis.rs", "rank": 34, "score": 130388.88450400849 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n let root = BitMapBackend::new(OUT_FILE_NAME, (1024, 768)).into_drawing_area();\n\n\n\n root.fill(&WHITE)?;\n\n\n\n let mut chart = ChartBuilder::on(&root)\n\n .margin(10)\n\n .caption(\n\n \"Monthly Average Temperate in Salt Lake City, UT\",\n\n (\"sans-serif\", 40),\n\n )\n\n .set_label_area_size(LabelAreaPosition::Left, 60)\n\n .set_label_area_size(LabelAreaPosition::Right, 60)\n\n .set_label_area_size(LabelAreaPosition::Bottom, 40)\n\n .build_cartesian_2d(\n\n (Utc.ymd(2010, 1, 1)..Utc.ymd(2018, 12, 1)).monthly(),\n\n 14.0..104.0,\n\n )?\n\n .set_secondary_coord(\n\n (Utc.ymd(2010, 1, 1)..Utc.ymd(2018, 12, 
1)).monthly(),\n", "file_path": "plotters/examples/slc-temp.rs", "rank": 35, "score": 130378.36575195569 }, { "content": "fn main() -> Result<(), Box<dyn Error>> {\n\n let mut buf = BufferWrapper(vec![0u32; W * H]);\n\n\n\n let mut fx: f64 = 1.0;\n\n let mut fy: f64 = 1.1;\n\n let mut xphase: f64 = 0.0;\n\n let mut yphase: f64 = 0.1;\n\n\n\n let mut window = Window::new(\n\n &get_window_title(fx, fy, yphase - xphase),\n\n W,\n\n H,\n\n WindowOptions::default(),\n\n )?;\n\n let cs = {\n\n let root =\n\n BitMapBackend::<BGRXPixel>::with_buffer_and_format(buf.borrow_mut(), (W as u32, H as u32))?\n\n .into_drawing_area();\n\n root.fill(&BLACK)?;\n\n\n", "file_path": "plotters/examples/minifb-demo/src/main.rs", "rank": 36, "score": 126690.49888358853 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let data = generate_random_data();\n\n let down_sampled = down_sample(&data[..]);\n\n\n\n let root = BitMapBackend::new(OUT_FILE_NAME, (1024, 768)).into_drawing_area();\n\n\n\n root.fill(&WHITE)?;\n\n\n\n let mut chart = ChartBuilder::on(&root)\n\n .caption(\"Linear Function with Noise\", (\"sans-serif\", 60))\n\n .margin(10)\n\n .set_label_area_size(LabelAreaPosition::Left, 40)\n\n .set_label_area_size(LabelAreaPosition::Bottom, 40)\n\n .build_cartesian_2d(-10f64..10f64, -10f64..10f64)?;\n\n\n\n chart.configure_mesh().draw()?;\n\n\n\n chart\n\n .draw_series(LineSeries::new(data, &GREEN.mix(0.3)))?\n\n .label(\"Raw Data\")\n", "file_path": "plotters/examples/errorbar.rs", "rank": 37, "score": 123554.20740553719 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let root = BitMapBackend::new(OUT_FILE_NAME, (1024, 768)).into_drawing_area();\n\n\n\n root.fill(&WHITE)?;\n\n\n\n let mut chart = ChartBuilder::on(&root)\n\n .caption(\"Koch's Snowflake\", (\"sans-serif\", 50))\n\n .build_cartesian_2d(-2.0..2.0, -1.5..1.5)?;\n\n\n\n let mut snowflake_vertices = {\n\n let mut current: Vec<(f64, f64)> = vec![\n\n (0.0, 1.0),\n\n 
((3.0f64).sqrt() / 2.0, -0.5),\n\n (-(3.0f64).sqrt() / 2.0, -0.5),\n\n ];\n\n for _ in 0..6 {\n\n current = snowflake_iter(&current[..]);\n\n }\n\n current\n\n };\n", "file_path": "plotters/examples/snowflake.rs", "rank": 38, "score": 123554.20740553719 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let root = BitMapBackend::new(OUT_FILE_NAME, (1024, 768)).into_drawing_area();\n\n\n\n root.fill(&WHITE)?;\n\n\n\n let mut chart = ChartBuilder::on(&root)\n\n .caption(\"Matshow Example\", (\"sans-serif\", 80))\n\n .margin(5)\n\n .top_x_label_area_size(40)\n\n .y_label_area_size(40)\n\n .build_cartesian_2d(0i32..15i32, 15i32..0i32)?;\n\n\n\n chart\n\n .configure_mesh()\n\n .x_labels(15)\n\n .y_labels(15)\n\n .max_light_lines(4)\n\n .x_label_offset(35)\n\n .y_label_offset(25)\n\n .disable_x_mesh()\n", "file_path": "plotters/examples/matshow.rs", "rank": 39, "score": 123554.20740553719 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let root = BitMapBackend::new(OUT_FILE_NAME, (640, 480)).into_drawing_area();\n\n\n\n root.fill(&WHITE)?;\n\n\n\n let mut chart = ChartBuilder::on(&root)\n\n .x_label_area_size(35)\n\n .y_label_area_size(40)\n\n .margin(5)\n\n .caption(\"Histogram Test\", (\"sans-serif\", 50.0))\n\n .build_cartesian_2d((0u32..10u32).into_segmented(), 0u32..10u32)?;\n\n\n\n chart\n\n .configure_mesh()\n\n .disable_x_mesh()\n\n .bold_line_style(&WHITE.mix(0.3))\n\n .y_desc(\"Count\")\n\n .x_desc(\"Bucket\")\n\n .axis_desc_style((\"sans-serif\", 15))\n\n .draw()?;\n", "file_path": "plotters/examples/histogram.rs", "rank": 40, "score": 123554.20740553719 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let root = BitMapBackend::new(OUT_FILE_NAME, (800, 600)).into_drawing_area();\n\n\n\n root.fill(&WHITE)?;\n\n\n\n let mut chart = ChartBuilder::on(&root)\n\n .margin(20)\n\n .x_label_area_size(10)\n\n .y_label_area_size(10)\n\n .build_cartesian_2d(-2.1f64..0.6f64, 
-1.2f64..1.2f64)?;\n\n\n\n chart\n\n .configure_mesh()\n\n .disable_x_mesh()\n\n .disable_y_mesh()\n\n .draw()?;\n\n\n\n let plotting_area = chart.plotting_area();\n\n\n\n let range = plotting_area.get_pixel_range();\n", "file_path": "plotters/examples/mandelbrot.rs", "rank": 41, "score": 123554.20740553719 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let root_area = BitMapBackend::new(&OUT_FILE_NAME, (950, 700)).into_drawing_area();\n\n root_area.fill(&WHITE).unwrap();\n\n let title_style = TextStyle::from((\"sans-serif\", 30).into_font()).color(&(BLACK));\n\n root_area.titled(\"BEST CIRCLES\", title_style).unwrap();\n\n\n\n let dims = root_area.dim_in_pixel();\n\n let center = (dims.0 as i32 / 2, dims.1 as i32 / 2);\n\n let radius = 300.0;\n\n let sizes = vec![66.0, 33.0];\n\n let _rgba = RGBAColor(0, 50, 255, 1.0);\n\n let colors = vec![RGBColor(0, 50, 255), CYAN];\n\n let labels = vec![\"Pizza\", \"Pacman\"];\n\n\n\n let mut pie = Pie::new(&center, &radius, &sizes, &colors, &labels);\n\n pie.start_angle(66.0);\n\n pie.label_style(((\"sans-serif\", 50).into_font()).color(&(ORANGE)));\n\n pie.percentages(((\"sans-serif\", radius * 0.08).into_font()).color(&BLACK));\n\n root_area.draw(&pie)?;\n\n\n\n Ok(())\n\n}\n", "file_path": "plotters/examples/pie.rs", "rank": 42, "score": 123554.20740553719 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let root = BitMapBackend::gif(OUT_FILE_NAME, (600, 400), 100)?.into_drawing_area();\n\n\n\n for pitch in 0..157 {\n\n root.fill(&WHITE)?;\n\n\n\n let mut chart = ChartBuilder::on(&root)\n\n .caption(\"2D Gaussian PDF\", (\"sans-serif\", 20))\n\n .build_cartesian_3d(-3.0..3.0, 0.0..6.0, -3.0..3.0)?;\n\n chart.with_projection(|mut p| {\n\n p.pitch = 1.57 - (1.57 - pitch as f64 / 50.0).abs();\n\n p.scale = 0.7;\n\n p.into_matrix() // build the projection matrix\n\n });\n\n\n\n chart\n\n .configure_axes()\n\n .light_grid_style(BLACK.mix(0.15))\n\n .max_light_lines(3)\n\n 
.draw()?;\n", "file_path": "plotters/examples/3d-plot2.rs", "rank": 43, "score": 123554.20740553719 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let root = BitMapBackend::gif(OUT_FILE_NAME, (800, 600), 1_000)?.into_drawing_area();\n\n\n\n for i in 0..8 {\n\n root.fill(&WHITE)?;\n\n\n\n let mut chart = ChartBuilder::on(&root)\n\n .caption(\n\n format!(\"Koch's Snowflake (n_iter = {})\", i),\n\n (\"sans-serif\", 50),\n\n )\n\n .build_cartesian_2d(-2.0..2.0, -1.5..1.5)?;\n\n\n\n let mut snowflake_vertices = {\n\n let mut current: Vec<(f64, f64)> = vec![\n\n (0.0, 1.0),\n\n ((3.0f64).sqrt() / 2.0, -0.5),\n\n (-(3.0f64).sqrt() / 2.0, -0.5),\n\n ];\n\n for _ in 0..i {\n", "file_path": "plotters/examples/animation.rs", "rank": 44, "score": 123554.20740553719 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let data = get_data();\n\n let root = BitMapBackend::new(OUT_FILE_NAME, (1024, 768)).into_drawing_area();\n\n root.fill(&WHITE)?;\n\n\n\n let (to_date, from_date) = (\n\n parse_time(&data[0].0) + Duration::days(1),\n\n parse_time(&data[29].0) - Duration::days(1),\n\n );\n\n\n\n let mut chart = ChartBuilder::on(&root)\n\n .x_label_area_size(40)\n\n .y_label_area_size(40)\n\n .caption(\"MSFT Stock Price\", (\"sans-serif\", 50.0).into_font())\n\n .build_cartesian_2d(from_date..to_date, 110f32..135f32)?;\n\n\n\n chart.configure_mesh().light_line_style(&WHITE).draw()?;\n\n\n\n chart.draw_series(\n\n data.iter().map(|x| {\n", "file_path": "plotters/examples/stock.rs", "rank": 45, "score": 123554.20740553719 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let root = BitMapBackend::new(\"plotters-doc-data/5.png\", (640, 480)).into_drawing_area();\n\n root.fill(&WHITE);\n\n let root = root.margin(10, 10, 10, 10);\n\n // After this point, we should be able to draw construct a chart context\n\n let mut chart = ChartBuilder::on(&root)\n\n // Set the caption of the chart\n\n .caption(\"This is our 
first plot\", (\"sans-serif\", 40).into_font())\n\n // Set the size of the label region\n\n .x_label_area_size(20)\n\n .y_label_area_size(40)\n\n // Finally attach a coordinate on the drawing area and make a chart context\n\n .build_cartesian_2d(0f32..10f32, 0f32..10f32)?;\n\n\n\n // Then we can draw a mesh\n\n chart\n\n .configure_mesh()\n\n // We can customize the maximum number of labels allowed for each axis\n\n .x_labels(5)\n\n .y_labels(5)\n", "file_path": "plotters/src/lib.rs", "rank": 46, "score": 123554.20740553719 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let root = BitMapBackend::new(OUT_FILE_NAME, (1024, 768)).into_drawing_area();\n\n\n\n root.fill(&WHITE)?;\n\n\n\n let root = root\n\n .titled(\"Sierpinski Carpet Demo\", (\"sans-serif\", 60))?\n\n .shrink(((1024 - 700) / 2, 0), (700, 700));\n\n\n\n sierpinski_carpet(5, &root)?;\n\n\n\n // To avoid the IO failure being ignored silently, we manually call the present function\n\n root.present().expect(\"Unable to write result to file, please make sure 'plotters-doc-data' dir exists under current dir\");\n\n println!(\"Result has been saved to {}\", OUT_FILE_NAME);\n\n\n\n Ok(())\n\n}\n", "file_path": "plotters/examples/sierpinski.rs", "rank": 47, "score": 123554.20740553719 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let root = SVGBackend::new(OUT_FILE_NAME, (1024, 768)).into_drawing_area();\n\n root.fill(&WHITE)?;\n\n\n\n let root = root.margin(5, 5, 5, 5);\n\n\n\n let (upper, lower) = root.split_vertically(512);\n\n\n\n let args: Vec<String> = env::args().collect();\n\n\n\n let ds = if args.len() < 2 {\n\n read_data(io::Cursor::new(get_data()))\n\n } else {\n\n let file = fs::File::open(&args[1])?;\n\n read_data(BufReader::new(file))\n\n };\n\n let dataset: Vec<(String, String, Quartiles)> = ds\n\n .iter()\n\n .map(|(k, v)| (k.0.clone(), k.1.clone(), Quartiles::new(&v)))\n\n .collect();\n", "file_path": "plotters/examples/boxplot.rs", 
"rank": 48, "score": 123554.20740553719 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let area = SVGBackend::new(OUT_FILE_NAME, (1024, 760)).into_drawing_area();\n\n\n\n area.fill(&WHITE)?;\n\n\n\n let x_axis = (-3.0..3.0).step(0.1);\n\n let z_axis = (-3.0..3.0).step(0.1);\n\n\n\n let mut chart = ChartBuilder::on(&area)\n\n .caption(format!(\"3D Plot Test\"), (\"sans\", 20))\n\n .build_cartesian_3d(x_axis.clone(), -3.0..3.0, z_axis.clone())?;\n\n\n\n chart.with_projection(|mut pb| {\n\n pb.yaw = 0.5;\n\n pb.scale = 0.9;\n\n pb.into_matrix()\n\n });\n\n\n\n chart\n\n .configure_axes()\n", "file_path": "plotters/examples/3d-plot.rs", "rank": 49, "score": 123554.20740553719 }, { "content": "/// The trait indicates the ranged value can be map reversely, which means\n\n/// an pixel-based coordinate is given, it's possible to figure out the underlying\n\n/// logic value.\n\npub trait ReversibleRanged: Ranged {\n\n /// Perform the reverse mapping\n\n fn unmap(&self, input: i32, limit: (i32, i32)) -> Option<Self::ValueType>;\n\n}\n\n\n", "file_path": "plotters/src/coord/ranged1d/mod.rs", "rank": 50, "score": 122224.8453268602 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let root = BitMapBackend::new(OUT_FILE_NAME, (1024, 768)).into_drawing_area();\n\n\n\n root.fill(&WHITE)?;\n\n\n\n let sd = 0.13;\n\n\n\n let random_points: Vec<(f64, f64)> = {\n\n let norm_dist = Normal::new(0.5, sd).unwrap();\n\n let mut x_rand = XorShiftRng::from_seed(*b\"MyFragileSeed123\");\n\n let mut y_rand = XorShiftRng::from_seed(*b\"MyFragileSeed321\");\n\n let x_iter = norm_dist.sample_iter(&mut x_rand);\n\n let y_iter = norm_dist.sample_iter(&mut y_rand);\n\n x_iter.zip(y_iter).take(5000).collect()\n\n };\n\n\n\n let areas = root.split_by_breakpoints([944], [80]);\n\n\n\n let mut x_hist_ctx = ChartBuilder::on(&areas[0])\n\n .y_label_area_size(40)\n", "file_path": "plotters/examples/normal-dist.rs", "rank": 51, "score": 121753.18787343829 
}, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let area = SVGBackend::new(OUT_FILE_NAME, (1024, 760)).into_drawing_area();\n\n area.fill(&WHITE)?;\n\n\n\n let mut chart = ChartBuilder::on(&area)\n\n .set_all_label_area_size(50)\n\n .build_cartesian_2d(CustomizedX(7), 0.0..10.0)?;\n\n\n\n chart.configure_mesh().draw()?;\n\n\n\n area.present().expect(\"Unable to write result to file, please make sure 'plotters-doc-data' dir exists under current dir\");\n\n println!(\"Result has been saved to {}\", OUT_FILE_NAME);\n\n Ok(())\n\n}\n\n\n", "file_path": "plotters/examples/customized_coord.rs", "rank": 52, "score": 121753.18787343829 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let root = BitMapBackend::new(\"plotters-doc-data/3.png\", (300, 200)).into_drawing_area();\n\n root.fill(&WHITE)?;\n\n // Draw an circle on the drawing area\n\n root.draw(&Circle::new(\n\n (100, 100),\n\n 50,\n\n Into::<ShapeStyle>::into(&GREEN).filled(),\n\n ))?;\n\n root.present()?;\n\n Ok(())\n\n}\n", "file_path": "doc-template/examples/elements.rs", "rank": 53, "score": 121753.18787343829 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let root = BitMapBackend::new(OUT_FILE_NAME, (2000, 850)).into_drawing_area();\n\n\n\n root.fill(&WHITE)?;\n\n\n\n let mut chart = ChartBuilder::on(&root)\n\n .caption(\"Demonstration of full_palette Colors\", (\"sans-serif\", 50))\n\n .build_cartesian_2d(-0.5f32..19f32, -1f32..15f32)?;\n\n\n\n use full_palette::*;\n\n let colors = [\n\n [\n\n RED, RED_50, RED_100, RED_200, RED_300, RED_400, RED_500, RED_600, RED_700, RED_800,\n\n RED_900, RED_A100, RED_A200, RED_A400, RED_A700,\n\n ],\n\n [\n\n PINK, PINK_50, PINK_100, PINK_200, PINK_300, PINK_400, PINK_500, PINK_600, PINK_700,\n\n PINK_800, PINK_900, PINK_A100, PINK_A200, PINK_A400, PINK_A700,\n\n ],\n\n [\n", "file_path": "plotters/examples/full_palette.rs", "rank": 54, "score": 121753.18787343829 }, { "content": "fn main() 
-> Result<(), Box<dyn std::error::Error>> {\n\n let root = BitMapBackend::new(OUT_FILE_NAME, (640, 480)).into_drawing_area();\n\n\n\n root.fill(&WHITE)?;\n\n\n\n let mut chart = ChartBuilder::on(&root)\n\n .x_label_area_size(35)\n\n .y_label_area_size(40)\n\n .margin(5)\n\n .caption(\"Nested Coord\", (\"sans-serif\", 50.0))\n\n .build_cartesian_2d(\n\n [\"Linear\", \"Quadratic\"].nested_coord(|_| 0.0..10.0),\n\n 0.0..10.0,\n\n )?;\n\n\n\n chart\n\n .configure_mesh()\n\n .disable_mesh()\n\n .axis_desc_style((\"sans-serif\", 15))\n\n .draw()?;\n", "file_path": "plotters/examples/nested_coord.rs", "rank": 55, "score": 121753.18787343829 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let root = BitMapBackend::new(OUT_FILE_NAME, (1024, 768)).into_drawing_area();\n\n\n\n root.fill(&WHITE)?;\n\n\n\n let (left, right) = root.split_horizontally((70).percent_width());\n\n\n\n draw_chart(&left)?;\n\n\n\n let (upper, lower) = right.split_vertically(300);\n\n\n\n draw_chart(&upper)?;\n\n draw_chart(&lower)?;\n\n let root = root.shrink((200, 200), (150, 100));\n\n draw_chart(&root)?;\n\n\n\n // To avoid the IO failure being ignored silently, we manually call the present function\n\n root.present().expect(\"Unable to write result to file, please make sure 'plotters-doc-data' dir exists under current dir\");\n\n println!(\"Result has been saved to {}\", OUT_FILE_NAME);\n\n\n\n Ok(())\n\n}\n", "file_path": "plotters/examples/relative_size.rs", "rank": 56, "score": 121753.18787343829 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let root = SVGBackend::new(OUT_FILE_NAME, (1024, 768)).into_drawing_area();\n\n root.fill(&WHITE)?;\n\n\n\n let (upper, lower) = root.split_vertically(750);\n\n\n\n lower.titled(\n\n \"Data Source: https://covid.ourworldindata.org/data/owid-covid-data.json\",\n\n (\"sans-serif\", 10).into_font().color(&BLACK.mix(0.5)),\n\n )?;\n\n\n\n let mut chart = ChartBuilder::on(&upper)\n\n .caption(\"World 
COVID-19 Cases\", (\"sans-serif\", (5).percent_height()))\n\n .set_label_area_size(LabelAreaPosition::Left, (8).percent())\n\n .set_label_area_size(LabelAreaPosition::Bottom, (4).percent())\n\n .margin((1).percent())\n\n .build_cartesian_2d(\n\n (20u32..5000_0000u32)\n\n .log_scale()\n\n .with_key_points(vec![50, 100, 1000, 10000, 100000, 1000000, 10000000]),\n", "file_path": "plotters/examples/tick_control.rs", "rank": 57, "score": 121753.18787343829 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let root = BitMapBackend::new(OUT_FILE_NAME, (1024, 768)).into_drawing_area();\n\n root.fill(&WHITE)?;\n\n\n\n let mut chart = ChartBuilder::on(&root)\n\n .caption(\"Bitmap Example\", (\"sans-serif\", 30))\n\n .margin(5)\n\n .set_label_area_size(LabelAreaPosition::Left, 40)\n\n .set_label_area_size(LabelAreaPosition::Bottom, 40)\n\n .build_cartesian_2d(0.0..1.0, 0.0..1.0)?;\n\n\n\n chart.configure_mesh().disable_mesh().draw()?;\n\n\n\n let (w, h) = chart.plotting_area().dim_in_pixel();\n\n let image = image::load(\n\n BufReader::new(\n\n File::open(\"plotters-doc-data/cat.png\").map_err(|e| {\n\n eprintln!(\"Unable to open file plotters-doc-data.png, please make sure you have clone this repo with --recursive\");\n\n e\n\n })?),\n", "file_path": "plotters/examples/blit-bitmap.rs", "rank": 58, "score": 121753.18787343829 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let sd = 0.60;\n\n\n\n let random_points: Vec<f64> = {\n\n let norm_dist = Normal::new(0.0, sd).unwrap();\n\n let mut x_rand = XorShiftRng::from_seed(*b\"MyFragileSeed123\");\n\n let x_iter = norm_dist.sample_iter(&mut x_rand);\n\n x_iter.take(5000).filter(|x| x.abs() <= 4.0).collect()\n\n };\n\n\n\n let root = BitMapBackend::new(OUT_FILE_NAME, (1024, 768)).into_drawing_area();\n\n\n\n root.fill(&WHITE)?;\n\n\n\n let mut chart = ChartBuilder::on(&root)\n\n .margin(5)\n\n .caption(\"1D Gaussian Distribution Demo\", (\"sans-serif\", 30))\n\n 
.set_label_area_size(LabelAreaPosition::Left, 60)\n\n .set_label_area_size(LabelAreaPosition::Bottom, 60)\n\n .set_label_area_size(LabelAreaPosition::Right, 60)\n", "file_path": "plotters/examples/normal-dist2.rs", "rank": 59, "score": 121753.18787343829 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n // Create a 800*600 bitmap and start drawing\n\n let mut backend = BitMapBackend::new(\"plotters-doc-data/1.png\", (300, 200));\n\n // And if we want SVG backend\n\n // let backend = SVGBackend::new(\"output.svg\", (800, 600));\n\n backend.draw_rect((50, 50), (200, 150), &RED, true)?;\n\n backend.present()?;\n\n Ok(())\n\n}\n", "file_path": "doc-template/examples/drawing_backends.rs", "rank": 60, "score": 120032.27024650466 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let root = BitMapBackend::new(\"plotters-doc-data/0.png\", (640, 480)).into_drawing_area();\n\n root.fill(&WHITE)?;\n\n let mut chart = ChartBuilder::on(&root)\n\n .caption(\"y=x^2\", (\"sans-serif\", 50).into_font())\n\n .margin(5)\n\n .x_label_area_size(30)\n\n .y_label_area_size(30)\n\n .build_cartesian_2d(-1f32..1f32, -0.1f32..1f32)?;\n\n\n\n chart.configure_mesh().draw()?;\n\n\n\n chart\n\n .draw_series(LineSeries::new(\n\n (-50..=50).map(|x| x as f32 / 50.0).map(|x| (x, x * x)),\n\n &RED,\n\n ))?\n\n .label(\"y = x^2\")\n\n .legend(|(x, y)| PathElement::new(vec![(x, y), (x + 20, y)], &RED));\n\n\n", "file_path": "doc-template/examples/quick_start.rs", "rank": 61, "score": 120032.27024650466 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let root = BitMapBackend::new(\"plotters-doc-data/4.png\", (640, 480)).into_drawing_area();\n\n\n\n root.fill(&RGBColor(240, 200, 200))?;\n\n\n\n let root = root.apply_coord_spec(Cartesian2d::<RangedCoordf32, RangedCoordf32>::new(\n\n 0f32..1f32,\n\n 0f32..1f32,\n\n (0..640, 0..480),\n\n ));\n\n\n\n let dot_and_label = |x: f32, y: f32| {\n\n return EmptyElement::at((x, y))\n\n + 
Circle::new((0, 0), 3, ShapeStyle::from(&BLACK).filled())\n\n + Text::new(\n\n format!(\"({:.2},{:.2})\", x, y),\n\n (10, 0),\n\n (\"sans-serif\", 15.0).into_font(),\n\n );\n\n };\n\n\n\n root.draw(&dot_and_label(0.5, 0.6))?;\n\n root.draw(&dot_and_label(0.25, 0.33))?;\n\n root.draw(&dot_and_label(0.8, 0.8))?;\n\n root.present()?;\n\n Ok(())\n\n}\n", "file_path": "doc-template/examples/composable_elements.rs", "rank": 62, "score": 120032.27024650466 }, { "content": "fn main() -> Result<(), Box<dyn std::error::Error>> {\n\n let root_drawing_area =\n\n BitMapBackend::new(\"plotters-doc-data/2.png\", (300, 200)).into_drawing_area();\n\n // And we can split the drawing area into 3x3 grid\n\n let child_drawing_areas = root_drawing_area.split_evenly((3, 3));\n\n // Then we fill the drawing area with different color\n\n for (area, color) in child_drawing_areas.into_iter().zip(0..) {\n\n area.fill(&Palette99::pick(color))?;\n\n }\n\n root_drawing_area.present()?;\n\n Ok(())\n\n}\n", "file_path": "doc-template/examples/drawing_area.rs", "rank": 63, "score": 120032.27024650466 }, { "content": "struct CustomizedX(u32);\n\n\n\nimpl Ranged for CustomizedX {\n\n type ValueType = u32;\n\n type FormatOption = NoDefaultFormatting;\n\n fn map(&self, value: &Self::ValueType, limit: (i32, i32)) -> i32 {\n\n let size = limit.1 - limit.0;\n\n ((*value as f64 / self.0 as f64) * size as f64) as i32 + limit.0\n\n }\n\n\n\n fn range(&self) -> std::ops::Range<Self::ValueType> {\n\n 0..self.0\n\n }\n\n\n\n fn key_points<Hint: KeyPointHint>(&self, hint: Hint) -> Vec<Self::ValueType> {\n\n if hint.max_num_points() < (self.0 as usize) {\n\n return vec![];\n\n }\n\n\n\n (0..self.0).collect()\n\n }\n\n}\n\n\n\nimpl ValueFormatter<u32> for CustomizedX {\n\n fn format_ext(&self, value: &u32) -> String {\n\n format!(\"{} of {}\", value, self.0)\n\n }\n\n}\n\n\n", "file_path": "plotters/examples/customized_coord.rs", "rank": 64, "score": 118975.0729031153 }, { "content": "#[test]\n\nfn entry_point() 
{\n\n main().unwrap()\n\n}\n", "file_path": "plotters/examples/chart.rs", "rank": 65, "score": 117964.84969028595 }, { "content": "/// The function that pretty prints the floating number\n\n/// Since rust doesn't have anything that can format a float with out appearance, so we just\n\n/// implement a float pretty printing function, which finds the shortest representation of a\n\n/// floating point number within the allowed error range.\n\n///\n\n/// - `n`: The float number to pretty-print\n\n/// - `allow_sn`: Should we use scientific notation when possible\n\n/// - **returns**: The pretty printed string\n\npub fn pretty_print_float(n: f64, allow_sn: bool) -> String {\n\n (FloatPrettyPrinter {\n\n allow_scientific: allow_sn,\n\n min_decimal: 0,\n\n max_decimal: 10,\n\n })\n\n .print(n)\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n #[test]\n\n fn test_pretty_printing() {\n\n assert_eq!(pretty_print_float(0.99999999999999999999, false), \"1\");\n\n assert_eq!(pretty_print_float(0.9999, false), \"0.9999\");\n\n assert_eq!(\n\n pretty_print_float(-1e-5 - 0.00000000000000001, true),\n\n \"-1e-5\"\n\n );\n\n assert_eq!(\n\n pretty_print_float(-1e-5 - 0.00000000000000001, false),\n\n \"-0.00001\"\n\n );\n\n assert_eq!(pretty_print_float(1e100, true), \"1e100\");\n\n assert_eq!(pretty_print_float(1234567890f64, true), \"1234567890\");\n\n assert_eq!(pretty_print_float(1000000001f64, true), \"1e9\");\n\n }\n\n}\n", "file_path": "plotters/src/data/float.rs", "rank": 66, "score": 117771.29329295046 }, { "content": "/// The trait that indicates we have a ordered and ranged value\n\n/// Which is used to describe any 1D axis.\n\npub trait Ranged {\n\n /// This marker decides if Plotters default [ValueFormatter](trait.ValueFormatter.html) implementation should be used.\n\n /// This associated type can be one of the following two types:\n\n /// - [DefaultFormatting](struct.DefaultFormatting.html) will allow Plotters to automatically impl\n\n /// the formatter based 
on `Debug` trait, if `Debug` trait is not impl for the `Self::Value`,\n\n /// [ValueFormatter](trait.ValueFormatter.html) will not impl unless you impl it manually.\n\n ///\n\n /// - [NoDefaultFormatting](struct.NoDefaultFormatting.html) Disable the automatic `Debug`\n\n /// based value formatting. Thus you have to impl the\n\n /// [ValueFormatter](trait.ValueFormatter.html) manually.\n\n ///\n\n type FormatOption: DefaultValueFormatOption;\n\n\n\n /// The type of this value in this range specification\n\n type ValueType;\n\n\n\n /// This function maps the value to i32, which is the drawing coordinate\n\n fn map(&self, value: &Self::ValueType, limit: (i32, i32)) -> i32;\n\n\n\n /// This function gives the key points that we can draw a grid based on this\n", "file_path": "plotters/src/coord/ranged1d/mod.rs", "rank": 67, "score": 116992.02429681373 }, { "content": "pub fn check_color(left: BackendColor, right: RGBAColor) {\n\n assert_eq!(\n\n RGBAColor(left.rgb.0, left.rgb.1, left.rgb.2, left.alpha),\n\n right\n\n );\n\n}\n\n\n\npub struct MockedBackend {\n\n height: u32,\n\n width: u32,\n\n init_count: u32,\n\n pub draw_count: u32,\n\n pub num_draw_pixel_call: u32,\n\n pub num_draw_line_call: u32,\n\n pub num_draw_rect_call: u32,\n\n pub num_draw_circle_call: u32,\n\n pub num_draw_text_call: u32,\n\n pub num_draw_path_call: u32,\n\n pub num_fill_polygon_call: u32,\n\n check_draw_pixel: VecDeque<Box<dyn FnMut(RGBAColor, BackendCoord)>>,\n", "file_path": "plotters/src/drawing/backend_impl/mocked.rs", "rank": 68, "score": 116989.33086829985 }, { "content": "pub trait HistogramType {}\n\npub struct Vertical;\n\npub struct Horizontal;\n\n\n\nimpl HistogramType for Vertical {}\n\nimpl HistogramType for Horizontal {}\n\n\n\n/**\n\nPresents data in a histogram. 
Input data can be raw or aggregated.\n\n\n\n# Examples\n\n\n\n```\n\nuse plotters::prelude::*;\n\nlet data = [1, 1, 2, 2, 1, 3, 3, 2, 2, 1, 1, 2, 2, 2, 3, 3, 1, 2, 3];\n\nlet drawing_area = SVGBackend::new(\"histogram_vertical.svg\", (300, 200)).into_drawing_area();\n\ndrawing_area.fill(&WHITE).unwrap();\n\nlet mut chart_builder = ChartBuilder::on(&drawing_area);\n\nchart_builder.margin(5).set_left_and_bottom_label_area_size(20);\n\nlet mut chart_context = chart_builder.build_cartesian_2d((1..3).into_segmented(), 0..9).unwrap();\n", "file_path": "plotters/src/series/histogram.rs", "rank": 69, "score": 116625.5684197437 }, { "content": "#[test]\n\nfn entry_point() {\n\n main().unwrap()\n\n}\n", "file_path": "plotters/examples/two-scales.rs", "rank": 70, "score": 116541.1846263653 }, { "content": "#[test]\n\nfn entry_point() {\n\n main().unwrap()\n\n}\n", "file_path": "plotters/examples/area-chart.rs", "rank": 71, "score": 115683.01548029101 }, { "content": "/// The trait that provides method `Self::group_by` function which creates a\n\n/// `GroupBy` decorated ranged value.\n\npub trait ToGroupByRange: AsRangedCoord + Sized\n\nwhere\n\n Self::CoordDescType: DiscreteRanged,\n\n{\n\n /// Make a grouping ranged value, see the documentation for `GroupBy` for details.\n\n ///\n\n /// - `value`: The number of values we want to group it\n\n /// - **return**: The newly created grouping range specification\n\n fn group_by(self, value: usize) -> GroupBy<<Self as AsRangedCoord>::CoordDescType> {\n\n GroupBy(self.into(), value)\n\n }\n\n}\n\n\n\nimpl<T: AsRangedCoord + Sized> ToGroupByRange for T where T::CoordDescType: DiscreteRanged {}\n\n\n\nimpl<T: DiscreteRanged> DiscreteRanged for GroupBy<T> {\n\n fn size(&self) -> usize {\n\n (self.0.size() + self.1 - 1) / self.1\n\n }\n\n fn index_of(&self, value: &Self::ValueType) -> Option<usize> {\n", "file_path": "plotters/src/coord/ranged1d/combinators/group_by.rs", "rank": 72, "score": 115510.62742646142 }, { "content": "/// The 
trait indicates the coordinate is discrete\n\n/// This means we can bidirectionally map the range value to 0 to N\n\n/// in which N is the number of distinct values of the range.\n\n///\n\n/// This is useful since for a histgoram, this is an abstraction of bucket.\n\npub trait DiscreteRanged\n\nwhere\n\n Self: Ranged,\n\n{\n\n /// Get the number of element in the range\n\n /// Note: we assume that all the ranged discrete coordinate has finite value\n\n ///\n\n /// - **returns** The number of values in the range\n\n fn size(&self) -> usize;\n\n\n\n /// Map a value to the index\n\n ///\n\n /// Note: This function doesn't guareentee return None when the value is out of range.\n\n /// The only way to confirm the value is in the range is to examing the return value isn't\n\n /// larger than self.size.\n\n ///\n\n /// - `value`: The value to map\n\n /// - **returns** The index of the value\n\n fn index_of(&self, value: &Self::ValueType) -> Option<usize>;\n\n\n", "file_path": "plotters/src/coord/ranged1d/discrete.rs", "rank": 73, "score": 114905.9090030254 }, { "content": "/// Convert a range to a log scale coordinate spec\n\npub trait IntoLogRange {\n\n /// The type of the value\n\n type ValueType: LogScalable;\n\n\n\n /// Make the log scale coordinate\n\n fn log_scale(self) -> LogRangeExt<Self::ValueType>;\n\n}\n\n\n\nimpl<T: LogScalable> IntoLogRange for Range<T> {\n\n type ValueType = T;\n\n fn log_scale(self) -> LogRangeExt<T> {\n\n LogRangeExt {\n\n range: self,\n\n zero: 0.0,\n\n base: 10.0,\n\n }\n\n }\n\n}\n\n\n\n/// The logarithmic coodinate decorator.\n", "file_path": "plotters/src/coord/ranged1d/combinators/logarithmic.rs", "rank": 74, "score": 112910.76053681891 }, { "content": "/// Covert a path with >1px stroke width into polygon.\n\npub fn polygonize(vertices: &[BackendCoord], stroke_width: u32) -> Vec<BackendCoord> {\n\n if vertices.len() < 2 {\n\n return vec![];\n\n }\n\n\n\n let mut ret = vec![];\n\n\n\n traverse_vertices(vertices.iter(), stroke_width, 
|v| ret.push(v));\n\n traverse_vertices(vertices.iter().rev(), stroke_width, |v| ret.push(v));\n\n\n\n ret\n\n}\n", "file_path": "plotters-backend/src/rasterizer/path.rs", "rank": 75, "score": 111144.06720481742 }, { "content": "/// The trait for the type that can be converted into a ranged coordinate axis\n\npub trait AsRangedCoord: Sized {\n\n /// Type to describe a coordinate system\n\n type CoordDescType: Ranged<ValueType = Self::Value> + From<Self>;\n\n /// Type for values in the given coordinate system\n\n type Value;\n\n}\n\n\n\nimpl<T> AsRangedCoord for T\n\nwhere\n\n T: Ranged,\n\n{\n\n type CoordDescType = T;\n\n type Value = T::ValueType;\n\n}\n", "file_path": "plotters/src/coord/ranged1d/mod.rs", "rank": 76, "score": 110953.98234575146 }, { "content": "fn layout_multiline_text<'a, F: FnMut(&'a str)>(\n\n text: &'a str,\n\n max_width: u32,\n\n font: FontDesc<'a>,\n\n mut func: F,\n\n) {\n\n for line in text.lines() {\n\n if max_width == 0 || line.is_empty() {\n\n func(line);\n\n } else {\n\n let mut remaining = &line[0..];\n\n\n\n while !remaining.is_empty() {\n\n let mut left = 0;\n\n while left < remaining.len() {\n\n let width = font.box_size(&remaining[0..=left]).unwrap_or((0, 0)).0 as i32;\n\n\n\n if width > max_width as i32 {\n\n break;\n\n }\n", "file_path": "plotters/src/element/text.rs", "rank": 77, "score": 110341.51778710337 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn test_draw_line_out_of_range() {\n\n use plotters::prelude::*;\n\n let mut buffer = vec![0; 1000 * 1000 * 3];\n\n\n\n {\n\n let mut back = BitMapBackend::with_buffer(&mut buffer, (1000, 1000));\n\n\n\n back.draw_line((-1000, -1000), (2000, 2000), &WHITE.to_rgba())\n\n .unwrap();\n\n\n\n back.draw_line((999, -1000), (999, 2000), &WHITE.to_rgba())\n\n .unwrap();\n\n }\n\n\n\n for x in 0..1000 {\n\n for y in 0..1000 {\n\n let expected_value = if x == y || x == 999 { 255 } else { 0 };\n\n assert_eq!(buffer[(y * 1000 + x) as usize * 3 + 0], expected_value);\n\n assert_eq!(buffer[(y 
* 1000 + x) as usize * 3 + 1], expected_value);\n\n assert_eq!(buffer[(y * 1000 + x) as usize * 3 + 2], expected_value);\n\n }\n\n }\n\n}\n\n\n", "file_path": "plotters-bitmap/src/bitmap/test.rs", "rank": 78, "score": 110248.63715540271 }, { "content": "#[cfg(test)]\n\n#[test]\n\nfn test_draw_rect_out_of_range() {\n\n use plotters::prelude::*;\n\n let mut buffer = vec![0; 1099 * 1000 * 3];\n\n\n\n {\n\n let mut back = BitMapBackend::with_buffer(&mut buffer, (1000, 1000));\n\n\n\n back.draw_line((1100, 0), (1100, 999), &RED.to_rgba())\n\n .unwrap();\n\n back.draw_line((0, 1100), (999, 1100), &RED.to_rgba())\n\n .unwrap();\n\n back.draw_rect((1100, 0), (1100, 999), &RED.to_rgba(), true)\n\n .unwrap();\n\n }\n\n\n\n for x in 0..1000 {\n\n for y in 0..1000 {\n\n assert_eq!(buffer[(y * 1000 + x) as usize * 3 + 0], 0);\n\n assert_eq!(buffer[(y * 1000 + x) as usize * 3 + 1], 0);\n\n assert_eq!(buffer[(y * 1000 + x) as usize * 3 + 2], 0);\n\n }\n\n }\n\n}\n\n\n", "file_path": "plotters-bitmap/src/bitmap/test.rs", "rank": 79, "score": 110248.63715540271 }, { "content": "/// Makes a linspace coordinate from the ranged coordinates.\n\npub trait IntoLinspace: AsRangedCoord {\n\n /// Set the step value, make a linspace coordinate from the given range.\n\n /// By default the matching method use the exact match\n\n ///\n\n /// - `val`: The step value\n\n /// - **returns*: The newly created linspace\n\n fn step<S: Clone>(self, val: S) -> Linspace<Self::CoordDescType, S, Exact<Self::Value>>\n\n where\n\n Self::Value: Add<S, Output = Self::Value> + PartialOrd + Clone,\n\n {\n\n let mut ret = Linspace {\n\n step: val,\n\n inner: self.into(),\n\n grid_value: vec![],\n\n _phatom: PhantomData,\n\n };\n\n\n\n ret.compute_grid_values();\n\n\n\n ret\n", "file_path": "plotters/src/coord/ranged1d/combinators/linspace.rs", "rank": 80, "score": 109051.61146063078 }, { "content": "/// The trait for types that can decorated by [SegmentedCoord](struct.SegmentedCoord.html) decorator.\n\npub trait 
IntoSegmentedCoord: AsRangedCoord\n\nwhere\n\n Self::CoordDescType: DiscreteRanged,\n\n{\n\n /// Convert current ranged value into a segmented coordinate\n\n fn into_segmented(self) -> SegmentedCoord<Self::CoordDescType> {\n\n SegmentedCoord(self.into())\n\n }\n\n}\n\n\n\nimpl<R: AsRangedCoord> IntoSegmentedCoord for R where R::CoordDescType: DiscreteRanged {}\n\n\n\n/// The value that used by the segmented coordinate.\n\n#[derive(Clone, Debug)]\n\npub enum SegmentValue<T> {\n\n /// Means we are referring the exact position of value `T`\n\n Exact(T),\n\n /// Means we are referring the center of position `T` and the successor of `T`\n\n CenterOf(T),\n\n /// Referring the last dummy element\n", "file_path": "plotters/src/coord/ranged1d/discrete.rs", "rank": 81, "score": 109050.37310636396 }, { "content": "/// The trait that describe some time value. This is the uniformed abstraction that works\n\n/// for both Date, DateTime and Duration, etc.\n\npub trait TimeValue: Eq {\n\n type DateType: Datelike + PartialOrd;\n\n\n\n /// Returns the date that is no later than the time\n\n fn date_floor(&self) -> Self::DateType;\n\n /// Returns the date that is no earlier than the time\n\n fn date_ceil(&self) -> Self::DateType;\n\n /// Returns the maximum value that is earlier than the given date\n\n fn earliest_after_date(date: Self::DateType) -> Self;\n\n /// Returns the duration between two time value\n\n fn subtract(&self, other: &Self) -> Duration;\n\n /// Instantiate a date type for current time value;\n\n fn ymd(&self, year: i32, month: u32, date: u32) -> Self::DateType;\n\n /// Cast current date type into this type\n\n fn from_date(date: Self::DateType) -> Self;\n\n\n\n /// Map the coord spec\n\n fn map_coord(value: &Self, begin: &Self, end: &Self, limit: (i32, i32)) -> i32 {\n\n let total_span = end.subtract(begin);\n\n let value_span = value.subtract(begin);\n", "file_path": "plotters/src/coord/ranged1d/types/datetime.rs", "rank": 82, "score": 108708.76346328983 }, { 
"content": "/// Used to build a nested coordinate system.\n\npub trait BuildNestedCoord: AsRangedCoord\n\nwhere\n\n Self::CoordDescType: DiscreteRanged,\n\n{\n\n /// Builds a nested coordinate system.\n\n fn nested_coord<S: AsRangedCoord>(\n\n self,\n\n builder: impl Fn(<Self::CoordDescType as Ranged>::ValueType) -> S,\n\n ) -> NestedRange<Self::CoordDescType, S::CoordDescType> {\n\n let primary: Self::CoordDescType = self.into();\n\n assert!(primary.size() > 0);\n\n\n\n let secondary = primary\n\n .values()\n\n .map(|value| builder(value).into())\n\n .collect();\n\n\n\n NestedRange { primary, secondary }\n\n }\n\n}\n", "file_path": "plotters/src/coord/ranged1d/combinators/nested.rs", "rank": 83, "score": 105512.4965929063 }, { "content": "/// The trait for the types that can be converted into a partial axis\n\npub trait IntoPartialAxis: AsRangedCoord {\n\n /// Make the partial axis\n\n ///\n\n /// - `axis_range`: The range of the axis to be displayed\n\n /// - **returns**: The converted range specification\n\n fn partial_axis(\n\n self,\n\n axis_range: Range<<Self::CoordDescType as Ranged>::ValueType>,\n\n ) -> PartialAxis<Self::CoordDescType> {\n\n PartialAxis(self.into(), axis_range)\n\n }\n\n}\n\n\n\nimpl<R: AsRangedCoord> IntoPartialAxis for R {}\n\n\n\nimpl<R: Ranged> Ranged for PartialAxis<R>\n\nwhere\n\n R::ValueType: Clone,\n\n{\n\n type FormatOption = DefaultFormatting;\n", "file_path": "plotters/src/coord/ranged1d/combinators/partial_axis.rs", "rank": 84, "score": 105502.21842411094 }, { "content": "/// The trait for a hint provided to the key point algorithm used by the coordinate specs.\n\n/// The most important constraint is the `max_num_points` which means the algorithm could emit no more than specific number of key points\n\n/// `weight` is used to determine if this is used as a bold grid line or light grid line\n\n/// `bold_points` returns the max number of coresponding bold grid lines\n\npub trait KeyPointHint {\n\n /// Returns the max number of 
key points\n\n fn max_num_points(&self) -> usize;\n\n /// Returns the weight for this hint\n\n fn weight(&self) -> KeyPointWeight;\n\n /// Returns the point number constraint for the bold points\n\n fn bold_points(&self) -> usize {\n\n self.max_num_points()\n\n }\n\n}\n\n\n\nimpl KeyPointHint for usize {\n\n fn max_num_points(&self) -> usize {\n\n *self\n\n }\n\n\n\n fn weight(&self) -> KeyPointWeight {\n\n KeyPointWeight::Any\n\n }\n\n}\n", "file_path": "plotters/src/coord/ranged1d/mod.rs", "rank": 85, "score": 105271.39360454335 }, { "content": "/// The trait that converts a normal date coord into a monthly one\n\npub trait IntoMonthly<T: TimeValue> {\n\n /// Converts a normal date coord into a monthly one\n\n fn monthly(self) -> Monthly<T>;\n\n}\n\n\n", "file_path": "plotters/src/coord/ranged1d/types/datetime.rs", "rank": 86, "score": 105173.80131249559 }, { "content": "/// The trait that converts a normal date coord into a yearly one\n\npub trait IntoYearly<T: TimeValue> {\n\n /// Converts a normal date coord into a yearly one\n\n fn yearly(self) -> Yearly<T>;\n\n}\n\n\n\nimpl<T: TimeValue> IntoMonthly<T> for Range<T> {\n\n fn monthly(self) -> Monthly<T> {\n\n Monthly(self)\n\n }\n\n}\n\n\n\nimpl<T: TimeValue> IntoYearly<T> for Range<T> {\n\n fn yearly(self) -> Yearly<T> {\n\n Yearly(self)\n\n }\n\n}\n\n\n\n/// The ranged coordinate for the date and time\n\n#[derive(Clone)]\n\npub struct RangedDateTime<DT: Datelike + Timelike + TimeValue>(DT, DT);\n", "file_path": "plotters/src/coord/ranged1d/types/datetime.rs", "rank": 87, "score": 105173.80131249559 }, { "content": "/// The drawing backend trait, which implements the low-level drawing APIs.\n\n/// This trait has a set of default implementation. And the minimal requirement of\n\n/// implementing a drawing backend is implementing the `draw_pixel` function.\n\n///\n\n/// If the drawing backend supports vector graphics, the other drawing APIs should be\n\n/// override by the backend specific implementation. 
Otherwise, the default implementation\n\n/// will use the pixel-based approach to draw other types of low-level shapes.\n\npub trait DrawingBackend: Sized {\n\n /// The error type reported by the backend\n\n type ErrorType: Error + Send + Sync;\n\n\n\n /// Get the dimension of the drawing backend in pixels\n\n fn get_size(&self) -> (u32, u32);\n\n\n\n /// Ensure the backend is ready to draw\n\n fn ensure_prepared(&mut self) -> Result<(), DrawingErrorKind<Self::ErrorType>>;\n\n\n\n /// Finalize the drawing step and present all the changes.\n\n /// This is used as the real-time rendering support.\n\n /// The backend may implement in the following way, when `ensure_prepared` is called\n\n /// it checks if it needs a fresh buffer and `present` is called rendering all the\n\n /// pending changes on the screen.\n\n fn present(&mut self) -> Result<(), DrawingErrorKind<Self::ErrorType>>;\n\n\n\n /// Draw a pixel on the drawing backend\n\n /// - `point`: The backend pixel-based coordinate to draw\n\n /// - `color`: The color of the pixel\n", "file_path": "plotters-backend/src/lib.rs", "rank": 88, "score": 101860.14030471319 }, { "content": "fn generate_yearly_keypoints<T: TimeValue>(\n\n max_points: usize,\n\n mut start_year: i32,\n\n start_month: u32,\n\n mut end_year: i32,\n\n end_month: u32,\n\n builder: &T,\n\n) -> Vec<T> {\n\n if start_month > end_month {\n\n end_year -= 1;\n\n }\n\n\n\n let mut exp10 = 1;\n\n\n\n while (end_year - start_year + 1) as usize / (exp10 * 10) > max_points {\n\n exp10 *= 10;\n\n }\n\n\n\n let mut freq = exp10;\n\n\n", "file_path": "plotters/src/coord/ranged1d/types/datetime.rs", "rank": 89, "score": 100877.10727203278 }, { "content": "#[allow(type_alias_bounds)]\n\ntype DrawingAreaError<T: DrawingBackend> = DrawingAreaErrorKind<T::ErrorType>;\n\n\n\nimpl<DB: DrawingBackend> From<DB> for DrawingArea<DB, Shift> {\n\n fn from(backend: DB) -> Self {\n\n Self::with_rc_cell(Rc::new(RefCell::new(backend)))\n\n }\n\n}\n\n\n\nimpl<'a, DB: 
DrawingBackend> From<&'a Rc<RefCell<DB>>> for DrawingArea<DB, Shift> {\n\n fn from(backend: &'a Rc<RefCell<DB>>) -> Self {\n\n Self::with_rc_cell(backend.clone())\n\n }\n\n}\n\n\n", "file_path": "plotters/src/drawing/area.rs", "rank": 90, "score": 98241.24857167502 }, { "content": "/// A type which can be converted into a root drawing area\n\npub trait IntoDrawingArea: DrawingBackend + Sized {\n\n /// Convert the type into a root drawing area\n\n fn into_drawing_area(self) -> DrawingArea<Self, Shift>;\n\n}\n\n\n\nimpl<T: DrawingBackend> IntoDrawingArea for T {\n\n fn into_drawing_area(self) -> DrawingArea<T, Shift> {\n\n self.into()\n\n }\n\n}\n\n\n\nimpl<DB: DrawingBackend, X: Ranged, Y: Ranged> DrawingArea<DB, Cartesian2d<X, Y>> {\n\n /// Draw the mesh on a area\n\n pub fn draw_mesh<DrawFunc, YH: KeyPointHint, XH: KeyPointHint>(\n\n &self,\n\n mut draw_func: DrawFunc,\n\n y_count_max: YH,\n\n x_count_max: XH,\n\n ) -> Result<(), DrawingAreaErrorKind<DB::ErrorType>>\n\n where\n", "file_path": "plotters/src/drawing/area.rs", "rank": 91, "score": 96372.3057655617 }, { "content": "fn pdf(x: f64, y: f64) -> f64 {\n\n const SDX: f64 = 0.1;\n\n const SDY: f64 = 0.1;\n\n const A: f64 = 5.0;\n\n let x = x as f64 / 10.0;\n\n let y = y as f64 / 10.0;\n\n A * (-x * x / 2.0 / SDX / SDX - y * y / 2.0 / SDY / SDY).exp()\n\n}\n\n\n\nconst OUT_FILE_NAME: &'static str = \"plotters-doc-data/3d-plot2.gif\";\n", "file_path": "plotters/examples/3d-plot2.rs", "rank": 92, "score": 89935.56859402524 }, { "content": "/// Lazily load font data. 
Font type doesn't own actual data, which\n\n/// lives in the cache.\n\nfn load_font_data(face: FontFamily, style: FontStyle) -> FontResult<FontExt> {\n\n let key = match style {\n\n FontStyle::Normal => Cow::Borrowed(face.as_str()),\n\n _ => Cow::Owned(format!(\"{}, {}\", face.as_str(), style.as_str())),\n\n };\n\n\n\n // First, we try to find the font object for current thread\n\n if let Some(font_object) = FONT_OBJECT_CACHE.with(|font_object_cache| {\n\n font_object_cache\n\n .borrow()\n\n .get(Borrow::<str>::borrow(&key))\n\n .map(Clone::clone)\n\n }) {\n\n return Ok(font_object);\n\n }\n\n\n\n // Then we need to check if the data cache contains the font data\n\n let cache = DATA_CACHE.read().unwrap();\n\n if let Some(data) = cache.get(Borrow::<str>::borrow(&key)) {\n\n return data.clone().map(|handle| {\n", "file_path": "plotters/src/style/font/ttf.rs", "rank": 93, "score": 87087.89101274313 }, { "content": "fn draw_annulus<B: DrawingBackend, S: BackendStyle>(\n\n b: &mut B,\n\n center: BackendCoord,\n\n radius: (u32, u32),\n\n style: &S,\n\n) -> Result<(), DrawingErrorKind<B::ErrorType>> {\n\n let a0 = ((radius.0 - radius.1) as f64).min(radius.0 as f64 * (1.0 - 1.0 / (2f64).sqrt()));\n\n let a1 = (radius.0 as f64 - a0 - radius.1 as f64).max(0.0);\n\n\n\n check_result!(draw_part_a::<B, _>(a0, radius.0, |p, r| draw_sweep_line(\n\n b,\n\n style,\n\n center,\n\n (0, 1),\n\n p,\n\n r\n\n )));\n\n check_result!(draw_part_a::<B, _>(a0, radius.0, |p, r| draw_sweep_line(\n\n b,\n\n style,\n", "file_path": "plotters-backend/src/rasterizer/circle.rs", "rank": 94, "score": 86768.16090591908 }, { "content": " }\n\n\n\n /// This function has been renamed to [`ChartBuilder::build_cartesian_2d()`] and is to be removed in the future.\n\n #[allow(clippy::type_complexity)]\n\n #[deprecated(\n\n note = \"`build_ranged` has been renamed to `build_cartesian_2d` and is to be removed in the future.\"\n\n )]\n\n pub fn build_ranged<X: AsRangedCoord, Y: AsRangedCoord>(\n\n &mut 
self,\n\n x_spec: X,\n\n y_spec: Y,\n\n ) -> Result<\n\n ChartContext<'a, DB, Cartesian2d<X::CoordDescType, Y::CoordDescType>>,\n\n DrawingAreaErrorKind<DB::ErrorType>,\n\n > {\n\n self.build_cartesian_2d(x_spec, y_spec)\n\n }\n\n\n\n /**\n\n Builds a chart with a 2D Cartesian coordinate system.\n", "file_path": "plotters/src/chart/builder.rs", "rank": 98, "score": 42.89362133998133 }, { "content": "use super::{ProjectionMatrix, ProjectionMatrixBuilder};\n\nuse crate::coord::ranged1d::Ranged;\n\nuse crate::coord::CoordTranslate;\n\nuse plotters_backend::BackendCoord;\n\n\n\nuse std::ops::Range;\n\n\n\n/// A 3D cartesian coordinate system\n\n#[derive(Clone)]\n\npub struct Cartesian3d<X: Ranged, Y: Ranged, Z: Ranged> {\n\n pub(crate) logic_x: X,\n\n pub(crate) logic_y: Y,\n\n pub(crate) logic_z: Z,\n\n coord_size: (i32, i32, i32),\n\n projection: ProjectionMatrix,\n\n}\n\n\n\nimpl<X: Ranged, Y: Ranged, Z: Ranged> Cartesian3d<X, Y, Z> {\n\n fn compute_default_size(actual_x: Range<i32>, actual_y: Range<i32>) -> i32 {\n\n (actual_x.end - actual_x.start).min(actual_y.end - actual_y.start) * 4 / 5\n", "file_path": "plotters/src/coord/ranged3d/cartesian3d.rs", "rank": 99, "score": 41.952058069119204 } ]
Rust
src/fs/mmap.rs
Ryanmtate/future-aio
be6fd0ab83358c808248ffb7f2b5d127a3aaa4cc
use std::fs::OpenOptions; use std::io::Error as IoError; use std::path::Path; use std::sync::Arc; use std::sync::RwLock; use std::sync::RwLockReadGuard; use std::sync::RwLockWriteGuard; use memmap::Mmap; use memmap::MmapMut; use crate::task::spawn_blocking; use crate::fs::File; pub struct MemoryMappedMutFile(Arc<RwLock<MmapMut>>); impl MemoryMappedMutFile { pub async fn create(m_path: &Path, len: u64) -> Result<(Self,File), IoError> { let owned_path = m_path.to_owned(); let (m_map, mfile,_) = spawn_blocking ( move || { let inner_path = owned_path.clone(); let mfile = OpenOptions::new() .read(true) .write(true) .create(true) .open(inner_path) .unwrap(); mfile.set_len(len)?; unsafe { MmapMut::map_mut(&mfile) }.map(|mm_file| (mm_file, mfile,owned_path)) }).await?; Ok(( MemoryMappedMutFile::from_mmap(m_map), mfile.into() )) } fn from_mmap(mmap: MmapMut) -> MemoryMappedMutFile { MemoryMappedMutFile(Arc::new(RwLock::new(mmap))) } pub fn inner(&self) -> RwLockReadGuard<MmapMut> { self.0.read().unwrap() } pub fn inner_map(&self) -> Arc<RwLock<MmapMut>> { self.0.clone() } pub fn mut_inner(&self) -> RwLockWriteGuard<MmapMut> { self.0.write().unwrap() } pub fn write_bytes(&mut self, pos: usize, bytes: &Vec<u8>) { let mut m_file = self.mut_inner(); let m_array = &mut m_file[..]; for i in 0..bytes.len() { m_array[i + pos] = bytes[i]; } } pub async fn flush_ft(&self) -> Result<(),IoError> { let inner = self.0.clone(); spawn_blocking(move || { let inner_map = inner.write().unwrap(); let res = inner_map.flush(); drop(inner_map); res }).await } pub async fn flush_async_ft(&self) -> Result<(), IoError> { let inner = self.0.clone(); spawn_blocking(move || { let inner_map = inner.write().unwrap(); inner_map.flush_async() }).await } pub async fn flush_range_ft( &self, offset: usize, len: usize, ) -> Result<(), IoError> { let inner = self.0.clone(); spawn_blocking(move || { let inner_map = inner.write().unwrap(); inner_map.flush_range(offset, len) }).await } } pub struct 
MemoryMappedFile(Arc<RwLock<Mmap>>); impl MemoryMappedFile { pub async fn open<P>(path: P,min_len: u64) -> Result<(Self, File), IoError> where P: AsRef<Path> { let m_path = path.as_ref().to_owned(); let (m_map, mfile,_) = spawn_blocking (move || { let mfile = OpenOptions::new().read(true).open(&m_path).unwrap(); let meta = mfile.metadata().unwrap(); if meta.len() == 0 { mfile.set_len(min_len)?; } unsafe { Mmap::map(&mfile) }.map(|mm_file| (mm_file, mfile,m_path)) }).await?; Ok(( MemoryMappedFile::from_mmap(m_map), mfile.into() )) } fn from_mmap(mmap: Mmap) -> MemoryMappedFile { MemoryMappedFile(Arc::new(RwLock::new(mmap))) } pub fn inner(&self) -> RwLockReadGuard<Mmap> { self.0.read().unwrap() } } #[cfg(test)] mod tests { use std::env::temp_dir; use std::fs::File; use std::io::Error as IoError; use std::io::Read; use flv_util::fixture::ensure_clean_file; use crate::test_async; use super::MemoryMappedMutFile; #[test_async] async fn test_mmap_write_slice() -> Result<(),IoError> { let index_path = temp_dir().join("test.index"); ensure_clean_file(&index_path.clone()); let result = MemoryMappedMutFile::create(&index_path,3).await; assert!(result.is_ok()); let (mm_file, _) = result.unwrap(); { let mut mm = mm_file.mut_inner(); let src = [0x01, 0x02, 0x03]; mm.copy_from_slice(&src); } mm_file.flush_ft().await?; let mut f = File::open(&index_path)?; let mut buffer = vec![0; 3]; f.read(&mut buffer)?; assert_eq!(buffer[0], 0x01); assert_eq!(buffer[1], 0x02); assert_eq!(buffer[2], 0x03); Ok(()) } #[test_async] async fn test_mmap_write_pair_slice() -> Result<(),IoError> { let index_path = temp_dir().join("pairslice.index"); ensure_clean_file(&index_path.clone()); let result = MemoryMappedMutFile::create(&index_path, 24).await; assert!(result.is_ok()); let (mm_file, _) = result.unwrap(); { let mut mm = mm_file.mut_inner(); let src: [(u32, u32); 3] = [(5, 10), (11, 22), (50, 100)]; let (_, bytes, _) = unsafe { src.align_to::<u8>() }; assert_eq!(bytes.len(), 24); 
mm.copy_from_slice(&bytes); } mm_file.flush_ft().await?; let (mm_file2, _) = MemoryMappedMutFile::create(&index_path, 24).await?; let mm2 = mm_file2.mut_inner(); let (_, pairs, _) = unsafe { mm2.align_to::<(u32, u32)>() }; assert_eq!(pairs.len(), 3); assert_eq!(pairs[0].0, 5); assert_eq!(pairs[2].1, 100); Ok(()) } #[test_async] async fn test_mmap_write_with_pos() -> Result<(),IoError> { let index_path = temp_dir().join("testpos.index"); ensure_clean_file(&index_path.clone()); let (mut mm_file, _) = MemoryMappedMutFile::create(&index_path, 10).await?; let src = vec![0x05, 0x10, 0x44]; mm_file.write_bytes(5, &src); mm_file.flush_ft().await?; let mut f = File::open(&index_path)?; let mut buffer = vec![0; 10]; f.read(&mut buffer)?; assert_eq!(buffer[5], 0x05); assert_eq!(buffer[6], 0x10); assert_eq!(buffer[7], 0x44); Ok(()) } /* use std::fs::OpenOptions; use std::path::PathBuf; use memmap::MmapMut; #[test] fn debug_kafka_inspect() -> io::Result<()> { let path = "/tmp/kafka-logs/test-0/00000000000000000000.index"; let file = OpenOptions::new() .read(true) .write(true) .open(path)?; let mut mmap = unsafe { MmapMut::map_mut(&file)? }; println!("file size: {}",mmap.len()); Ok(()) } #[test] fn debug_file_inspect() -> io::Result<()> { let path = "/tmp/kafka-logs/test-0/00000000000000000000.index"; let file = File::open(path)?; let metadata = file.metadata()?; println!("file len: {:#?}",metadata.len()); Ok(()) } */ }
use std::fs::OpenOptions; use std::io::Error as IoError; use std::path::Path; use std::sync::Arc; use std::sync::RwLock; use std::sync::RwLockReadGuard; use std::sync::RwLockWriteGuard; use memmap::Mmap; use memmap::MmapMut; use crate::task::spawn_blocking; use crate::fs::File; pub struct MemoryMappedMutFile(Arc<RwLock<MmapMut>>); impl MemoryMappedMutFile { pub async fn create(m_path: &Path, len: u64) -> Result<(Self,File), IoError> { let owned_path = m_path.to_owned(); let (m_map, mfile,_) = spawn_blocking ( move || { let inner_path = owned_path.clone(); let mfile = OpenOptions::new() .read(true) .write(true) .create(true) .open(inner_path) .unwrap(); mfile.set_len(len)?; unsafe { MmapMut::map_mut(&mfile) }.map(|mm_file| (mm_file, mfile,owned_path)) }).await?; Ok(( MemoryMappedMutFile::from_mmap(m_map), mfile.into() )) } fn from_mmap(mmap: MmapMut) -> MemoryMappedMutFile { MemoryMappedMutFile(Arc::new(RwLock::new(mmap))) } pub fn inner(&self) -> RwLockReadGuard<MmapMut> { self.0.read().unwrap() } pub fn inner_map(&self) -> Arc<RwLock<MmapMut>> { self.0.clone() } pub fn mut_inner(&self) -> RwLockWriteGuard<MmapMut> { self.0.write().unwrap() } pub fn write_bytes(&mut self, pos: usize, bytes: &Vec<u8>) { let mut m_file = self.mut_inner(); let m_array = &mut m_file[..]; for i in 0..bytes.len() { m_array[i + pos] = bytes[i]; } } pub async fn flush_ft(&self) -> Result<(),IoError> { let inner = self.0.clone(); spawn_blocking(move || { let inner_map = inner.write().unwrap(); let res = inner_map.flush(); drop(inner_map); res }).await } pub async fn flush_async_ft(&self) -> Result<(), IoError> { let inner = self.0.clone(); spawn_blocking(move || { let inner_map = inner.write().unwrap(); inner_map.flush_async() }).await } pub async fn flush_range_ft( &self, offset: usize, len: usize, ) -> Result<(), IoError> { let inner = self.0.clone(); spawn_blocking(move || { let inner_map = inner.write().unwrap(); inner_map.flush_range(offset, len) }).await } } pub struct 
MemoryMappedFile(Arc<RwLock<Mmap>>); impl MemoryMappedFile {
fn from_mmap(mmap: Mmap) -> MemoryMappedFile { MemoryMappedFile(Arc::new(RwLock::new(mmap))) } pub fn inner(&self) -> RwLockReadGuard<Mmap> { self.0.read().unwrap() } } #[cfg(test)] mod tests { use std::env::temp_dir; use std::fs::File; use std::io::Error as IoError; use std::io::Read; use flv_util::fixture::ensure_clean_file; use crate::test_async; use super::MemoryMappedMutFile; #[test_async] async fn test_mmap_write_slice() -> Result<(),IoError> { let index_path = temp_dir().join("test.index"); ensure_clean_file(&index_path.clone()); let result = MemoryMappedMutFile::create(&index_path,3).await; assert!(result.is_ok()); let (mm_file, _) = result.unwrap(); { let mut mm = mm_file.mut_inner(); let src = [0x01, 0x02, 0x03]; mm.copy_from_slice(&src); } mm_file.flush_ft().await?; let mut f = File::open(&index_path)?; let mut buffer = vec![0; 3]; f.read(&mut buffer)?; assert_eq!(buffer[0], 0x01); assert_eq!(buffer[1], 0x02); assert_eq!(buffer[2], 0x03); Ok(()) } #[test_async] async fn test_mmap_write_pair_slice() -> Result<(),IoError> { let index_path = temp_dir().join("pairslice.index"); ensure_clean_file(&index_path.clone()); let result = MemoryMappedMutFile::create(&index_path, 24).await; assert!(result.is_ok()); let (mm_file, _) = result.unwrap(); { let mut mm = mm_file.mut_inner(); let src: [(u32, u32); 3] = [(5, 10), (11, 22), (50, 100)]; let (_, bytes, _) = unsafe { src.align_to::<u8>() }; assert_eq!(bytes.len(), 24); mm.copy_from_slice(&bytes); } mm_file.flush_ft().await?; let (mm_file2, _) = MemoryMappedMutFile::create(&index_path, 24).await?; let mm2 = mm_file2.mut_inner(); let (_, pairs, _) = unsafe { mm2.align_to::<(u32, u32)>() }; assert_eq!(pairs.len(), 3); assert_eq!(pairs[0].0, 5); assert_eq!(pairs[2].1, 100); Ok(()) } #[test_async] async fn test_mmap_write_with_pos() -> Result<(),IoError> { let index_path = temp_dir().join("testpos.index"); ensure_clean_file(&index_path.clone()); let (mut mm_file, _) = MemoryMappedMutFile::create(&index_path, 
10).await?; let src = vec![0x05, 0x10, 0x44]; mm_file.write_bytes(5, &src); mm_file.flush_ft().await?; let mut f = File::open(&index_path)?; let mut buffer = vec![0; 10]; f.read(&mut buffer)?; assert_eq!(buffer[5], 0x05); assert_eq!(buffer[6], 0x10); assert_eq!(buffer[7], 0x44); Ok(()) } /* use std::fs::OpenOptions; use std::path::PathBuf; use memmap::MmapMut; #[test] fn debug_kafka_inspect() -> io::Result<()> { let path = "/tmp/kafka-logs/test-0/00000000000000000000.index"; let file = OpenOptions::new() .read(true) .write(true) .open(path)?; let mut mmap = unsafe { MmapMut::map_mut(&file)? }; println!("file size: {}",mmap.len()); Ok(()) } #[test] fn debug_file_inspect() -> io::Result<()> { let path = "/tmp/kafka-logs/test-0/00000000000000000000.index"; let file = File::open(path)?; let metadata = file.metadata()?; println!("file len: {:#?}",metadata.len()); Ok(()) } */ }
pub async fn open<P>(path: P,min_len: u64) -> Result<(Self, File), IoError> where P: AsRef<Path> { let m_path = path.as_ref().to_owned(); let (m_map, mfile,_) = spawn_blocking (move || { let mfile = OpenOptions::new().read(true).open(&m_path).unwrap(); let meta = mfile.metadata().unwrap(); if meta.len() == 0 { mfile.set_len(min_len)?; } unsafe { Mmap::map(&mfile) }.map(|mm_file| (mm_file, mfile,m_path)) }).await?; Ok(( MemoryMappedFile::from_mmap(m_map), mfile.into() )) }
function_block-full_function
[ { "content": "#[proc_macro_attribute]\n\npub fn test_async(_attr: TokenStream, item: TokenStream) -> TokenStream {\n\n\n\n let input = syn::parse_macro_input!(item as ItemFn);\n\n let name = &input.sig.ident;\n\n let sync_name = format!(\"{}_sync\",name);\n\n let out_fn_iden = Ident::new(&sync_name, Span::call_site());\n\n\n\n let expression = quote! {\n\n\n\n #[test]\n\n fn #out_fn_iden() {\n\n\n\n ::flv_future_aio::util::init_logger();\n\n \n\n #input\n\n \n\n let ft = async {\n\n #name().await\n\n };\n\n\n\n if let Err(err) = ::flv_future_aio::task::run_block_on(ft) {\n\n assert!(false,\"error: {:?}\",err);\n\n } \n\n \n\n }\n\n };\n\n \n\n expression.into()\n\n \n\n}", "file_path": "async-test-derive/src/lib.rs", "rank": 0, "score": 69563.8039997589 }, { "content": "fn to_bytes(bytes: Vec<u8>) -> Bytes {\n\n let mut buf = BytesMut::with_capacity(bytes.len());\n\n buf.put_slice(&bytes);\n\n buf.freeze()\n\n}\n\n\n\n\n\n\n\n\n\n#[test_async]\n\nasync fn test_async_tcp() -> Result<(), Error> {\n\n let addr = \"127.0.0.1:9998\".parse::<SocketAddr>().expect(\"parse\");\n\n\n\n let server_ft = async {\n\n \n\n debug!(\"server: binding\");\n\n let listener = TcpListener::bind(&addr).await?;\n\n debug!(\"server: successfully binding. 
waiting for incoming\");\n\n let mut incoming = listener.incoming();\n\n while let Some(stream) = incoming.next().await {\n", "file_path": "src/net/tcp_stream.rs", "rank": 1, "score": 64590.54437050902 }, { "content": "#[async_trait]\n\npub trait AsyncFile {\n\n async fn reset_to_beginning(&mut self) -> Result<(), IoError>;\n\n\n\n fn raw_slice(&self, position: u64, len: u64) -> AsyncFileSlice;\n\n\n\n async fn as_slice(\n\n &self,\n\n position: u64,\n\n desired_len_opt: Option<u64>,\n\n ) -> Result<AsyncFileSlice, IoError>;\n\n}\n\n\n\n#[async_trait]\n\nimpl AsyncFile for File {\n\n async fn reset_to_beginning(&mut self) -> Result<(), IoError> {\n\n self.seek(SeekFrom::Start(0)).await.map(|_| ())\n\n }\n\n\n\n /// return raw slice with fiel descriptor, this doesn't not check\n\n #[cfg(unix)]\n", "file_path": "src/fs/async_file.rs", "rank": 2, "score": 61814.86501525424 }, { "content": "pub trait AsyncWrite2: AsyncWrite + Unpin {\n\n fn write_buf_all<'a, B>(&'a mut self, buf: B) -> WriteBufAll<'a, Self, B>\n\n where\n\n B: AsRef<[u8]>,\n\n {\n\n WriteBufAll::new(self, buf)\n\n }\n\n}\n\n\n\n\n\nimpl AsyncWrite2 for File{}", "file_path": "src/write.rs", "rank": 3, "score": 58588.27044006919 }, { "content": " #[async_trait]\n\n pub trait AsyncDispatcher: Sized + Send + 'static {\n\n \n\n fn run(self) {\n\n let ft = async move {\n\n self.dispatch_loop().await;\n\n };\n\n\n\n spawn(ft);\n\n }\n\n\n\n /// perform dispatch loop\n\n /// this is where dispatcher should perform select\n\n async fn dispatch_loop(mut self);\n\n }\n\n\n\n\n\n\n\n #[cfg(test)]\n\n mod test {\n\n\n", "file_path": "src/actor/mod.rs", "rank": 4, "score": 47708.52515197771 }, { "content": "type InnerFile = Arc<RwLock<AsyncFile>>;\n\n\n\n#[derive(Clone)]\n\npub struct SharedAsyncFile {\n\n inner: InnerFile,\n\n}\n\n\n\nimpl SharedAsyncFile {\n\n fn new(file: AsyncFile) -> Self {\n\n SharedAsyncFile {\n\n inner: Arc::new(RwLock::new(file)),\n\n }\n\n }\n\n\n\n fn read<'a>(&self, buf: &'a mut [u8]) -> 
SharedAsyncFileRead<'a> {\n\n SharedAsyncFileRead {\n\n inner: self.inner.clone(),\n\n buf,\n\n }\n\n }\n", "file_path": "src/fs/shared_file.rs", "rank": 5, "score": 38882.57248027461 }, { "content": " #[async_trait]\n\n pub trait TcpDomainConnector {\n\n\n\n type WrapperStream: AsyncRead + AsyncWrite + Unpin + Send;\n\n\n\n async fn connect(&self,domain: &str) -> Result<(Self::WrapperStream,RawFd),IoError>;\n\n }\n\n\n\n\n\n #[derive(Clone)]\n\n pub struct DefaultTcpDomainConnector{}\n\n\n\n impl DefaultTcpDomainConnector {\n\n pub fn new() -> Self {\n\n Self{}\n\n }\n\n }\n\n\n\n #[async_trait]\n\n impl TcpDomainConnector for DefaultTcpDomainConnector {\n\n\n", "file_path": "src/net/mod.rs", "rank": 6, "score": 30532.97294138116 }, { "content": "#[async_trait]\n\npub trait ZeroCopyWrite {\n\n async fn zero_copy_write(&mut self, source: &AsyncFileSlice) -> Result<usize, SendFileError>;\n\n}\n\n\n\n#[async_trait]\n\nimpl <T> ZeroCopyWrite for T where T: AsRawFd + Send {\n\n\n\n async fn zero_copy_write(&mut self, source: &AsyncFileSlice) -> Result<usize, SendFileError> {\n\n let size = source.len();\n\n let target_fd = self.as_raw_fd();\n\n let source_fd = source.fd();\n\n\n\n #[cfg(target_os = \"linux\")]\n\n let ft = {\n\n let offset = source.position() as off_t;\n\n\n\n spawn_blocking(move || {\n\n\n\n let mut total_transferred: usize = 0; // total bytes transferred so far\n\n let mut current_offset = offset;\n", "file_path": "src/zero_copy.rs", "rank": 7, "score": 30532.97294138116 }, { "content": " assert_eq!(output.len(), 4);\n\n let contents = String::from_utf8(output).expect(\"conversion\");\n\n assert_eq!(contents, \"xyzt\");\n\n\n\n Ok(())\n\n }\n\n\n\n #[test_async]\n\n async fn async_file_write_read_same() -> Result<(), IoError> {\n\n let test_file_path = temp_dir().join(\"read_write_test\");\n\n ensure_clean_file(&test_file_path);\n\n\n\n let mut output = Vec::new();\n\n let mut file = file_util::open_read_write(&test_file_path).await?;\n\n 
file.write_all(b\"test\").await?;\n\n file.seek(SeekFrom::Start(0)).await?;\n\n file.read_to_end(&mut output).await?;\n\n assert_eq!(output.len(), 4);\n\n let contents = String::from_utf8(output).expect(\"conversion\");\n\n assert_eq!(contents, \"test\");\n", "file_path": "src/fs/async_file.rs", "rank": 8, "score": 26359.09786029337 }, { "content": "\n\n Ok(())\n\n }\n\n\n\n #[test_async]\n\n async fn async_file_write_append_same() -> Result<(), IoError> {\n\n let test_file_path = temp_dir().join(\"read_append_test\");\n\n ensure_clean_file(&test_file_path);\n\n\n\n let mut output = Vec::new();\n\n let mut file = file_util::open_read_append(&test_file_path).await?;\n\n file.write_all(b\"test\").await?;\n\n file.seek(SeekFrom::Start(0)).await?;\n\n file.write_all(b\"xyz\").await?;\n\n file.seek(SeekFrom::Start(0)).await?;\n\n file.read_to_end(&mut output).await?;\n\n assert_eq!(output.len(), 7);\n\n let contents = String::from_utf8(output).expect(\"conversion\");\n\n assert_eq!(contents, \"testxyz\");\n\n\n", "file_path": "src/fs/async_file.rs", "rank": 9, "score": 26359.087104392594 }, { "content": " fn raw_slice(&self, position: u64, len: u64) -> AsyncFileSlice {\n\n AsyncFileSlice::new(self.as_raw_fd(), position, len)\n\n }\n\n\n\n /// Extract slice of file using file descriptor\n\n /// if desired len is not supplied, compute the len from metadata\n\n #[cfg(unix)]\n\n async fn as_slice(\n\n &self,\n\n position: u64,\n\n desired_len_opt: Option<u64>,\n\n ) -> Result<AsyncFileSlice, IoError> {\n\n let metadata = self.metadata().await?;\n\n let len = metadata.len();\n\n if position >= len {\n\n return Err(IoError::new(\n\n ErrorKind::UnexpectedEof,\n\n \"position is greater than available len\",\n\n ));\n\n }\n", "file_path": "src/fs/async_file.rs", "rank": 10, "score": 26358.425522282643 }, { "content": "\n\n #[test_async]\n\n async fn file_multiple_overwrite() -> Result<(), IoError> {\n\n let test_file_path = temp_dir().join(\"file_write_test\");\n\n 
ensure_clean_file(&test_file_path);\n\n\n\n // write 4 byte string\n\n let mut file = file_util::create(&test_file_path).await?;\n\n file.seek(SeekFrom::Start(0)).await?;\n\n file.write_all(b\"test\").await?;\n\n file.sync_all().await?;\n\n\n\n // go back beginning and overwrite\n\n let mut file = file_util::create(&test_file_path).await?;\n\n file.seek(SeekFrom::Start(0)).await?;\n\n file.write_all(b\"xyzt\").await?;\n\n file.sync_all().await?;\n\n let mut output = Vec::new();\n\n let mut file = file_util::open(&test_file_path).await?;\n\n file.read_to_end(&mut output).await?;\n", "file_path": "src/fs/async_file.rs", "rank": 11, "score": 26357.923480319667 }, { "content": " Ok(())\n\n }\n\n\n\n #[test_async]\n\n async fn test_as_slice() -> Result<(), IoError> {\n\n let file = file_util::open(\"test-data/apirequest.bin\").await?;\n\n let f_slice = file.as_slice(0, None).await?;\n\n assert_eq!(f_slice.position(), 0);\n\n assert_eq!(f_slice.len(), 30);\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/fs/async_file.rs", "rank": 12, "score": 26356.650649349067 }, { "content": "\n\n\n\n // sync seek write and read\n\n // this is used for implementating async version\n\n #[test]\n\n fn test_sync_seek_write() -> Result<(), std::io::Error> {\n\n let mut option = std::fs::OpenOptions::new();\n\n option.read(true).write(true).create(true).append(false);\n\n let mut file = option.open(\"/tmp/x1\")?;\n\n file.seek(SeekFrom::Start(0))?;\n\n file.write_all(b\"test\")?;\n\n // file.write_all(b\"kkk\")?;\n\n file.sync_all()?;\n\n\n\n let mut f2 = File::open(\"/tmp/x1\")?;\n\n let mut contents = String::new();\n\n f2.read_to_string(&mut contents)?;\n\n assert_eq!(contents, \"test\");\n\n Ok(())\n\n }\n", "file_path": "src/fs/async_file.rs", "rank": 13, "score": 26352.602344064308 }, { "content": " let slice_len = if let Some(desired_len) = desired_len_opt {\n\n if position + desired_len >= len {\n\n return Err(IoError::new(\n\n ErrorKind::UnexpectedEof,\n\n \"not available bytes\",\n\n 
));\n\n }\n\n desired_len\n\n } else {\n\n len - position\n\n };\n\n\n\n trace!(\"file trace: position: {}, len: {}\", position, len);\n\n\n\n Ok(self.raw_slice(position, slice_len))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "src/fs/async_file.rs", "rank": 14, "score": 26351.768730078504 }, { "content": "use std::io::Error as IoError;\n\nuse std::io::ErrorKind;\n\nuse std::io::SeekFrom;\n\n\n\nuse std::os::unix::io::AsRawFd;\n\n\n\nuse log::trace;\n\nuse async_trait::async_trait;\n\n\n\n\n\n#[cfg(feature = \"asyncstd\")]\n\nuse async_std::io::prelude::SeekExt;\n\n\n\nuse super::AsyncFileSlice;\n\nuse super::File;\n\n\n\n#[async_trait]\n", "file_path": "src/fs/async_file.rs", "rank": 15, "score": 26350.35584707015 }, { "content": "\n\n use std::env::temp_dir;\n\n use std::fs::File;\n\n use std::io::Error as IoError;\n\n use std::io::Write;\n\n use std::io::Seek as _;\n\n use std::io::SeekFrom;\n\n use std::io::Read;\n\n\n\n\n\n #[cfg(feature = \"asyncstd\")]\n\n use async_std::io::prelude::SeekExt;\n\n \n\n use flv_util::fixture::ensure_clean_file;\n\n\n\n use crate::test_async;\n\n use crate::io::AsyncReadExt;\n\n use crate::io::AsyncWriteExt;\n\n use crate::fs::util as file_util;\n\n use super::AsyncFile;\n", "file_path": "src/fs/async_file.rs", "rank": 16, "score": 26350.173868317088 }, { "content": "extern crate proc_macro;\n\n\n\nuse proc_macro::TokenStream;\n\nuse quote::quote;\n\nuse syn::ItemFn;\n\nuse syn::Ident;\n\nuse proc_macro2::Span;\n\n\n\n\n\n#[proc_macro_attribute]\n", "file_path": "async-test-derive/src/lib.rs", "rank": 17, "score": 25033.242199661046 }, { "content": "# Friendly, test runner for async function\n\n\n\nTo run a test, annotate as below:\n\n```\n\n#[test_async]\n\nasync fn test_sum() -> Result<(),std::io::Result> {\n\n assert(true,\"I am alive);\n\n Ok(())\n\n}\n", "file_path": "async-test-derive/README.md", "rank": 18, "score": 16160.543409691838 }, { "content": " &self.path\n\n }\n\n\n\n pub fn inner(&self) -> 
&File {\n\n &self.writer\n\n }\n\n\n\n pub fn mut_inner(&mut self) -> &mut File {\n\n &mut self.writer\n\n }\n\n\n\n #[allow(unused)]\n\n #[cfg(unix)]\n\n pub fn slice_from(&self, position: u64, len: u64) -> Result<AsyncFileSlice, IoError> {\n\n Ok(self.writer.raw_slice(position, len))\n\n }\n\n}\n\n\n\n\n\nimpl AsyncWrite for BoundedFileSink {\n", "file_path": "src/fs/bounded.rs", "rank": 22, "score": 26.780753564346675 }, { "content": " current_len: u64,\n\n writer: File,\n\n path: PathBuf,\n\n}\n\n\n\nimpl Unpin for BoundedFileSink {}\n\n\n\n\n\nimpl BoundedFileSink {\n\n\n\n unsafe_pinned!(writer: File);\n\n unsafe_unpinned!(current_len: u64);\n\n\n\n #[allow(unused)]\n\n pub async fn create<P>(path: P, option: BoundedFileOption) -> Result<Self, io::Error>\n\n where\n\n P: AsRef<Path>,\n\n {\n\n let inner_path = path.as_ref();\n\n let writer = file_util::create(inner_path).await?;\n", "file_path": "src/fs/bounded.rs", "rank": 23, "score": 26.596652751647564 }, { "content": " Self {\n\n inner: inner.clone(),\n\n buf,\n\n }\n\n }\n\n}\n\n\n\nimpl Unpin for SharedAsyncFileRead<'_> {}\n\n\n\nimpl Future for SharedAsyncFileRead<'_> {\n\n type Output = Result<usize, IoError>;\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {\n\n trace!(\"reading bytes\");\n\n let this = &mut *self;\n\n let mut inner = this.inner.write().unwrap();\n\n inner.poll_read(cx, this.buf)\n\n }\n\n}\n\n\n", "file_path": "src/fs/shared_file.rs", "rank": 25, "score": 24.224342816775266 }, { "content": " Ok(Self {\n\n writer,\n\n path: inner_path.to_owned(),\n\n current_len: 0,\n\n option,\n\n })\n\n }\n\n\n\n #[allow(unused)]\n\n pub async fn open_write<P>(path: P, option: BoundedFileOption) -> Result<Self, io::Error>\n\n where\n\n P: AsRef<Path>,\n\n {\n\n let file_path = path.as_ref();\n\n let writer = file_util::open(file_path).await?;\n\n let metadata = metadata(file_path).await?;\n\n let len = metadata.len();\n\n\n\n Ok(Self {\n\n writer,\n", "file_path": 
"src/fs/bounded.rs", "rank": 27, "score": 23.17562665026504 }, { "content": "\n\n pub fn seek(&self, pos: SeekFrom) -> SharedSeekFuture {\n\n SharedSeekFuture::new(self.inner.clone(), pos)\n\n }\n\n}\n\n\n\nimpl AsyncRead for SharedAsyncFile {\n\n fn poll_read(\n\n self: Pin<&mut Self>,\n\n cx: &mut Context,\n\n buf: &mut [u8],\n\n ) -> Poll<Result<usize, IoError>> {\n\n self.inner.write().unwrap().poll_read(cx, buf)\n\n }\n\n}\n\n\n\nimpl From<AsyncFile> for SharedAsyncFile {\n\n fn from(file: AsyncFile) -> Self {\n\n SharedAsyncFile::new(file)\n\n }\n", "file_path": "src/fs/shared_file.rs", "rank": 28, "score": 21.521374322588706 }, { "content": " let this = &mut *self;\n\n let mut inner = this.inner.write().unwrap();\n\n let ft = inner.seek(this.seek.clone());\n\n pin_mut!(ft);\n\n ft.poll(cx)\n\n }\n\n}\n\n\n\n/// Based on Futures Read struct\n\n/// Read future on shared file\n\n/// Only allow one read at time using lock\n\npub struct SharedAsyncFileRead<'a> {\n\n inner: InnerFile,\n\n buf: &'a mut [u8],\n\n}\n\n\n\nimpl<'a> SharedAsyncFileRead<'a> {\n\n unsafe_unpinned!(buf: &'a mut [u8]);\n\n\n\n fn new<'b: 'a>(inner: InnerFile, buf: &'b mut [u8]) -> Self {\n", "file_path": "src/fs/shared_file.rs", "rank": 30, "score": 20.730887314958856 }, { "content": "}\n\n\n\npub struct SharedSeekFuture {\n\n inner: InnerFile,\n\n seek: SeekFrom,\n\n}\n\n\n\nimpl Unpin for SharedSeekFuture {}\n\n\n\nimpl SharedSeekFuture {\n\n fn new(file: InnerFile, seek: SeekFrom) -> Self {\n\n SharedSeekFuture { inner: file, seek }\n\n }\n\n}\n\n\n\nimpl Future for SharedSeekFuture {\n\n type Output = Result<u64, IoError>;\n\n\n\n fn poll(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<Self::Output> {\n\n trace!(\"reading bytes\");\n", "file_path": "src/fs/shared_file.rs", "rank": 31, "score": 20.380634422176016 }, { "content": " path: file_path.to_owned(),\n\n current_len: len,\n\n option,\n\n })\n\n }\n\n\n\n #[allow(unused)]\n\n pub async fn open_append<P>(path: P, option: 
BoundedFileOption) -> Result<Self, io::Error>\n\n where\n\n P: AsRef<Path>,\n\n {\n\n let file_path = path.as_ref();\n\n let writer = file_util::open_read_append(file_path).await?;\n\n let metadata = metadata(file_path).await?;\n\n let len = metadata.len();\n\n\n\n Ok(Self {\n\n writer,\n\n path: file_path.to_owned(),\n\n current_len: len,\n", "file_path": "src/fs/bounded.rs", "rank": 32, "score": 19.891144220758438 }, { "content": "\n\nuse std::os::unix::io::AsRawFd;\n\nuse std::os::unix::io::RawFd;\n\n\n\n\n\n\n\n/// Slice of the file\n\n/// This works only on raw fd\n\n#[derive(Default,Debug,Clone)]\n\npub struct AsyncFileSlice {\n\n fd: RawFd,\n\n position: u64,\n\n len: u64\n\n}\n\n\n\nimpl AsyncFileSlice {\n\n\n\n pub fn new(fd: RawFd,position: u64,len: u64) -> Self {\n\n Self {\n\n fd,\n", "file_path": "src/fs/file_slice.rs", "rank": 34, "score": 19.02598492929743 }, { "content": "\n\n pub fn new() -> Self {\n\n Self(ClientConfig::new())\n\n }\n\n\n\n pub fn load_ca_cert<P: AsRef<Path>>(mut self,path: P) -> Result<Self,IoError> {\n\n\n\n self.0.root_store\n\n .add_pem_file(&mut BufReader::new(File::open(path)?))\n\n .map_err(|_| IoError::new(ErrorKind::InvalidInput, \"invalid ca crt\"))?;\n\n\n\n Ok(self)\n\n }\n\n\n\n pub fn load_ca_cert_from_bytes(mut self,buffer: &Vec<u8>) -> Result<Self, IoError> {\n\n\n\n let mut bytes = Cursor::new(buffer);\n\n self.0.root_store\n\n .add_pem_file(&mut bytes)\n\n .map_err(|_| IoError::new(ErrorKind::InvalidInput, \"invalid ca crt\"))?;\n", "file_path": "src/net/tls.rs", "rank": 35, "score": 18.141212481485113 }, { "content": "\n\n impl Future for TestFuture {\n\n\n\n type Output = u16;\n\n\n\n fn poll(self: Pin<&mut Self>, _cx: &mut Context) -> Poll<Self::Output> {\n\n Poll::Ready(2)\n\n }\n\n\n\n }\n\n \n\n\n\n #[test_async]\n\n async fn test_future() -> Result<(),Error> {\n\n\n\n let t = TestFuture{};\n\n let v: u16 = t.await;\n\n assert_eq!(v,2);\n\n Ok(())\n\n }\n", "file_path": "src/test_util.rs", "rank": 36, 
"score": 18.030024336229328 }, { "content": "\n\n fn poll_write(\n\n mut self: Pin<&mut Self>,\n\n cx: &mut Context<'_>,\n\n buf: &[u8],\n\n ) -> Poll<io::Result<usize>> {\n\n\n\n match self.as_mut().writer().poll_write(cx,buf) {\n\n Poll::Pending => Poll::Pending,\n\n Poll::Ready(result) => {\n\n match result {\n\n Ok(size) => {\n\n let current_len = self.as_ref().current_len + size as u64;\n\n *(self.as_mut().current_len()) = current_len;\n\n trace!(\"success write: {}, current len: {}\",size,self.as_ref().current_len);\n\n Poll::Ready(Ok(size))\n\n },\n\n Err(err) => Poll::Ready(Err(err))\n\n } \n\n }\n", "file_path": "src/fs/bounded.rs", "rank": 37, "score": 17.745513955014253 }, { "content": "\n\nuse futures::Stream;\n\n\n\nuse crate::fs::AsyncFile;\n\n\n\npub struct AsyncFileStream {\n\n file: AsyncFile\n\n}\n\n\n\nimpl AsyncFileStream {\n\n pub fn new(file: AsyncFile) -> Self {\n\n Self {\n\n file\n\n }\n\n }\n\n}\n\n\n\n\n\nimpl Stream for AsyncFileStream {\n\n\n\n fn poll_next(self: Pin<&mut Self>, lw: &Waker) -> Poll<Option<Self::Item>> {\n\n \n\n }\n\n\n\n}", "file_path": "src/fs/stream.rs", "rank": 38, "score": 17.561131071769314 }, { "content": "use crate::fs::AsyncFile;\n\n#[cfg(unix)]\n\nuse crate::fs::AsyncFileSlice;\n\nuse crate::fs::util as file_util;\n\nuse crate::io::AsyncWrite;\n\n\n\n\n\n#[derive(Debug)]\n\npub enum BoundedFileSinkError {\n\n IoError(io::Error),\n\n MaxLenReached, // exceed max limit\n\n}\n\n\n\nimpl fmt::Display for BoundedFileSinkError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match self {\n\n Self::IoError(err) => write!(f, \"{}\", err),\n\n Self::MaxLenReached => write!(f, \"max len reached\"),\n\n }\n\n }\n", "file_path": "src/fs/bounded.rs", "rank": 39, "score": 17.544599938797898 }, { "content": " f_sink.mut_inner().seek(SeekFrom::Start(0)).await.expect(\"reset to beginning\");\n\n // now read back\n\n let mut read_buf: Vec<u8> = vec![];\n\n f_sink.mut_inner().read_to_end(&mut 
read_buf).await.expect(\"read\");\n\n assert_eq!(read_buf.len(),147);\n\n \n\n Ok(())\n\n }\n\n\n\n\n\n\n\n mod inner {\n\n\n\n use std::io::Error as IoError;\n\n use std::io::Write;\n\n\n\n use crate::test_async;\n\n \n\n use super::temp_dir;\n\n use super::ensure_clean_file;\n", "file_path": "src/fs/bounded.rs", "rank": 40, "score": 17.295596271440733 }, { "content": "pub use async_std::fs::*;\n\n\n\n#[cfg(feature = \"tokio2\")]\n\npub use tokio::fs::*;\n\n\n\n\n\npub mod util {\n\n\n\n use std::io::Error as IoError;\n\n use std::path::Path;\n\n\n\n use super::File;\n\n use super::OpenOptions;\n\n\n\n pub async fn create_dir_all<P: AsRef<Path>>(path: P) -> Result<(), IoError> {\n\n\n\n super::create_dir_all(path.as_ref()).await\n\n }\n\n\n\n\n", "file_path": "src/fs/mod.rs", "rank": 42, "score": 16.941388074485328 }, { "content": " let (res, len) = sendfile(\n\n source_fd,\n\n target_fd,\n\n current_offset as i64,\n\n Some(to_be_transfer),\n\n None,\n\n None,\n\n );\n\n\n\n log::trace!(\"mac zero copy bytes transferred: {}\", len);\n\n total_transferred += len as u64;\n\n current_offset += len as u64;\n\n match res {\n\n Ok(_) => {\n\n if total_transferred < size {\n\n log::debug!(\"current transferred: {} less than total: {}, continuing\",total_transferred,size);\n\n } else {\n\n return Ok(len as usize)\n\n }\n\n \n", "file_path": "src/zero_copy.rs", "rank": 43, "score": 16.593069691837417 }, { "content": " #[test_async]\n\n async fn test_sink_file_write_multiple_path() -> Result<(), BoundedFileSinkError> {\n\n let test_file = temp_dir().join(TEST_FILE_NAME2);\n\n ensure_clean_file(&test_file);\n\n\n\n let mut f_sink = BoundedFileSink::create(&test_file, BoundedFileOption::default()).await.expect(\"create\");\n\n\n\n let bytes = vec![0x1; 1000];\n\n f_sink.write_all(&bytes).await.expect(\"first write\");\n\n f_sink.write_all(&bytes).await.expect(\"second write\");\n\n\n\n assert_eq!(f_sink.get_current_len(),2000);\n\n 
f_sink.flush().await.expect(\"flush\");\n\n\n\n let test_file = temp_dir().join(TEST_FILE_NAME2);\n\n let mut f = StdFile::open(test_file).expect(\"test file should exists\");\n\n let mut buffer = vec![0; 2000];\n\n let len = f.read(&mut buffer)?;\n\n assert_eq!(len, 2000);\n\n Ok(())\n", "file_path": "src/fs/bounded.rs", "rank": 44, "score": 16.546964883970613 }, { "content": " \n\n loop {\n\n\n\n let to_be_transfer = size as usize - total_transferred;\n\n\n\n log::trace!(\n\n \"trying: zero copy source fd: {} offset: {} len: {}, target: fd{}\",\n\n source_fd,\n\n current_offset,\n\n to_be_transfer,\n\n target_fd\n\n );\n\n \n\n match sendfile(target_fd, source_fd, Some(&mut current_offset), to_be_transfer) {\n\n Ok(len) => {\n\n \n\n total_transferred += len as usize;\n\n current_offset += len as off_t;\n\n log::trace!(\"actual: zero copy bytes transferred: {} out of {}\", len, size);\n\n \n", "file_path": "src/zero_copy.rs", "rank": 46, "score": 15.939575376215982 }, { "content": "\n\n // read file and zero copy to tcp stream\n\n\n\n let _rt = join(client, server).await;\n\n Ok(())\n\n }\n\n\n\n\n\n #[test_async]\n\n async fn test_zero_copy_large_size() -> Result<(), SendFileError> {\n\n\n\n const MAX_BYTES: usize = 300000;\n\n const PORT: u16 = 8888;\n\n\n\n use std::env::temp_dir;\n\n use crate::io::AsyncWriteExt;\n\n\n\n const TEST_ITERATION: u16 = 20;\n\n\n\n async fn init_file() {\n", "file_path": "src/zero_copy.rs", "rank": 47, "score": 15.867206236758825 }, { "content": "\n\n use futures::io::{AsyncRead, AsyncWrite};\n\n\n\n\n\n\n\n impl AsyncRead for AllTcpStream {\n\n\n\n #[project] \n\n fn poll_read(\n\n self: Pin<&mut Self>,\n\n cx: &mut Context<'_>,\n\n buf: &mut [u8],\n\n ) -> Poll<io::Result<usize>> {\n\n\n\n #[project]\n\n match self.project() {\n\n AllTcpStream::Tcp(stream) => stream.poll_read(cx,buf),\n\n AllTcpStream::Tls(stream) => stream.poll_read(cx,buf)\n\n }\n\n\n", "file_path": "src/net/tls.rs", "rank": 48, "score": 15.67365468356645 }, { 
"content": " spawn_blocking(move || {\n\n \n\n use nix::errno::Errno;\n\n\n\n let mut total_transferred = 0;\n\n let mut current_offset = offset as u64;\n\n\n\n loop {\n\n\n\n let to_be_transfer = (size - total_transferred) as i64;\n\n\n\n\n\n log::trace!(\n\n \"mac zero copy source fd: {} offset: {} len: {}, target: fd{}\",\n\n source_fd,\n\n current_offset,\n\n to_be_transfer,\n\n target_fd\n\n );\n\n\n", "file_path": "src/zero_copy.rs", "rank": 49, "score": 15.630097128728632 }, { "content": " debug!(\"client: got connection. waiting\");\n\n if let Some(value) = framed.next().await {\n\n debug!(\"client :received first value from server\");\n\n let bytes = value?;\n\n debug!(\"client :received bytes len: {}\",bytes.len());\n\n assert_eq!(bytes.len(),3);\n\n let values = bytes.take(3).into_inner();\n\n assert_eq!(values[0],0x05);\n\n assert_eq!(values[1],0x0a);\n\n assert_eq!(values[2],0x63);\n\n } else {\n\n assert!(false,\"no value received\");\n\n }\n\n\n\n\n\n \n\n Ok(()) as Result<(), Error>\n\n };\n\n\n\n\n\n let _rt = join(client_ft,server_ft).await;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/net/tcp_stream.rs", "rank": 50, "score": 15.464113459724382 }, { "content": " position,\n\n len\n\n }\n\n }\n\n\n\n pub fn position(&self) -> u64 {\n\n self.position\n\n }\n\n\n\n pub fn len(&self) -> u64 {\n\n self.len\n\n }\n\n\n\n pub fn fd(&self) -> RawFd {\n\n self.fd\n\n }\n\n\n\n\n\n}\n\n\n\n\n\nimpl AsRawFd for AsyncFileSlice {\n\n\n\n fn as_raw_fd(&self) -> RawFd {\n\n self.fd\n\n } \n\n}\n\n\n", "file_path": "src/fs/file_slice.rs", "rank": 51, "score": 15.44523019242003 }, { "content": "\n\n\n\n fn test_poll(_cx: &mut Context) -> Poll<u16> {\n\n Poll::Ready(4)\n\n }\n\n\n\n #[test_async]\n\n async fn test_future_with_poll() -> Result<(),Error> {\n\n\n\n assert_eq!(poll_fn(test_poll).await,4);\n\n Ok(())\n\n }\n\n\n\n\n\n\n\n\n\n}", "file_path": "src/test_util.rs", "rank": 52, "score": 15.289271711182307 }, { "content": " }\n\n\n\n if let Some(value) = 
framed.next().await {\n\n debug!(\"client: received 2nd value from server\");\n\n let bytes = value.expect(\"packet decoding works\");\n\n let values = bytes.take(2).into_inner();\n\n assert_eq!(values.len(),2);\n\n\n\n } else {\n\n assert!(false,\"no value received\");\n\n }\n\n\n\n \n\n Ok(()) as Result<(), IoError>\n\n };\n\n\n\n\n\n let _rt = join(client_ft,server_ft).await;\n\n\n\n Ok(())\n\n }\n\n}", "file_path": "src/net/tls.rs", "rank": 53, "score": 15.277640696149263 }, { "content": "\n\npub mod bytes {\n\n pub use bytes::Bytes;\n\n pub use bytes::BytesMut;\n\n pub use bytes::BufMut;\n\n}\n\n\n\n\n\npub mod util {\n\n pub use flv_util::*;\n\n}\n\n\n", "file_path": "src/lib.rs", "rank": 54, "score": 15.22079488761469 }, { "content": " let len = tcp_stream.read(&mut buf).await?;\n\n assert_eq!(len, 30);\n\n } else {\n\n assert!(false, \"client should connect\");\n\n }\n\n Ok(()) as Result<(), SendFileError>\n\n };\n\n\n\n let client = async {\n\n let file = file_util::open(\"test-data/apirequest.bin\").await?;\n\n sleep(time::Duration::from_millis(100)).await;\n\n let addr = \"127.0.0.1:9999\".parse::<SocketAddr>().expect(\"parse\");\n\n debug!(\"client: file loaded\");\n\n let mut stream = TcpStream::connect(&addr).await?;\n\n debug!(\"client: connected to server\");\n\n let f_slice = file.as_slice(0, None).await?;\n\n debug!(\"client: send back file using zero copy\");\n\n stream.zero_copy_write(&f_slice).await?;\n\n Ok(()) as Result<(), SendFileError>\n\n };\n", "file_path": "src/zero_copy.rs", "rank": 55, "score": 15.15284749838637 }, { "content": " /// open for write only\n\n pub async fn create<P>(path: P) -> Result<File, IoError>\n\n where\n\n P: AsRef<Path>,\n\n {\n\n File::create(path.as_ref()).await\n\n }\n\n\n\n /// open for only read\n\n pub async fn open<P>(path: P) -> Result<File, IoError>\n\n where\n\n P: AsRef<Path>,\n\n {\n\n let file_path = path.as_ref();\n\n File::open(file_path).await\n\n }\n\n\n\n /// open for read and write\n\n pub 
async fn open_read_write<P>(path: P) -> Result<File, IoError>\n\n where\n", "file_path": "src/fs/mod.rs", "rank": 56, "score": 15.12621486425325 }, { "content": " /// create builder with client authentication\n\n /// must pass CA root\n\n pub fn new_client_authenticate<P: AsRef<Path>>(path: P) -> Result<Self,IoError> {\n\n\n\n let root_store = load_root_ca(path)?;\n\n \n\n Ok(Self(ServerConfig::new(AllowAnyAuthenticatedClient::new(root_store))))\n\n }\n\n\n\n pub fn load_server_certs<P: AsRef<Path>>(\n\n mut self,\n\n cert_path: P,\n\n key_path: P,\n\n ) -> Result<Self,IoError> {\n\n\n\n\n\n let server_crt = load_certs(cert_path)?;\n\n let mut server_keys = load_keys(key_path)?;\n\n self.0\n\n .set_single_cert(server_crt,server_keys.remove(0))\n", "file_path": "src/net/tls.rs", "rank": 57, "score": 14.957095687740946 }, { "content": " let mut file = AsyncFile::create(&test_file_path).await?;\n\n file.write_all(b\"test\").await?;\n\n\n\n let mut buffer = [0; 4];\n\n let read_file = AsyncFile::open(&test_file_path).await?;\n\n let shared_file = SharedAsyncFile::new(read_file);\n\n let read_len = shared_file.read(&mut buffer).await?;\n\n assert_eq!(read_len, 4);\n\n let contents = String::from_utf8(buffer.to_vec()).expect(\"conversion\");\n\n assert_eq!(contents, \"test\");\n\n\n\n let mut output = Vec::new();\n\n let mut file2 = shared_file.clone();\n\n file2.seek(SeekFrom::Start(0)).await?;\n\n file2.read_to_end(&mut output).await?;\n\n let contents = String::from_utf8(output).expect(\"conversion\");\n\n assert_eq!(contents, \"test\");\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/fs/shared_file.rs", "rank": 58, "score": 14.915372782445274 }, { "content": " let temp_file = temp_dir().join(\"async_large\");\n\n // do zero copy\n\n let file = file_util::open(&temp_file).await.expect(\"re opening\");\n\n let f_slice = file.as_slice(0, None).await.expect(\"filed opening\");\n\n assert_eq!(f_slice.len(),MAX_BYTES as u64);\n\n\n\n let listener = 
TcpListener::bind(format!(\"127.0.0.1:{}\",PORT)).await.expect(\"failed bind\");\n\n \n\n debug!(\"server: listening\");\n\n let mut incoming = listener.incoming();\n\n\n\n for i in 0..TEST_ITERATION {\n\n\n\n if let Some(stream) = incoming.next().await {\n\n debug!(\"server {} got connection. waiting\",i);\n\n let mut tcp_stream = stream?;\n\n \n\n debug!(\"server {}, send back file using zero copy: {:#?}\",i,f_slice.len());\n\n tcp_stream.zero_copy_write(&f_slice).await.expect(\"file slice\");\n\n\n", "file_path": "src/zero_copy.rs", "rank": 60, "score": 14.84953406565959 }, { "content": " }\n\n\n\n /// example of async test\n\n #[test_async]\n\n async fn test_sink_file_max_reached() -> Result<(), BoundedFileSinkError> {\n\n let test_file = temp_dir().join(MAX_TEST_FILE_NAME);\n\n ensure_clean_file(&test_file);\n\n\n\n let option = BoundedFileOption { max_len: Some(10) };\n\n\n\n let mut f_sink = BoundedFileSink::create(&test_file, option).await.expect(\"file created\");\n\n\n\n let bytes = vec![0x01; 8];\n\n f_sink.write_all(&bytes).await.expect(\"first write\");\n\n assert_eq!(f_sink.get_current_len(), 8);\n\n assert!(!f_sink.can_be_appended(20));\n\n Ok(())\n\n }\n\n\n\n\n", "file_path": "src/fs/bounded.rs", "rank": 62, "score": 14.576840818334027 }, { "content": "pub mod broadcast;\n\n\n\n#[cfg(feature = \"asyncstd\")]\n\nmod inner_sync {\n\n pub use async_std::sync::Arc;\n\n pub use async_std::sync::Barrier;\n\n pub use async_std::sync::BarrierWaitResult;\n\n pub use async_std::sync::Mutex;\n\n pub use async_std::sync::MutexGuard;\n\n pub use async_std::sync::RwLock;\n\n pub use async_std::sync::RwLockReadGuard;\n\n pub use async_std::sync::RwLockWriteGuard;\n\n pub use async_std::sync::Weak;\n\n}\n\n#[cfg(feature = \"asyncstd\")]\n\npub use inner_sync::*;\n\n\n\npub mod mpsc {\n\n\n\n #[cfg(feature = \"asyncstd\")]\n", "file_path": "src/sync/mod.rs", "rank": 63, "score": 14.546248848256578 }, { "content": "\n\n Ok(self)\n\n\n\n }\n\n\n\n pub fn 
load_client_certs<P: AsRef<Path>>(\n\n mut self,\n\n cert_path: P,\n\n key_path: P,\n\n ) -> Result<Self,IoError> {\n\n\n\n\n\n let client_certs = load_certs(cert_path)?;\n\n let mut client_keys = load_keys(key_path)?;\n\n self.0\n\n .set_single_client_cert(client_certs,client_keys.remove(0))\n\n .map_err(|_| IoError::new(ErrorKind::InvalidInput, \"invalid cert\"))?;\n\n \n\n Ok(self)\n\n }\n", "file_path": "src/net/tls.rs", "rank": 64, "score": 14.491155161687388 }, { "content": "\n\n const CA_PATH: &'static str = \"certs/certs/ca.crt\";\n\n\n\n fn to_bytes(bytes: Vec<u8>) -> Bytes {\n\n let mut buf = BytesMut::with_capacity(bytes.len());\n\n buf.put_slice(&bytes);\n\n buf.freeze()\n\n }\n\n\n\n\n\n #[test_async]\n\n async fn test_async_tls() -> Result<(), IoError> {\n\n\n\n \n\n test_tls(\n\n AcceptorBuilder::new_no_client_authentication()\n\n .load_server_certs(\"certs/certs/server.crt\",\"certs/certs/server.key\")?\n\n .build(),\n\n ConnectorBuilder::new()\n\n .no_cert_verification()\n", "file_path": "src/net/tls.rs", "rank": 65, "score": 14.279447971437687 }, { "content": "pub use inner::*;\n\n\n\n\n\n#[cfg(feature = \"asyncstd\")]\n\nmod inner {\n\n\n\n use std::time::Duration;\n\n\n\n use futures_timer::Delay;\n\n\n\n pub async fn sleep(duration: Duration) {\n\n let delay = Delay::new(duration);\n\n delay.await;\n\n }\n\n}\n\n\n\n#[cfg(feature = \"tokio2\")]\n\nmod inner {\n\n\n\n use std::time::Duration;\n", "file_path": "src/timer.rs", "rank": 66, "score": 14.231807965468676 }, { "content": "// Pinning is never projected to fields\n\nimpl<W: ?Sized + Unpin, B> Unpin for WriteBufAll<'_, W, B> {}\n\n\n\nimpl<'a, W: AsyncWrite + ?Sized + Unpin, B> WriteBufAll<'a, W, B> {\n\n pub(super) fn new(writer: &'a mut W, buf: B) -> Self {\n\n WriteBufAll { writer, buf }\n\n }\n\n}\n\n\n\nimpl<W: AsyncWrite + ?Sized + Unpin, B> Future for WriteBufAll<'_, W, B>\n\nwhere\n\n B: AsRef<[u8]>,\n\n{\n\n type Output = io::Result<()>;\n\n\n\n fn poll(mut self: Pin<&mut Self>, 
cx: &mut Context) -> Poll<io::Result<()>> {\n\n let this = &mut *self;\n\n let mut buf = this.buf.as_ref();\n\n while !buf.is_empty() {\n\n let n = ready!(Pin::new(&mut this.writer).poll_write(cx, buf))?;\n", "file_path": "src/write.rs", "rank": 67, "score": 14.080700616892429 }, { "content": "\n\n #[async_trait]\n\n impl TcpDomainConnector for TlsAnonymousConnector {\n\n\n\n type WrapperStream = DefaultClientTlsStream;\n\n\n\n async fn connect(&self,domain: &str) -> Result<(Self::WrapperStream,RawFd),IoError> {\n\n let tcp_stream = TcpStream::connect(domain).await?;\n\n let fd = tcp_stream.as_raw_fd();\n\n Ok((self.0.connect(domain, tcp_stream).await?,fd))\n\n }\n\n }\n\n\n\n\n\n\n\n #[derive(Clone)]\n\n pub struct TlsDomainConnector {\n\n domain: String,\n\n connector: TlsConnector\n\n }\n", "file_path": "src/net/tls.rs", "rank": 68, "score": 13.881885762231851 }, { "content": " debug!(\"server: got connection from client\");\n\n let tcp_stream = stream?;\n\n let mut framed = Framed::new(tcp_stream.compat(),BytesCodec::new());\n\n debug!(\"server: sending values to client\");\n\n let data = vec![0x05, 0x0a, 0x63];\n\n framed.send(to_bytes(data)).await?;\n\n return Ok(()) as Result<(),Error>\n\n\n\n }\n\n \n\n Ok(()) as Result<(), Error>\n\n };\n\n\n\n let client_ft = async {\n\n \n\n debug!(\"client: sleep to give server chance to come up\");\n\n sleep(time::Duration::from_millis(100)).await;\n\n debug!(\"client: trying to connect\");\n\n let tcp_stream = TcpStream::connect(&addr).await?;\n\n let mut framed = Framed::new(tcp_stream.compat(),BytesCodec::new());\n", "file_path": "src/net/tcp_stream.rs", "rank": 70, "score": 13.85548422023783 }, { "content": " P: AsRef<Path>,\n\n {\n\n let file_path = path.as_ref();\n\n let mut option = OpenOptions::new();\n\n option.read(true).write(true).create(true).append(false);\n\n\n\n option.open(file_path).await\n\n }\n\n\n\n pub async fn open_read_append<P>(path: P) -> Result<File, IoError>\n\n where\n\n P: AsRef<Path>,\n\n 
{\n\n let file_path = path.as_ref();\n\n let mut option = OpenOptions::new();\n\n option.read(true).create(true).append(true);\n\n\n\n option.open(file_path).await\n\n }\n\n}\n\n\n\n\n\n\n\n\n\n\n", "file_path": "src/fs/mod.rs", "rank": 71, "score": 13.838633205424069 }, { "content": "}\n\n\n\nimpl From<io::Error> for BoundedFileSinkError {\n\n fn from(error: io::Error) -> Self {\n\n BoundedFileSinkError::IoError(error)\n\n }\n\n}\n\n\n\n#[derive(Default)]\n\npub struct BoundedFileOption {\n\n pub max_len: Option<u64>,\n\n}\n\n\n\n\n\n\n\n/// File Sink that tracks how much byte it has written\n\n/// This file will not block write operation. It is up to writer to check if maximum file has size is reached\n\n/// since AsyncWrite return IoError \n\npub struct BoundedFileSink {\n\n option: BoundedFileOption,\n", "file_path": "src/fs/bounded.rs", "rank": 72, "score": 13.782656829153904 }, { "content": " option,\n\n })\n\n }\n\n\n\n #[allow(unused)]\n\n pub fn get_current_len(&self) -> u64 {\n\n self.current_len\n\n }\n\n\n\n /// check if buf_len can be written\n\n pub fn can_be_appended(&self,buf_len: u64) -> bool {\n\n match self.option.max_len {\n\n Some(max_len) => self.current_len + buf_len <= max_len,\n\n None => true\n\n }\n\n \n\n }\n\n\n\n\n\n pub fn get_path(&self) -> &Path {\n", "file_path": "src/fs/bounded.rs", "rank": 73, "score": 13.661208444473015 }, { "content": " impl TcpDomainConnector for AllDomainConnector {\n\n\n\n type WrapperStream = AllTcpStream;\n\n\n\n async fn connect(&self,domain: &str) -> Result<(Self::WrapperStream,RawFd),IoError> { \n\n\n\n match self {\n\n Self::Tcp(connector) => { \n\n let (stream,fd) = connector.connect(domain).await?;\n\n Ok((AllTcpStream::tcp(stream),fd))\n\n },\n\n \n\n Self::TlsDomain(connector) => {\n\n let (stream,fd) = connector.connect(domain).await?;\n\n Ok((AllTcpStream::tls(stream),fd))\n\n },\n\n Self::TlsAnonymous(connector) => {\n\n let (stream,fd) = connector.connect(domain).await?;\n\n 
Ok((AllTcpStream::tls(stream),fd))\n\n }\n", "file_path": "src/net/tls.rs", "rank": 74, "score": 13.629216283644185 }, { "content": " use std::pin::Pin;\n\n use std::task::Context;\n\n use std::task::Poll;\n\n\n\n use futures::Future;\n\n use futures::future::poll_fn;\n\n \n\n\n\n use crate::test_async;\n\n \n\n \n\n // actual test run\n\n \n\n #[test_async]\n\n async fn async_derive_test() -> Result<(),Error> {\n\n assert!(true,\"I am live\");\n\n Ok(())\n\n }\n\n \n\n \n", "file_path": "src/test_util.rs", "rank": 75, "score": 13.434348979805979 }, { "content": " use crate::fs::AsyncFile;\n\n use crate::timer::sleep;\n\n use crate::test_async;\n\n\n\n use super::SendFileError;\n\n\n\n #[test_async]\n\n async fn test_zero_copy_from_fs_to_socket() -> Result<(), SendFileError> {\n\n // spawn tcp client and check contents\n\n let server = async {\n\n\n\n #[allow(unused_mut)]\n\n let mut listener = TcpListener::bind(\"127.0.0.1:9999\").await?;\n\n \n\n debug!(\"server: listening\");\n\n let mut incoming = listener.incoming();\n\n if let Some(stream) = incoming.next().await {\n\n debug!(\"server: got connection. 
waiting\");\n\n let mut tcp_stream = stream?;\n\n let mut buf = [0; 30];\n", "file_path": "src/zero_copy.rs", "rank": 76, "score": 13.352484304628717 }, { "content": "\n\n\n\n#[cfg(feature = \"asyncstd\")]\n\npub use inner::*;\n\n\n\n#[cfg(feature = \"asyncstd\")]\n\nmod inner {\n\n pub use async_std::io::*;\n\n pub use async_std::prelude::*;\n\n pub use futures::io::AsyncWrite;\n\n pub use futures::io::AsyncReadExt;\n\n pub use futures::io::AsyncWriteExt;\n\n pub use futures::io::AsyncSeekExt;\n\n pub use futures::io::AsyncBufReadExt;\n\n}\n\n\n\n\n\n#[cfg(feature = \"asyncstd\")] \n\nmod async_prelude {\n\n pub use async_std::io::prelude::SeekExt as AsyncSeekExt;\n\n}\n\n\n\n#[cfg(feature = \"tokio2\")]\n\npub use tokio::io::*;\n\n\n", "file_path": "src/io/mod.rs", "rank": 77, "score": 13.312845691224467 }, { "content": "\n\n impl TlsDomainConnector {\n\n pub fn new(connector: TlsConnector,domain: String) -> Self {\n\n Self {\n\n domain,\n\n connector\n\n }\n\n }\n\n }\n\n\n\n #[async_trait]\n\n impl TcpDomainConnector for TlsDomainConnector {\n\n\n\n type WrapperStream = DefaultClientTlsStream;\n\n\n\n async fn connect(&self,addr: &str) -> Result<(Self::WrapperStream,RawFd),IoError> {\n\n debug!(\"connect to tls addr: {}\",addr);\n\n let tcp_stream = TcpStream::connect(addr).await?;\n\n let fd = tcp_stream.as_raw_fd();\n\n\n", "file_path": "src/net/tls.rs", "rank": 78, "score": 13.245939316539305 }, { "content": "\n\n #[test_async]\n\n async fn test_sink_file_write_read() -> Result<(), BoundedFileSinkError> {\n\n\n\n const WRITE_FILE: &str = \"file_test_write_bounded\";\n\n\n\n let test_file = temp_dir().join(WRITE_FILE);\n\n ensure_clean_file(&test_file);\n\n\n\n let mut f_sink = BoundedFileSink::open_append(&test_file, BoundedFileOption::default()).await?;\n\n\n\n let bytes: Vec<u8> = vec![0x01; 73];\n\n f_sink.write_all(&bytes).await.expect(\"send success\");\n\n debug!(\"current len: {}\",f_sink.get_current_len());\n\n let bytes: Vec<u8> = vec![0x01; 
74];\n\n f_sink.write_all(&bytes).await.expect(\"send success\");\n\n assert_eq!(f_sink.get_current_len(),147);\n\n\n\n\n\n // check if we read back\n", "file_path": "src/fs/bounded.rs", "rank": 79, "score": 13.19440335186454 }, { "content": " }\n\n\n\n }\n\n\n\n impl AsyncWrite for AllTcpStream {\n\n\n\n #[project] \n\n fn poll_write(\n\n self: Pin<&mut Self>, \n\n cx: &mut Context, \n\n buf: &[u8]\n\n ) -> Poll<Result<usize, io::Error>> {\n\n\n\n #[project]\n\n match self.project() {\n\n AllTcpStream::Tcp(stream) => stream.poll_write(cx,buf),\n\n AllTcpStream::Tls(stream) => stream.poll_write(cx,buf)\n\n }\n\n }\n\n\n", "file_path": "src/net/tls.rs", "rank": 80, "score": 13.086530432812745 }, { "content": " pub use async_std::sync::Receiver;\n\n pub use async_std::sync::Sender;\n\n pub use async_std::sync::channel;\n\n\n\n\n\n #[cfg(feature = \"tokio2\")]\n\n pub use tokio::sync::broadcast::*;\n\n}\n\n\n\n\n\npub use inner::Channel;\n\n\n\nmod inner {\n\n\n\n use super::mpsc::Receiver;\n\n use super::mpsc::Sender;\n\n use super::mpsc::channel;\n\n\n\n /// abstraction for multi sender receiver channel\n\n #[derive(Debug)]\n", "file_path": "src/sync/mod.rs", "rank": 81, "score": 13.013499409015143 }, { "content": " use crate::timer::sleep;\n\n\n\n let guard = self.count.lock().await;\n\n let count = *guard;\n\n drop(guard);\n\n for _ in 0..count {\n\n let mut guard = self.count.lock().await;\n\n *guard = *guard - 1;\n\n sleep(self.delay).await;\n\n }\n\n\n\n }\n\n\n\n }\n\n\n\n\n\n #[test_async]\n\n async fn test_dispatcher() -> Result<(),()> {\n\n\n\n let count = Lock::new(5);\n", "file_path": "src/actor/mod.rs", "rank": 82, "score": 12.807694404217244 }, { "content": "\n\n pub fn load_client_certs_from_bytes(mut self,cert_buf: &Vec<u8>,key_buf: &Vec<u8>) -> Result<Self,IoError> {\n\n\n\n \n\n let client_certs = load_certs_from_reader(&mut Cursor::new(cert_buf))?;\n\n let mut client_keys = load_keys_from_reader(&mut Cursor::new(key_buf))?;\n\n self.0\n\n 
.set_single_client_cert(client_certs,client_keys.remove(0))\n\n .map_err(|_| IoError::new(ErrorKind::InvalidInput, \"invalid cert\"))?;\n\n \n\n Ok(self)\n\n }\n\n\n\n \n\n pub fn no_cert_verification(mut self) -> Self {\n\n\n\n self.0\n\n .dangerous()\n\n .set_certificate_verifier(Arc::new(NoCertificateVerification {}));\n\n\n", "file_path": "src/net/tls.rs", "rank": 83, "score": 12.730501032920627 }, { "content": "pub use builder::*;\n\n\n\nmod cert {\n\n use std::io::Error as IoError;\n\n use std::io::ErrorKind;\n\n use std::path::Path;\n\n use std::io::BufReader;\n\n use std::io::BufRead;\n\n use std::fs::File;\n\n \n\n use rustls::internal::pemfile::certs;\n\n use rustls::internal::pemfile::rsa_private_keys;\n\n\n\n \n\n use super::Certificate;\n\n use super::PrivateKey;\n\n use super::RootCertStore;\n\n\n\n pub fn load_certs<P: AsRef<Path>>(path: P) -> Result<Vec<Certificate>,IoError> {\n\n load_certs_from_reader(&mut BufReader::new(File::open(path)?))\n", "file_path": "src/net/tls.rs", "rank": 85, "score": 12.358546480342307 }, { "content": "\n\n debug!(\"server: handshaking\");\n\n let tls_stream = handshake.await.expect(\"hand shake failed\");\n\n \n\n // handle connection\n\n let mut framed = Framed::new(tls_stream,BytesCodec{});\n\n debug!(\"server: sending values to client\");\n\n let data = vec![0x05, 0x0a, 0x63];\n\n framed.send(to_bytes(data)).await.expect(\"send failed\");\n\n sleep(time::Duration::from_micros(1)).await;\n\n debug!(\"server: sending 2nd value to client\");\n\n let data2 = vec![0x20,0x11]; \n\n framed.send(to_bytes(data2)).await.expect(\"2nd send failed\");\n\n return Ok(()) as Result<(),IoError>\n\n\n\n }\n\n \n\n Ok(()) as Result<(), IoError>\n\n };\n\n\n", "file_path": "src/net/tls.rs", "rank": 86, "score": 12.319773789983543 }, { "content": " let mut buffer = Vec::with_capacity(MAX_BYTES);\n\n stream.read_exact(&mut buffer).await.expect(\"no more buffer\");\n\n debug!(\"client: {} test success\",i);\n\n\n\n // sleep 10 
milliseconds between request to keep tcp connection otherwise it may lead to EPIPE error\n\n // \n\n sleep(time::Duration::from_millis(10)).await;\n\n }\n\n\n\n Ok(()) as Result<(), SendFileError>\n\n };\n\n\n\n // read file and zero copy to tcp stream\n\n\n\n let _rt = join(client, server).await;\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/zero_copy.rs", "rank": 87, "score": 12.20095743896584 }, { "content": "\n\n #[test_async]\n\n async fn test_sink_file_write_std() -> Result<(), IoError> {\n\n\n\n use std::fs::File;\n\n\n\n const WRITE_FILE: &str = \"file_test_two_write_std\";\n\n\n\n let test_file = temp_dir().join(WRITE_FILE);\n\n ensure_clean_file(&test_file);\n\n let mut f_sink = File::create(&test_file).expect(\"file created\");\n\n\n\n let bytes: Vec<u8> = vec![0x01; 73];\n\n f_sink.write_all(&bytes).expect(\"send success\");\n\n let bytes: Vec<u8> = vec![0x01; 74];\n\n f_sink.write_all(&bytes).expect(\"send success\");\n\n \n\n let metadata = std::fs::metadata(test_file).expect(\"data file should exists\");\n\n\n\n // even if file is not flushed, file has all the data \n\n assert_eq!(metadata.len(),147);\n\n \n\n Ok(())\n\n }\n\n }\n\n\n\n}\n", "file_path": "src/fs/bounded.rs", "rank": 88, "score": 12.11631550041865 }, { "content": "\n\n use log::debug;\n\n use async_trait::async_trait;\n\n\n\n use super::TlsConnector;\n\n use super::super::TcpDomainConnector;\n\n use super::super::DefaultTcpDomainConnector;\n\n use super::DefaultClientTlsStream;\n\n use super::TcpStream;\n\n use super::AllTcpStream;\n\n\n\n /// connect as anonymous client\n\n #[derive(Clone)]\n\n pub struct TlsAnonymousConnector(TlsConnector);\n\n\n\n impl From<TlsConnector> for TlsAnonymousConnector {\n\n fn from(connector: TlsConnector) -> Self {\n\n Self(connector)\n\n }\n\n }\n", "file_path": "src/net/tls.rs", "rank": 89, "score": 12.082458752010677 }, { "content": "\n\n let bytes = vec![0x01, 0x02, 0x03];\n\n f_sink.write_all(&bytes).await.expect(\"write bytes\");\n\n 
assert_eq!(f_sink.get_current_len(),3);\n\n\n\n f_sink.flush().await.expect(\"flush\");\n\n \n\n\n\n let test_file = temp_dir().join(TEST_FILE_NAME);\n\n let mut f = StdFile::open(test_file)?;\n\n let mut buffer = vec![0; 3];\n\n f.read(&mut buffer)?;\n\n assert_eq!(buffer[0], 0x01);\n\n assert_eq!(buffer[1], 0x02);\n\n assert_eq!(buffer[2], 0x03);\n\n Ok(())\n\n }\n\n\n\n const TEST_FILE_NAME2: &str = \"file_test_02\";\n\n\n", "file_path": "src/fs/bounded.rs", "rank": 90, "score": 11.964228574304695 }, { "content": "\n\n\n\n\n\nuse std::io::Error;\n\nuse std::net::SocketAddr;\n\nuse std::time;\n\n\n\nuse bytes::BufMut;\n\nuse bytes::Bytes;\n\nuse bytes::BytesMut;\n\nuse bytes::buf::ext::BufExt;\n\nuse futures::sink::SinkExt;\n\nuse futures::stream::StreamExt;\n\nuse futures::future::join;\n\nuse log::debug;\n\nuse tokio_util::codec::BytesCodec;\n\nuse tokio_util::codec::Framed;\n\nuse tokio_util::compat::FuturesAsyncReadCompatExt;\n\n\n\n\n\nuse crate::test_async;\n\nuse crate::timer::sleep;\n\n\n\nuse crate::net::TcpListener;\n\nuse crate::net::TcpStream;\n\n\n\n\n", "file_path": "src/net/tcp_stream.rs", "rank": 91, "score": 11.847231710564186 }, { "content": "mod mmap;\n\n#[cfg(unix)]\n\nmod file_slice;\n\n#[cfg(unix)]\n\nmod async_file;\n\nmod bounded;\n\n\n\n#[cfg(unix)]\n\npub use self::async_file::AsyncFile;\n\n#[cfg(unix)]\n\npub use self::file_slice::AsyncFileSlice;\n\npub use self::bounded::BoundedFileSink;\n\npub use self::bounded::BoundedFileOption;\n\npub use self::bounded::BoundedFileSinkError;\n\n\n\npub use self::mmap::MemoryMappedFile;\n\npub use self::mmap::MemoryMappedMutFile;\n\n\n\n\n\n#[cfg(feature = \"asyncstd\")]\n", "file_path": "src/fs/mod.rs", "rank": 92, "score": 11.69174765291828 }, { "content": " use pin_project::{pin_project, project};\n\n\n\n use super::TcpStream;\n\n use super::DefaultClientTlsStream;\n\n\n\n #[pin_project]\n\n pub enum AllTcpStream {\n\n Tcp(#[pin] TcpStream),\n\n Tls(#[pin] DefaultClientTlsStream)\n\n }\n\n\n\n 
impl AllTcpStream {\n\n pub fn tcp(stream: TcpStream) -> Self {\n\n Self::Tcp(stream)\n\n }\n\n\n\n pub fn tls(stream: DefaultClientTlsStream) -> Self {\n\n Self::Tls(stream)\n\n }\n\n }\n", "file_path": "src/net/tls.rs", "rank": 93, "score": 11.525287431917665 }, { "content": " use std::time::Duration;\n\n \n\n use async_lock::Lock;\n\n use async_trait::async_trait;\n\n\n\n use crate::test_async;\n\n use crate::timer::sleep;\n\n\n\n use super::*;\n\n\n\n struct SimpleDispatcher {\n\n count: Lock<u16>,\n\n delay: Duration\n\n }\n\n\n\n #[async_trait]\n\n impl AsyncDispatcher for SimpleDispatcher {\n\n\n\n async fn dispatch_loop(mut self) {\n\n\n", "file_path": "src/actor/mod.rs", "rank": 94, "score": 11.43982506670604 }, { "content": " use tokio::time::delay_for;\n\n \n\n\n\n pub async fn sleep(duration: Duration) {\n\n delay_for(duration).await;\n\n }\n\n}\n\n\n\n\n\n\n", "file_path": "src/timer.rs", "rank": 95, "score": 11.351176644067653 }, { "content": " use bytes::BytesMut;\n\n use bytes::buf::ext::BufExt;\n\n use futures::sink::SinkExt;\n\n use futures::stream::StreamExt;\n\n use futures::future::join;\n\n use futures_codec::BytesCodec;\n\n use futures_codec::Framed;\n\n use async_tls::TlsConnector;\n\n use async_tls::TlsAcceptor;\n\n\n\n\n\n use crate::test_async;\n\n use crate::timer::sleep;\n\n\n\n use crate::net::TcpListener;\n\n use crate::net::TcpStream;\n\n\n\n use super::ConnectorBuilder;\n\n use super::AcceptorBuilder;\n\n use super::AllTcpStream;\n", "file_path": "src/net/tls.rs", "rank": 96, "score": 11.326620101564846 }, { "content": " debug!(\"connect to tls domain: {}\",self.domain);\n\n Ok((self.connector.connect(&self.domain, tcp_stream).await?,fd))\n\n }\n\n }\n\n\n\n \n\n\n\n\n\n #[derive(Clone)]\n\n pub enum AllDomainConnector {\n\n Tcp(DefaultTcpDomainConnector),\n\n TlsDomain(TlsDomainConnector),\n\n TlsAnonymous(TlsAnonymousConnector)\n\n }\n\n\n\n impl Default for AllDomainConnector {\n\n fn default() -> Self {\n\n 
Self::default_tcp()\n\n }\n\n }\n", "file_path": "src/net/tls.rs", "rank": 97, "score": 11.30558641908149 }, { "content": " pub struct Channel<T> {\n\n receiver: Receiver<T>,\n\n sender: Sender<T>\n\n }\n\n\n\n impl <T>Channel<T> {\n\n\n\n pub fn new(capacity: usize) -> Self {\n\n\n\n let (sender,receiver) = channel(capacity);\n\n Self {\n\n receiver,\n\n sender\n\n }\n\n }\n\n\n\n /// create new clone of sender\n\n pub fn sender(&self) -> Sender<T> {\n\n self.sender.clone()\n\n }\n", "file_path": "src/sync/mod.rs", "rank": 98, "score": 11.223719020657029 }, { "content": "\n\n\n\n impl AllDomainConnector {\n\n\n\n pub fn default_tcp() -> Self {\n\n Self::Tcp(DefaultTcpDomainConnector::new())\n\n }\n\n\n\n pub fn new_tls_domain(connector: TlsDomainConnector) -> Self {\n\n Self::TlsDomain(connector)\n\n }\n\n\n\n pub fn new_tls_anonymous(connector: TlsAnonymousConnector) -> Self {\n\n Self::TlsAnonymous(connector)\n\n }\n\n\n\n }\n\n\n\n \n\n #[async_trait]\n", "file_path": "src/net/tls.rs", "rank": 99, "score": 11.21184438942809 } ]
Rust
src/board.rs
jstnlef/rustris
0133b9e43c22cf26f640da8e74cef9c365c5b7ad
use std::collections::VecDeque; use piston_window::{Context, G2d, Line, Transformed, types, Rectangle}; use piston_window::rectangle; use piston_window::grid::Grid; use colors::GREY; use tetromino::{Piece, Block}; use settings::*; type GridRow = [CellState; WIDTH_IN_BLOCKS as usize]; pub struct Board { grid: VecDeque<GridRow> } impl Board { pub fn new() -> Board { Board { grid: Self::create_empty_grid() } } pub fn set_piece(&mut self, piece: &Piece) { for block in piece.blocks_iter() { self.set_cell_state(block, CellState::Block(piece.get_color())); } } pub fn is_space_occupied(&self, block: Block) -> bool { match self.get_cell_state(block.x, block.y) { CellState::Block(_) => true, CellState::Empty => false } } pub fn remove_completed_rows(&mut self) -> u32 { let completed_row_indexes = self.find_completed_row_indexes(); for i in &completed_row_indexes { self.grid.remove(*i); } for _ in &completed_row_indexes { self.grid.push_front(Self::create_empty_row()); } debug_assert!(self.grid.len() == HEIGHT_IN_BLOCKS as usize); completed_row_indexes.len() as u32 } fn find_completed_row_indexes(&self) -> Vec<usize> { let mut completed_row_indexes = Vec::new(); let rows = self.grid.iter().rev().take_while(|&row| !Self::row_is_empty(row)); for (i, row) in rows.enumerate() { if Self::row_is_complete(row) { let grid_index = (HEIGHT_IN_BLOCKS - 1) as usize - i; completed_row_indexes.push(grid_index); } } completed_row_indexes } fn get_cell_state(&self, x: i32, y: i32) -> CellState { self.grid[y as usize][x as usize] } fn set_cell_state(&mut self, block: Block, cell_state: CellState) { self.grid[block.y as usize][block.x as usize] = cell_state; } fn create_empty_grid() -> VecDeque<GridRow> { let mut grid = VecDeque::with_capacity(HEIGHT_IN_BLOCKS as usize); for _ in 0..HEIGHT_IN_BLOCKS { grid.push_back(Self::create_empty_row()); } grid } fn create_empty_row() -> GridRow { [CellState::Empty; WIDTH_IN_BLOCKS as usize] } fn row_is_empty(row: &GridRow) -> bool { 
row.iter().all(|&block| block == CellState::Empty) } fn row_is_complete(row: &GridRow) -> bool { row.iter().all(|&block| block != CellState::Empty) } pub fn render(&self, context: Context, graphics: &mut G2d) { let grid = Grid { cols: WIDTH_IN_BLOCKS as u32, rows: HEIGHT_IN_BLOCKS as u32, units: BLOCK_SIZE }; let line = Line::new(GREY, GRID_LINE_WIDTH); let transform = context.transform.trans(GRID_X_OFFSET, GRID_Y_OFFSET); grid.draw(&line, &Default::default(), transform, graphics); for x in 0..WIDTH_IN_BLOCKS { for y in 0..HEIGHT_IN_BLOCKS { self.get_cell_state(x, y).render(x, y, context, graphics); } } } } #[derive(Clone, Copy, Debug, PartialEq)] pub enum CellState { Empty, Block(types::Color) } impl CellState { fn render(&self, x: i32, y: i32, context: Context, graphics: &mut G2d) { match *self { CellState::Block(color) => { let rect = Rectangle { color: color, shape: rectangle::Shape::Square, border: None }; Block::new(x, y).render_in_grid(rect, context, graphics); }, CellState::Empty => {} } } } #[cfg(test)] mod tests { use super::*; use colors::{RED, CYAN}; use tetromino::{Block, Piece, I}; use settings::*; #[test] fn test_set_piece() { let mut board = Board::new(); let piece = Piece::create(&I); board.set_piece(&piece); assert_eq!(board.get_cell_state(2, 1), CellState::Empty); assert_eq!(board.get_cell_state(3, 1), CellState::Block(CYAN)); assert_eq!(board.get_cell_state(4, 1), CellState::Block(CYAN)); assert_eq!(board.get_cell_state(5, 1), CellState::Block(CYAN)); assert_eq!(board.get_cell_state(6, 1), CellState::Block(CYAN)); assert_eq!(board.get_cell_state(7, 1), CellState::Empty); } #[test] fn test_is_space_occupied() { let mut board = Board::new(); let block = Block{x: 2, y: 2}; board.set_cell_state(block, CellState::Block(RED)); assert!(board.is_space_occupied(block)); assert!(!board.is_space_occupied(Block{x: 0, y: 0})); } #[test] fn test_set_cell_state() { let mut board = Board::new(); assert_eq!(board.get_cell_state(0, 0), CellState::Empty); 
board.set_cell_state(Block{x: 2, y: 2}, CellState::Block(RED)); assert_eq!(board.get_cell_state(2, 2), CellState::Block(RED)); } #[test] fn test_row_is_empty() { let mut empty_row = [CellState::Empty; WIDTH_IN_BLOCKS as usize]; assert!(Board::row_is_empty(&empty_row)); empty_row[1] = CellState::Block(RED); assert!(!Board::row_is_empty(&empty_row)); } #[test] fn test_row_is_complete() { let mut complete_row = [CellState::Block(RED); WIDTH_IN_BLOCKS as usize]; assert!(Board::row_is_complete(&complete_row)); complete_row[1] = CellState::Empty; assert!(!Board::row_is_complete(&complete_row)); } #[test] fn test_create_empty_row() { let row = Board::create_empty_row(); assert!(Board::row_is_empty(&row)); } #[test] fn test_find_completed_row_indexes_simple() { let i = (HEIGHT_IN_BLOCKS - 1) as usize; let mut board = Board::new(); let complete_row = [CellState::Block(RED); WIDTH_IN_BLOCKS as usize]; board.grid[i] = complete_row; let result = board.find_completed_row_indexes(); let expected = vec![i]; assert_eq!(result, expected); } #[test] fn test_find_completed_row_indexes_multiple() { let i = (HEIGHT_IN_BLOCKS - 1) as usize; let mut board = Board::new(); let complete_row = [CellState::Block(RED); WIDTH_IN_BLOCKS as usize]; board.grid[i] = complete_row; board.grid[i - 1] = complete_row; let result = board.find_completed_row_indexes(); let expected = vec![i, i - 1]; assert_eq!(result, expected); } #[test] fn test_find_completed_row_indexes_skip_row() { let i = (HEIGHT_IN_BLOCKS - 1) as usize; let mut board = Board::new(); let complete_row = [CellState::Block(RED); WIDTH_IN_BLOCKS as usize]; let mut incomplete_row = complete_row; incomplete_row[4] = CellState::Empty; board.grid[i] = complete_row; board.grid[i - 1] = incomplete_row; board.grid[i - 2] = complete_row; let result = board.find_completed_row_indexes(); let expected = vec![i, i - 2]; assert_eq!(result, expected); } #[test] fn test_remove_completed_rows_simple() { let i = (HEIGHT_IN_BLOCKS - 1) as usize; let mut 
board = Board::new(); let complete_row = [CellState::Block(RED); WIDTH_IN_BLOCKS as usize]; board.grid[i] = complete_row; let n = board.remove_completed_rows(); assert_eq!(n, 1); assert_eq!(board.grid, Board::create_empty_grid()); } #[test] fn test_remove_completed_rows_moves_down() { let i = (HEIGHT_IN_BLOCKS - 1) as usize; let mut board = Board::new(); let complete_row = [CellState::Block(RED); WIDTH_IN_BLOCKS as usize]; board.grid[i] = complete_row; board.grid[i-1][0] = CellState::Block(RED); let n = board.remove_completed_rows(); let mut expected_grid = Board::create_empty_grid(); expected_grid[i][0] = CellState::Block(RED); assert_eq!(n, 1); assert_eq!(board.grid, expected_grid); } #[test] fn test_remove_completed_rows_moves_two_down() { let i = (HEIGHT_IN_BLOCKS - 1) as usize; let mut board = Board::new(); let complete_row = [CellState::Block(RED); WIDTH_IN_BLOCKS as usize]; board.grid[i] = complete_row; board.grid[i-1][0] = CellState::Block(RED); board.grid[i-2] = complete_row; board.grid[i-3][1] = CellState::Block(RED); let n = board.remove_completed_rows(); let mut expected_grid = Board::create_empty_grid(); expected_grid[i][0] = CellState::Block(RED); expected_grid[i-1][1] = CellState::Block(RED); assert_eq!(n, 2); assert_eq!(board.grid, expected_grid); } }
use std::collections::VecDeque; use piston_window::{Context, G2d, Line, Transformed, types, Rectangle}; use piston_window::rectangle; use piston_window::grid::Grid; use colors::GREY; use tetromino::{Piece, Block}; use settings::*; type GridRow = [CellState; WIDTH_IN_BLOCKS as usize]; pub struct Board { grid: VecDeque<GridRow> } impl Board { pub fn new() -> Board { Board { grid: Self::create_empty_grid() } } pub fn set_piece(&mut self, piece: &Piece) { for block in piece.blocks_iter() { self.set_cell_state(block, CellState::Block(piece.get_color())); } } pub fn is_space_occupied(&self, block: Block) -> bool { match self.get_cell_state(block.x, block.y) { CellState::Block(_) => true, CellState::Empty => false } } pub fn remove_completed_rows(&mut self) -> u32 { let completed_row_indexes = self.find_completed_row_indexes(); for i in &completed_row_indexes { self.grid.remove(*i); } for _ in &completed_row_indexes { self.grid.push_front(Self::create_empty_row()); } debug_assert!(self.grid.len() == HEIGHT_IN_BLOCKS as usize); completed_row_indexes.len() as u32 } fn find_completed_row_indexes(&self) -> Vec<usize> { let mut completed_row_indexes = Vec::new(); let rows = self.grid.iter().rev().take_while(|&row| !Self::row_is_empty(row)); for (i, row) in rows.enumerate() { if Self::row_is_complete(row) { let grid_index = (HEIGHT_IN_BLOCKS - 1) as usize - i; completed_row_indexes.push(grid_index); } } completed_row_indexes } fn get_cell_state(&self, x: i32, y: i32) -> CellState { self.grid[y as usize][x as usize] } fn set_cell_state(&mut self, block: Block, cell_state: CellState) { self.grid[block.y as usize][block.x as usize] = cell_state; }
fn create_empty_row() -> GridRow { [CellState::Empty; WIDTH_IN_BLOCKS as usize] } fn row_is_empty(row: &GridRow) -> bool { row.iter().all(|&block| block == CellState::Empty) } fn row_is_complete(row: &GridRow) -> bool { row.iter().all(|&block| block != CellState::Empty) } pub fn render(&self, context: Context, graphics: &mut G2d) { let grid = Grid { cols: WIDTH_IN_BLOCKS as u32, rows: HEIGHT_IN_BLOCKS as u32, units: BLOCK_SIZE }; let line = Line::new(GREY, GRID_LINE_WIDTH); let transform = context.transform.trans(GRID_X_OFFSET, GRID_Y_OFFSET); grid.draw(&line, &Default::default(), transform, graphics); for x in 0..WIDTH_IN_BLOCKS { for y in 0..HEIGHT_IN_BLOCKS { self.get_cell_state(x, y).render(x, y, context, graphics); } } } } #[derive(Clone, Copy, Debug, PartialEq)] pub enum CellState { Empty, Block(types::Color) } impl CellState { fn render(&self, x: i32, y: i32, context: Context, graphics: &mut G2d) { match *self { CellState::Block(color) => { let rect = Rectangle { color: color, shape: rectangle::Shape::Square, border: None }; Block::new(x, y).render_in_grid(rect, context, graphics); }, CellState::Empty => {} } } } #[cfg(test)] mod tests { use super::*; use colors::{RED, CYAN}; use tetromino::{Block, Piece, I}; use settings::*; #[test] fn test_set_piece() { let mut board = Board::new(); let piece = Piece::create(&I); board.set_piece(&piece); assert_eq!(board.get_cell_state(2, 1), CellState::Empty); assert_eq!(board.get_cell_state(3, 1), CellState::Block(CYAN)); assert_eq!(board.get_cell_state(4, 1), CellState::Block(CYAN)); assert_eq!(board.get_cell_state(5, 1), CellState::Block(CYAN)); assert_eq!(board.get_cell_state(6, 1), CellState::Block(CYAN)); assert_eq!(board.get_cell_state(7, 1), CellState::Empty); } #[test] fn test_is_space_occupied() { let mut board = Board::new(); let block = Block{x: 2, y: 2}; board.set_cell_state(block, CellState::Block(RED)); assert!(board.is_space_occupied(block)); assert!(!board.is_space_occupied(Block{x: 0, y: 0})); } #[test] 
fn test_set_cell_state() { let mut board = Board::new(); assert_eq!(board.get_cell_state(0, 0), CellState::Empty); board.set_cell_state(Block{x: 2, y: 2}, CellState::Block(RED)); assert_eq!(board.get_cell_state(2, 2), CellState::Block(RED)); } #[test] fn test_row_is_empty() { let mut empty_row = [CellState::Empty; WIDTH_IN_BLOCKS as usize]; assert!(Board::row_is_empty(&empty_row)); empty_row[1] = CellState::Block(RED); assert!(!Board::row_is_empty(&empty_row)); } #[test] fn test_row_is_complete() { let mut complete_row = [CellState::Block(RED); WIDTH_IN_BLOCKS as usize]; assert!(Board::row_is_complete(&complete_row)); complete_row[1] = CellState::Empty; assert!(!Board::row_is_complete(&complete_row)); } #[test] fn test_create_empty_row() { let row = Board::create_empty_row(); assert!(Board::row_is_empty(&row)); } #[test] fn test_find_completed_row_indexes_simple() { let i = (HEIGHT_IN_BLOCKS - 1) as usize; let mut board = Board::new(); let complete_row = [CellState::Block(RED); WIDTH_IN_BLOCKS as usize]; board.grid[i] = complete_row; let result = board.find_completed_row_indexes(); let expected = vec![i]; assert_eq!(result, expected); } #[test] fn test_find_completed_row_indexes_multiple() { let i = (HEIGHT_IN_BLOCKS - 1) as usize; let mut board = Board::new(); let complete_row = [CellState::Block(RED); WIDTH_IN_BLOCKS as usize]; board.grid[i] = complete_row; board.grid[i - 1] = complete_row; let result = board.find_completed_row_indexes(); let expected = vec![i, i - 1]; assert_eq!(result, expected); } #[test] fn test_find_completed_row_indexes_skip_row() { let i = (HEIGHT_IN_BLOCKS - 1) as usize; let mut board = Board::new(); let complete_row = [CellState::Block(RED); WIDTH_IN_BLOCKS as usize]; let mut incomplete_row = complete_row; incomplete_row[4] = CellState::Empty; board.grid[i] = complete_row; board.grid[i - 1] = incomplete_row; board.grid[i - 2] = complete_row; let result = board.find_completed_row_indexes(); let expected = vec![i, i - 2]; 
assert_eq!(result, expected); } #[test] fn test_remove_completed_rows_simple() { let i = (HEIGHT_IN_BLOCKS - 1) as usize; let mut board = Board::new(); let complete_row = [CellState::Block(RED); WIDTH_IN_BLOCKS as usize]; board.grid[i] = complete_row; let n = board.remove_completed_rows(); assert_eq!(n, 1); assert_eq!(board.grid, Board::create_empty_grid()); } #[test] fn test_remove_completed_rows_moves_down() { let i = (HEIGHT_IN_BLOCKS - 1) as usize; let mut board = Board::new(); let complete_row = [CellState::Block(RED); WIDTH_IN_BLOCKS as usize]; board.grid[i] = complete_row; board.grid[i-1][0] = CellState::Block(RED); let n = board.remove_completed_rows(); let mut expected_grid = Board::create_empty_grid(); expected_grid[i][0] = CellState::Block(RED); assert_eq!(n, 1); assert_eq!(board.grid, expected_grid); } #[test] fn test_remove_completed_rows_moves_two_down() { let i = (HEIGHT_IN_BLOCKS - 1) as usize; let mut board = Board::new(); let complete_row = [CellState::Block(RED); WIDTH_IN_BLOCKS as usize]; board.grid[i] = complete_row; board.grid[i-1][0] = CellState::Block(RED); board.grid[i-2] = complete_row; board.grid[i-3][1] = CellState::Block(RED); let n = board.remove_completed_rows(); let mut expected_grid = Board::create_empty_grid(); expected_grid[i][0] = CellState::Block(RED); expected_grid[i-1][1] = CellState::Block(RED); assert_eq!(n, 2); assert_eq!(board.grid, expected_grid); } }
fn create_empty_grid() -> VecDeque<GridRow> { let mut grid = VecDeque::with_capacity(HEIGHT_IN_BLOCKS as usize); for _ in 0..HEIGHT_IN_BLOCKS { grid.push_back(Self::create_empty_row()); } grid }
function_block-full_function
[ { "content": "pub fn set_ui(ref mut ui: UICell, game: &mut Rustris) {\n\n Canvas::new().flow_right(&[\n\n (LEFT_COLUMN, Canvas::new().color(color::DARK_CHARCOAL).pad(20.0)),\n\n (MIDDLE_COLUMN, Canvas::new().color(color::TRANSPARENT).length(300.0)),\n\n (RIGHT_COLUMN, Canvas::new().color(color::DARK_CHARCOAL).pad(20.0)),\n\n ]).set(MASTER, ui);\n\n set_scoreboard(ui, game.get_game_stats());\n\n set_next_piece(ui);\n\n\n\n if game.is_paused() {\n\n set_pause_menu(ui, game);\n\n } else if game.is_game_over() {\n\n set_game_over_menu(ui, game);\n\n }\n\n\n\n}\n\n\n", "file_path": "src/ui.rs", "rank": 1, "score": 84946.77959347417 }, { "content": "fn set_next_piece(ui: &mut UICell) {\n\n Canvas::new()\n\n .label(\"Next Piece\")\n\n .label_color(color::WHITE)\n\n .w_h(NEXT_PIECE_WIDTH, NEXT_PIECE_HEIGHT)\n\n .frame(1.0)\n\n .frame_color(color::WHITE)\n\n .pad(1.0)\n\n .mid_top_of(RIGHT_COLUMN)\n\n .set(NEXT_PIECE, ui);\n\n}\n\n\n\nwidget_ids! {\n\n // Canvas IDs\n\n MASTER,\n\n LEFT_COLUMN,\n\n MIDDLE_COLUMN,\n\n RIGHT_COLUMN,\n\n\n\n // Scoreboard IDs\n", "file_path": "src/ui.rs", "rank": 2, "score": 80916.53722052286 }, { "content": "fn set_pause_menu(ui: &mut UICell, game: &mut Rustris) {\n\n Canvas::new()\n\n .w_h(WINDOW_WIDTH as f64, WINDOW_HEIGHT as f64)\n\n .floating(true)\n\n .middle_of(MASTER)\n\n .rgba(0.0, 0.0, 0.0, 0.3)\n\n .set(PAUSE_OVERLAY, ui);\n\n\n\n Canvas::new().flow_down(&[\n\n (RESUME_CANVAS, Canvas::new())\n\n ]).label(\"Paused\")\n\n .label_color(color::WHITE)\n\n .w_h(200.0, 200.0)\n\n .frame(1.0)\n\n .frame_color(color::WHITE)\n\n .pad(1.0)\n\n .middle_of(PAUSE_OVERLAY)\n\n .set(PAUSE_MENU, ui);\n\n\n\n Button::new()\n", "file_path": "src/ui.rs", "rank": 3, "score": 66130.7796622213 }, { "content": "fn set_game_over_menu(ui: &mut UICell, game: &mut Rustris) {\n\n Canvas::new()\n\n .w_h(WINDOW_WIDTH as f64, WINDOW_HEIGHT as f64)\n\n .floating(true)\n\n .middle_of(MASTER)\n\n .rgba(0.0, 0.0, 0.0, 0.3)\n\n .set(GAME_OVER_OVERLAY, ui);\n\n\n\n 
Canvas::new().flow_down(&[\n\n (FINAL_SCORE_CANVAS, Canvas::new()),\n\n (RESTART_CANVAS, Canvas::new())\n\n ]).label(\"Game Over\")\n\n .label_color(color::WHITE)\n\n .w_h(200.0, 200.0)\n\n .frame(1.0)\n\n .frame_color(color::WHITE)\n\n .pad(1.0)\n\n .middle_of(GAME_OVER_OVERLAY)\n\n .set(GAME_OVER_MENU, ui);\n\n\n", "file_path": "src/ui.rs", "rank": 4, "score": 66130.7796622213 }, { "content": "fn set_scoreboard(ui: &mut UICell, stats: &GameStats) {\n\n Canvas::new().flow_down(&[\n\n (SCORE_CANVAS, Canvas::new().label(\"Score\").label_color(color::WHITE)),\n\n (LEVEL_CANVAS, Canvas::new().label(\"Level\").label_color(color::WHITE)),\n\n (LINES_CANVAS, Canvas::new().label(\"Lines\").label_color(color::WHITE))\n\n ]).w_h(150.0, 250.0)\n\n .frame(1.0)\n\n .frame_color(color::WHITE)\n\n .pad(1.0)\n\n .mid_bottom_of(LEFT_COLUMN)\n\n .set(SCOREBOARD, ui);\n\n\n\n Text::new(&stats.get_score().to_string())\n\n .color(color::WHITE)\n\n .middle_of(SCORE_CANVAS)\n\n .set(SCORE, ui);\n\n Text::new(&stats.get_level().to_string())\n\n .color(color::WHITE)\n\n .middle_of(LEVEL_CANVAS)\n\n .set(LEVEL, ui);\n\n Text::new(&stats.get_lines().to_string())\n\n .color(color::WHITE)\n\n .middle_of(LINES_CANVAS)\n\n .set(LINES, ui);\n\n}\n\n\n", "file_path": "src/ui.rs", "rank": 5, "score": 58383.11815530498 }, { "content": "type Rotation = usize;\n\n\n\n#[derive(Clone, Copy)]\n\npub struct Piece {\n\n pub x: i32,\n\n pub y: i32,\n\n ptype: &'static Tetromino,\n\n rotation: Rotation\n\n}\n\nimpl Piece {\n\n pub fn create(ptype: &'static Tetromino) -> Piece {\n\n let x = (WIDTH_IN_BLOCKS / 2) - 2;\n\n Piece::new(x, 0, ptype, 0)\n\n }\n\n\n\n fn new(x: i32, y: i32, ptype: &'static Tetromino, rotation: Rotation) -> Piece {\n\n Piece {\n\n x: x,\n\n y: y,\n\n ptype: ptype,\n", "file_path": "src/tetromino.rs", "rank": 6, "score": 55452.32073473984 }, { "content": "pub fn create_ui(window: &PistonWindow) -> UI {\n\n let assets = find_folder::Search::KidsThenParents(3, 5)\n\n 
.for_folder(\"assets\").unwrap();\n\n let font_path = assets.join(\"fonts/NotoSans/NotoSans-Regular.ttf\");\n\n let theme = Theme::default();\n\n let glyph_cache = Glyphs::new(&font_path, window.factory.clone());\n\n UI::new(glyph_cache.unwrap(), theme)\n\n}\n\n\n", "file_path": "src/ui.rs", "rank": 7, "score": 49063.43559659248 }, { "content": "fn get_grid_position() -> ScreenPosition {\n\n ScreenPosition::new(\n\n GRID_X_OFFSET + GRID_LINE_WIDTH,\n\n GRID_Y_OFFSET + GRID_LINE_WIDTH\n\n )\n\n}\n\n\n\npub enum RenderType {\n\n Normal,\n\n Ghost\n\n}\n\nimpl RenderType {\n\n pub fn get_rectangle(&self, color: Color) -> Rectangle {\n\n match *self {\n\n RenderType::Ghost => Rectangle {\n\n color: color::BLACK,\n\n shape: rectangle::Shape::Square,\n\n border: Some(rectangle::Border {\n\n color: color,\n\n radius: GHOST_BORDER_WIDTH\n", "file_path": "src/tetromino.rs", "rank": 8, "score": 43516.61177276267 }, { "content": "fn main() {\n\n let window_title = format!(\"Rustris {}\", VERSION);\n\n\n\n let mut window: PistonWindow =\n\n WindowSettings::new(window_title, [WINDOW_WIDTH, WINDOW_HEIGHT])\n\n .exit_on_esc(true)\n\n .vsync(true)\n\n .build()\n\n .unwrap();\n\n\n\n let mut ui = create_ui(&window);\n\n let mut game = Rustris::new();\n\n\n\n window.set_ups(60);\n\n\n\n while let Some(event) = window.next() {\n\n // let the UI handle the event\n\n ui.handle_event(&event);\n\n event.update(|_| ui.set_widgets(|ui| set_ui(ui, &mut game)));\n\n\n", "file_path": "src/main.rs", "rank": 9, "score": 30670.470823883752 }, { "content": "pub const VERSION: &'static str = env!(\"CARGO_PKG_VERSION\");\n\npub const WINDOW_WIDTH: u32 = 800;\n\npub const WINDOW_HEIGHT: u32 = 600;\n\npub const HEIGHT_IN_BLOCKS: i32 = 20;\n\npub const WIDTH_IN_BLOCKS: i32 = 10;\n\npub const BLOCK_SIZE: f64 = 27.0;\n\npub const GRID_LINE_WIDTH: f64 = 1.0;\n\npub const GRID_X_OFFSET: f64 = (\n\n WINDOW_WIDTH as f64 / 2.0 - (WIDTH_IN_BLOCKS as f64/2.0 * BLOCK_SIZE)\n\n);\n\npub const GRID_Y_OFFSET: f64 
= 25.0;\n\npub const GHOST_BORDER_WIDTH: f64 = 0.3;\n\npub const MAX_GAME_LEVEL: u32 = 10;\n\npub const NEXT_PIECE_WIDTH: f64 = 150.0;\n\npub const NEXT_PIECE_HEIGHT: f64 = 150.0;\n", "file_path": "src/settings.rs", "rank": 10, "score": 23615.83045487875 }, { "content": " pub x: i32,\n\n pub y: i32\n\n}\n\nimpl Block {\n\n pub fn new(x: i32, y: i32) -> Block {\n\n Block {x: x, y: y}\n\n }\n\n\n\n pub fn render_in_grid(&self, rect: Rectangle, context: Context, graphics: &mut G2d) {\n\n self.render(get_grid_position(), rect, context, graphics);\n\n }\n\n\n\n pub fn render(&self, position: ScreenPosition, rect: Rectangle, context: Context,\n\n graphics: &mut G2d) {\n\n let square = rectangle::square(\n\n position.x, position.y, BLOCK_SIZE - (2.0 * GRID_LINE_WIDTH)\n\n );\n\n let transform = context.transform.trans(\n\n (self.x as f64) * BLOCK_SIZE,\n\n (self.y as f64) * BLOCK_SIZE\n", "file_path": "src/tetromino.rs", "rank": 24, "score": 21.12259761086287 }, { "content": " })\n\n },\n\n RenderType::Normal => Rectangle {\n\n color: color,\n\n shape: rectangle::Shape::Square,\n\n border: None\n\n }\n\n }\n\n }\n\n}\n\n\n\npub struct BlockIterator {\n\n x: i32,\n\n y: i32,\n\n index: usize,\n\n blocks: &'static Configuration\n\n}\n\nimpl BlockIterator {\n\n pub fn new(x: i32, y: i32, blocks: &'static Configuration) -> BlockIterator {\n\n BlockIterator {\n", "file_path": "src/tetromino.rs", "rank": 25, "score": 16.65813564996921 }, { "content": "impl ScreenPosition {\n\n pub fn new(x: f64, y: f64) -> ScreenPosition {\n\n ScreenPosition {x: x, y: y}\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use tetromino::{Piece, I};\n\n use settings::*;\n\n\n\n #[test]\n\n fn test_is_valid_board_position() {\n\n let mut p = Piece::create(&I);\n\n let game = Rustris::new();\n\n assert_eq!(game.is_valid_board_position(&p), true);\n\n p.x = WIDTH_IN_BLOCKS + 1;\n\n assert_eq!(game.is_valid_board_position(&p), false);\n\n p.x = -1;\n", "file_path": "src/game.rs", 
"rank": 26, "score": 16.365783168573042 }, { "content": "use std::cmp::min;\n\n\n\nuse settings::MAX_GAME_LEVEL;\n\n\n\nconst LEVEL_THRESHOLD: u32 = 10;\n\n\n\npub struct GameStats {\n\n score: u32,\n\n lines: u32,\n\n}\n\nimpl GameStats {\n\n pub fn new() -> GameStats {\n\n GameStats {\n\n score: 0,\n\n lines: 0\n\n }\n\n }\n\n\n\n pub fn score_soft_drop(&mut self) {\n\n self.score += 1;\n", "file_path": "src/stats.rs", "rank": 27, "score": 15.649999057084102 }, { "content": "\n\n pub fn render_in_grid(&self, render_type: RenderType, context: Context, graphics: &mut G2d) {\n\n let position = get_grid_position();\n\n self.render(position, render_type, context, graphics);\n\n }\n\n\n\n pub fn render_in_next_piece(&self, render_type: RenderType, context: Context,\n\n graphics: &mut G2d) {\n\n let position = ScreenPosition::new(553.0, 80.0);\n\n self.render(position, render_type, context, graphics);\n\n }\n\n\n\n fn render(&self, position: ScreenPosition, render_type: RenderType, context: Context,\n\n graphics: &mut G2d) {\n\n let rect = render_type.get_rectangle(self.get_color());\n\n for block in self.blocks_iter() {\n\n block.render(position, rect, context, graphics);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/tetromino.rs", "rank": 28, "score": 15.246638154600134 }, { "content": " }\n\n\n\n pub fn score_hard_drop(&mut self, rows_dropped: u32) {\n\n self.score += 2 * rows_dropped;\n\n }\n\n\n\n pub fn score_completed_lines(&mut self, lines: u32) {\n\n self.score += match lines {\n\n 1 => 100,\n\n 2 => 300,\n\n 3 => 500,\n\n 4 => 800,\n\n _ => 0\n\n } * self.get_level();\n\n\n\n self.lines += lines;\n\n }\n\n\n\n pub fn get_score(&self) -> u32 {\n\n self.score\n", "file_path": "src/stats.rs", "rank": 29, "score": 14.631120825736097 }, { "content": "use piston_window::*;\n\n\n\nuse board::Board;\n\nuse randomizer::Randomizer;\n\nuse tetromino::*;\n\nuse settings::*;\n\nuse stats::GameStats;\n\n\n\n\n\npub struct Rustris {\n\n board: Board,\n\n randomizer: 
Randomizer,\n\n current_piece: Piece,\n\n next_piece: Piece,\n\n stats: GameStats,\n\n time_since_moved: f64,\n\n state: GameState\n\n}\n\nimpl Rustris {\n\n pub fn new() -> Rustris {\n", "file_path": "src/game.rs", "rank": 30, "score": 14.458481172541608 }, { "content": "use piston_window::{Context, G2d, Rectangle, Transformed, color, rectangle};\n\nuse piston_window::types::Color;\n\n\n\nuse colors::*;\n\nuse game::ScreenPosition;\n\nuse settings::*;\n\n\n\n\n\npub static I: Tetromino = Tetromino {\n\n configurations: [\n\n [Block{x:0, y:1}, Block{x:1, y:1}, Block{x:2, y:1}, Block{x:3, y:1}],\n\n [Block{x:2, y:0}, Block{x:2, y:1}, Block{x:2, y:2}, Block{x:2, y:3}],\n\n [Block{x:0, y:2}, Block{x:1, y:2}, Block{x:2, y:2}, Block{x:3, y:2}],\n\n [Block{x:1, y:0}, Block{x:1, y:1}, Block{x:1, y:2}, Block{x:1, y:3}]\n\n ],\n\n color: CYAN\n\n};\n\n\n\npub static J: Tetromino = Tetromino {\n\n configurations: [\n", "file_path": "src/tetromino.rs", "rank": 31, "score": 14.050052009241098 }, { "content": "\n\n fn lock_current_piece(&mut self) {\n\n self.board.set_piece(&self.current_piece);\n\n self.remove_completed_lines();\n\n self.get_new_piece();\n\n }\n\n\n\n fn remove_completed_lines(&mut self) {\n\n let number_removed = self.board.remove_completed_rows();\n\n self.stats.score_completed_lines(number_removed);\n\n }\n\n\n\n fn calculate_ghost_piece(&self) -> Piece {\n\n let mut ghost = self.current_piece;\n\n while self.is_valid_board_position(&ghost) {\n\n ghost.y += 1;\n\n }\n\n ghost.y -= 1;\n\n ghost\n\n }\n", "file_path": "src/game.rs", "rank": 32, "score": 13.944209828644405 }, { "content": "\n\n pub fn is_game_over(&self) -> bool {\n\n self.state == GameState::GameOver\n\n }\n\n\n\n pub fn set_game_state(&mut self, state: GameState) {\n\n self.state = state;\n\n }\n\n\n\n fn drop_delay(&self) -> f64 {\n\n ((MAX_GAME_LEVEL + 1) - self.stats.get_level()) as f64 * 0.10\n\n }\n\n\n\n fn is_valid_board_position(&self, piece: &Piece) -> bool {\n\n 
piece.blocks_iter().all(|block| {\n\n (block.x >= 0 && block.x < WIDTH_IN_BLOCKS &&\n\n block.y >= 0 && block.y < HEIGHT_IN_BLOCKS &&\n\n !self.board.is_space_occupied(block))\n\n })\n\n }\n", "file_path": "src/game.rs", "rank": 33, "score": 13.6366004960808 }, { "content": " }\n\n\n\n pub fn get_lines(&self) -> u32 {\n\n self.lines\n\n }\n\n\n\n pub fn get_level(&self) -> u32 {\n\n let level = (self.lines / LEVEL_THRESHOLD) + 1;\n\n min(level, MAX_GAME_LEVEL)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use settings::MAX_GAME_LEVEL;\n\n\n\n #[test]\n\n fn test_score_completed_lines() {\n\n let mut stats = GameStats::new();\n", "file_path": "src/stats.rs", "rank": 34, "score": 13.511856125512175 }, { "content": " pub fn render(&mut self, context: Context, graphics: &mut G2d) {\n\n self.board.render(context, graphics);\n\n self.calculate_ghost_piece().render_in_grid(RenderType::Ghost, context, graphics);\n\n self.current_piece.render_in_grid(RenderType::Normal, context, graphics);\n\n self.next_piece.render_in_next_piece(RenderType::Normal, context, graphics);\n\n }\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum GameState {\n\n Playing,\n\n Paused,\n\n GameOver\n\n}\n\n\n\n#[derive(Clone, Copy)]\n\npub struct ScreenPosition {\n\n pub x: f64,\n\n pub y: f64\n\n}\n", "file_path": "src/game.rs", "rank": 35, "score": 13.007618632111159 }, { "content": " self.stats = GameStats::new();\n\n self.time_since_moved = 0.0;\n\n self.set_game_state(GameState::Playing);\n\n }\n\n\n\n pub fn get_game_stats(&self) -> &GameStats {\n\n &self.stats\n\n }\n\n\n\n pub fn set_current_piece(&mut self, piece: Piece) {\n\n self.current_piece = piece;\n\n }\n\n\n\n pub fn is_playing(&self) -> bool {\n\n self.state == GameState::Playing\n\n }\n\n\n\n pub fn is_paused(&self) -> bool {\n\n self.state == GameState::Paused\n\n }\n", "file_path": "src/game.rs", "rank": 36, "score": 12.043301733249166 }, { "content": " rotation: rotation\n\n }\n\n }\n\n\n\n pub fn 
blocks_iter(&self) -> BlockIterator {\n\n let configuration = self.ptype.get_configuration(self.rotation);\n\n BlockIterator::new(self.x, self.y, configuration)\n\n }\n\n\n\n pub fn rotated(&self) -> Self {\n\n let new_rotation = (self.rotation + 1) % self.ptype.configurations.len();\n\n let mut rotated = Self::new(self.x, self.y, self.ptype, new_rotation);\n\n rotated.x -= rotated.wall_kick_translation();\n\n rotated\n\n }\n\n\n\n pub fn moved(&self, direction: Direction) -> Self {\n\n let (trans_x, trans_y) = match direction {\n\n Direction::Left => (-1, 0),\n\n Direction::Right => (1, 0),\n", "file_path": "src/tetromino.rs", "rank": 37, "score": 11.643873761873088 }, { "content": " let mut randomizer = Randomizer::new();\n\n let current_piece = randomizer.create_piece();\n\n let next_piece = randomizer.create_piece();\n\n Rustris {\n\n board: Board::new(),\n\n randomizer: randomizer,\n\n current_piece: current_piece,\n\n next_piece: next_piece,\n\n stats: GameStats::new(),\n\n time_since_moved: 0.0,\n\n state: GameState::Playing\n\n }\n\n }\n\n\n\n pub fn reset(&mut self) {\n\n self.board = Board::new();\n\n let mut randomizer = Randomizer::new();\n\n self.current_piece = randomizer.create_piece();\n\n self.next_piece = randomizer.create_piece();\n\n self.randomizer = randomizer;\n", "file_path": "src/game.rs", "rank": 38, "score": 10.880799120612979 }, { "content": "\n\n fn get_new_piece(&mut self) {\n\n let next = self.next_piece;\n\n if self.is_valid_board_position(&next){\n\n self.set_current_piece(next);\n\n self.next_piece = self.randomizer.create_piece();\n\n } else {\n\n self.state = GameState::GameOver;\n\n }\n\n }\n\n\n\n fn update(&mut self) {\n\n let moved = self.current_piece.moved(Direction::Down);\n\n if !self.is_valid_board_position(&moved) {\n\n self.lock_current_piece();\n\n } else {\n\n self.set_current_piece(moved);\n\n }\n\n }\n\n\n", "file_path": "src/game.rs", "rank": 39, "score": 10.339653044013216 }, { "content": " moved = 
Some(self.current_piece.moved(Direction::Right));\n\n }\n\n Button::Keyboard(Key::Space) => {\n\n let ghost = self.calculate_ghost_piece();\n\n let rows_dropped = (ghost.y - self.current_piece.y) as u32;\n\n self.stats.score_hard_drop(rows_dropped);\n\n self.set_current_piece(ghost);\n\n self.lock_current_piece();\n\n }\n\n Button::Keyboard(Key::P) => {\n\n self.state = GameState::Paused\n\n }\n\n _ => {}\n\n }\n\n }\n\n _ => {}\n\n }\n\n if let Some(piece) = moved {\n\n if self.is_valid_board_position(&piece) {\n\n self.set_current_piece(piece);\n", "file_path": "src/game.rs", "rank": 40, "score": 9.741691332135261 }, { "content": "use std::collections::VecDeque;\n\nuse rand::{Rng, thread_rng};\n\n\n\nuse tetromino::*;\n\n\n\npub struct Randomizer {\n\n history: VecDeque<&'static Tetromino>\n\n}\n\nimpl Randomizer {\n\n pub fn new() -> Randomizer {\n\n let mut rand = Randomizer {\n\n history: VecDeque::new()\n\n };\n\n rand.add_to_history(&Z);\n\n rand.add_to_history(&S);\n\n rand.add_to_history(&Z);\n\n rand.add_to_history(&S);\n\n rand\n\n }\n\n\n", "file_path": "src/randomizer.rs", "rank": 41, "score": 8.685272661355741 }, { "content": " Direction::Down => (0, 1)\n\n };\n\n Self::new(self.x + trans_x, self.y + trans_y, self.ptype, self.rotation)\n\n }\n\n\n\n fn wall_kick_translation(&self) -> i32 {\n\n let min_block = self.blocks_iter().min_by_key(|block| block.x).unwrap();\n\n let max_block = self.blocks_iter().max_by_key(|block| block.x).unwrap();\n\n if min_block.x < 0 {\n\n min_block.x\n\n } else if max_block.x >= WIDTH_IN_BLOCKS {\n\n max_block.x - WIDTH_IN_BLOCKS + 1\n\n } else {\n\n 0\n\n }\n\n }\n\n\n\n pub fn get_color(&self) -> Color {\n\n self.ptype.color\n\n }\n", "file_path": "src/tetromino.rs", "rank": 42, "score": 8.585573227361806 }, { "content": " fn handle_playing_input(&mut self, input: Input) {\n\n let mut moved: Option<Piece> = None;\n\n match input {\n\n Input::Press(key) => {\n\n match key {\n\n Button::Keyboard(Key::Up) => {\n\n moved = 
Some(self.current_piece.rotated());\n\n }\n\n Button::Keyboard(Key::Down) => {\n\n let move_down = self.current_piece.moved(Direction::Down);\n\n if self.is_valid_board_position(&move_down) {\n\n self.time_since_moved = 0.0;\n\n self.stats.score_soft_drop();\n\n self.set_current_piece(move_down);\n\n }\n\n }\n\n Button::Keyboard(Key::Left) => {\n\n moved = Some(self.current_piece.moved(Direction::Left));\n\n }\n\n Button::Keyboard(Key::Right) => {\n", "file_path": "src/game.rs", "rank": 43, "score": 8.308152961586792 }, { "content": "use find_folder;\n\nuse conrod::{\n\n Button, Canvas, Colorable, Frameable, Positionable, Labelable, Sizeable, Theme,\n\n Ui, UiCell, Text, Widget, color\n\n};\n\nuse piston_window::{G2d, Glyphs, Graphics, PistonWindow};\n\n\n\nuse game::{Rustris, GameState};\n\nuse stats::GameStats;\n\nuse settings::*;\n\n\n\n\n\npub type Backend = (<G2d<'static> as Graphics>::Texture, Glyphs);\n\npub type UI = Ui<Backend>;\n\npub type UICell<'a> = UiCell<'a, Backend>;\n\n\n\n\n", "file_path": "src/ui.rs", "rank": 44, "score": 8.279850081432329 }, { "content": " pub fn create_piece(&mut self) -> Piece {\n\n let all_tetrominos = [&I, &J, &L, &O, &S, &T, &Z];\n\n let mut random_ptype = None;\n\n for _ in 0..6 {\n\n random_ptype = thread_rng().choose(&all_tetrominos);\n\n match random_ptype {\n\n Some(ptype) => {\n\n if self.history.iter().all(|&item| item != *ptype) {\n\n break;\n\n }\n\n },\n\n _ => {}\n\n }\n\n }\n\n let ptype = random_ptype.unwrap();\n\n self.add_to_history(ptype);\n\n Piece::create(ptype)\n\n }\n\n\n\n fn add_to_history(&mut self, ptype: &'static Tetromino) {\n", "file_path": "src/randomizer.rs", "rank": 45, "score": 6.994616278468181 }, { "content": " assert_eq!(game.is_valid_board_position(&p), false);\n\n p.y = HEIGHT_IN_BLOCKS + 1;\n\n assert_eq!(game.is_valid_board_position(&p), false);\n\n p.y = -1;\n\n assert_eq!(game.is_valid_board_position(&p), false);\n\n }\n\n\n\n #[test]\n\n fn test_drop_delay() {\n\n let mut game = 
Rustris::new();\n\n assert_eq!(game.drop_delay(), 1.0);\n\n }\n\n}\n", "file_path": "src/game.rs", "rank": 46, "score": 6.7377089679818445 }, { "content": " }\n\n }\n\n }\n\n\n\n fn handle_paused_input(&mut self, input: Input) {\n\n match input {\n\n Input::Press(key) => {\n\n match key {\n\n Button::Keyboard(Key::P) => {\n\n self.state = GameState::Playing\n\n },\n\n _ => {}\n\n }\n\n },\n\n _ => {}\n\n }\n\n }\n\n\n\n pub fn on_input(&mut self, input: Input) {\n\n match self.state {\n", "file_path": "src/game.rs", "rank": 47, "score": 6.466452789641257 }, { "content": " p.x = 9;\n\n let kicked_translation = p.wall_kick_translation();\n\n assert_eq!(kicked_translation, 3);\n\n }\n\n\n\n #[test]\n\n fn test_wall_kick_out_of_bounds_left() {\n\n let mut p = Piece::create(&I);\n\n p.x = -2;\n\n let kicked_translation = p.wall_kick_translation();\n\n assert_eq!(kicked_translation, -2);\n\n }\n\n\n\n #[test]\n\n fn test_block_iterator() {\n\n let mut block_iter = BlockIterator::new(2, 2, &I.configurations[0]);\n\n assert_eq!(block_iter.next(), Some(Block{x: 2, y: 3}));\n\n assert_eq!(block_iter.next(), Some(Block{x: 3, y: 3}));\n\n assert_eq!(block_iter.next(), Some(Block{x: 4, y: 3}));\n\n assert_eq!(block_iter.next(), Some(Block{x: 5, y: 3}));\n\n assert_eq!(block_iter.next(), None);\n\n }\n\n}\n", "file_path": "src/tetromino.rs", "rank": 48, "score": 6.4383158323692316 }, { "content": "#[macro_use] extern crate conrod;\n\nextern crate find_folder;\n\nextern crate piston_window;\n\nextern crate rand;\n\n\n\nuse piston_window::{EventLoop, PistonWindow, WindowSettings, UpdateEvent, clear};\n\nuse piston_window::Event::{Input, Update, Render};\n\n\n\nmod board;\n\nmod colors;\n\nmod game;\n\nmod randomizer;\n\nmod tetromino;\n\nmod settings;\n\nmod stats;\n\nmod ui;\n\n\n\nuse game::Rustris;\n\nuse settings::*;\n\nuse ui::{create_ui, set_ui};\n\n\n\n\n", "file_path": "src/main.rs", "rank": 49, "score": 6.424604852445151 }, { "content": " x: x,\n\n y: y,\n\n index: 0,\n\n 
blocks: blocks\n\n }\n\n }\n\n}\n\nimpl Iterator for BlockIterator {\n\n type Item = Block;\n\n fn next(&mut self) -> Option<Block> {\n\n if self.index >= self.blocks.len() {\n\n return None;\n\n }\n\n let ref block = self.blocks[self.index];\n\n self.index += 1;\n\n let translated_x = self.x + block.x;\n\n let translated_y = self.y + block.y;\n\n Some(Block{x: translated_x, y: translated_y})\n\n }\n\n}\n", "file_path": "src/tetromino.rs", "rank": 50, "score": 6.366269466383794 }, { "content": " );\n\n rect.draw(square, &Default::default(), transform, graphics);\n\n }\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_piece_rotated() {\n\n let original = Piece::create(&I);\n\n assert_eq!(original.rotation, 0);\n\n let mut rotated = original.rotated();\n\n assert_eq!(rotated.rotation, 1);\n\n assert_eq!(original.x, rotated.x);\n\n rotated = rotated.rotated();\n\n assert_eq!(rotated.rotation, 2);\n\n assert_eq!(original.x, rotated.x);\n", "file_path": "src/tetromino.rs", "rank": 51, "score": 6.012750756661611 }, { "content": "\n\npub enum Direction {\n\n Left,\n\n Right,\n\n Down\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub struct Tetromino {\n\n configurations: [Configuration; 4],\n\n color: Color\n\n}\n\nimpl Tetromino {\n\n fn get_configuration(&self, rotation: Rotation) -> &Configuration {\n\n &self.configurations[rotation]\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub struct Block {\n", "file_path": "src/tetromino.rs", "rank": 52, "score": 5.790668881671264 }, { "content": " SCOREBOARD,\n\n SCORE_CANVAS,\n\n LEVEL_CANVAS,\n\n LINES_CANVAS,\n\n SCORE,\n\n LEVEL,\n\n LINES,\n\n\n\n // Pause Menu\n\n PAUSE_OVERLAY,\n\n PAUSE_MENU,\n\n RESUME_CANVAS,\n\n RESUME_BUTTON,\n\n\n\n // Game Over Menu\n\n GAME_OVER_OVERLAY,\n\n GAME_OVER_MENU,\n\n NEW_GAME_BUTTON,\n\n RESTART_CANVAS,\n\n FINAL_SCORE_CANVAS,\n\n FINAL_SCORE_TEXT,\n\n\n\n // Next Piece IDs\n\n NEXT_PIECE\n\n}\n", "file_path": "src/ui.rs", "rank": 
53, "score": 5.594401261163261 }, { "content": " fn test_get_level() {\n\n let mut stats = GameStats::new();\n\n assert_eq!(stats.get_level(), 1);\n\n stats.lines = 89;\n\n assert_eq!(stats.get_level(), 9);\n\n stats.lines = 150;\n\n assert_eq!(stats.get_level(), MAX_GAME_LEVEL);\n\n }\n\n}\n\n\n", "file_path": "src/stats.rs", "rank": 54, "score": 5.396052792092021 }, { "content": " [Block{x:1, y:0}, Block{x:0, y:1}, Block{x:1, y:1}, Block{x:2, y:1}],\n\n [Block{x:2, y:1}, Block{x:1, y:0}, Block{x:1, y:1}, Block{x:1, y:2}],\n\n [Block{x:1, y:2}, Block{x:2, y:1}, Block{x:1, y:1}, Block{x:0, y:1}],\n\n [Block{x:0, y:1}, Block{x:1, y:0}, Block{x:1, y:1}, Block{x:1, y:2}]\n\n ],\n\n color: PURPLE\n\n};\n\n\n\npub static Z: Tetromino = Tetromino {\n\n configurations: [\n\n [Block{x:0, y:0}, Block{x:1, y:0}, Block{x:1, y:1}, Block{x:2, y:1}],\n\n [Block{x:2, y:0}, Block{x:2, y:1}, Block{x:1, y:1}, Block{x:1, y:2}],\n\n [Block{x:2, y:2}, Block{x:1, y:2}, Block{x:1, y:1}, Block{x:0, y:1}],\n\n [Block{x:0, y:2}, Block{x:0, y:1}, Block{x:1, y:1}, Block{x:1, y:0}]\n\n ],\n\n color: RED\n\n};\n\n\n\npub type Configuration = [Block; 4];\n", "file_path": "src/tetromino.rs", "rank": 55, "score": 5.21037324512026 }, { "content": " [Block{x:1, y:0}, Block{x:1, y:1}, Block{x:2, y:0}, Block{x:2, y:1}],\n\n [Block{x:1, y:0}, Block{x:1, y:1}, Block{x:2, y:0}, Block{x:2, y:1}],\n\n [Block{x:1, y:0}, Block{x:1, y:1}, Block{x:2, y:0}, Block{x:2, y:1}],\n\n [Block{x:1, y:0}, Block{x:1, y:1}, Block{x:2, y:0}, Block{x:2, y:1}]\n\n ],\n\n color: YELLOW\n\n};\n\n\n\npub static S: Tetromino = Tetromino {\n\n configurations: [\n\n [Block{x:0, y:1}, Block{x:1, y:1}, Block{x:1, y:0}, Block{x:2, y:0}],\n\n [Block{x:1, y:0}, Block{x:1, y:1}, Block{x:2, y:1}, Block{x:2, y:2}],\n\n [Block{x:2, y:1}, Block{x:1, y:1}, Block{x:1, y:2}, Block{x:0, y:2}],\n\n [Block{x:1, y:2}, Block{x:1, y:1}, Block{x:0, y:1}, Block{x:0, y:0}]\n\n ],\n\n color: LIME\n\n};\n\n\n\npub static T: Tetromino = Tetromino {\n\n 
configurations: [\n", "file_path": "src/tetromino.rs", "rank": 56, "score": 5.1503717905333355 }, { "content": " [Block{x:0, y:0}, Block{x:0, y:1}, Block{x:1, y:1}, Block{x:2, y:1}],\n\n [Block{x:2, y:0}, Block{x:1, y:0}, Block{x:1, y:1}, Block{x:1, y:2}],\n\n [Block{x:2, y:2}, Block{x:2, y:1}, Block{x:1, y:1}, Block{x:0, y:1}],\n\n [Block{x:0, y:2}, Block{x:1, y:2}, Block{x:1, y:1}, Block{x:1, y:0}]\n\n ],\n\n color: BLUE\n\n};\n\n\n\npub static L: Tetromino = Tetromino {\n\n configurations: [\n\n [Block{x:2, y:0}, Block{x:2, y:1}, Block{x:1, y:1}, Block{x:0, y:1}],\n\n [Block{x:2, y:2}, Block{x:1, y:2}, Block{x:1, y:1}, Block{x:1, y:0}],\n\n [Block{x:0, y:2}, Block{x:0, y:1}, Block{x:1, y:1}, Block{x:2, y:1}],\n\n [Block{x:0, y:0}, Block{x:1, y:0}, Block{x:1, y:1}, Block{x:1, y:2}]\n\n ],\n\n color: ORANGE\n\n};\n\n\n\npub static O: Tetromino = Tetromino {\n\n configurations: [\n", "file_path": "src/tetromino.rs", "rank": 57, "score": 5.1503717905333355 }, { "content": "use piston_window::types::Color;\n\n\n\n// Lifted and modified from Conrod\n\nmacro_rules! 
make_color {\n\n ($r:expr, $g:expr, $b:expr) => (\n\n [$r as f32 / 255.0, $g as f32 / 255.0, $b as f32 / 255.0, 1.0]\n\n );\n\n ($r:expr, $g:expr, $b:expr, $a:expr) => (\n\n [$r as f32 / 255.0, $g as f32 / 255.0, $b as f32 / 255.0, $a as f32 / 255.0]\n\n );\n\n}\n\n\n\n// Tetromino colors\n\npub const CYAN: Color = make_color!(0x00, 0xff, 0xff);\n\npub const BLUE: Color = make_color!(0x34, 0x65, 0xA4);\n\npub const ORANGE: Color = make_color!(0xF5, 0x79, 0x00);\n\npub const YELLOW: Color = make_color!(0xED, 0xD4, 0x00);\n\npub const LIME: Color = make_color!(0x80, 0xFF, 0x00);\n\npub const PURPLE: Color = make_color!(0x75, 0x50, 0x7B);\n\npub const RED: Color = make_color!(0xCC, 0x00, 0x00);\n\n\n\n// Grid color\n\npub const GREY: Color = [0.15, 0.15, 0.15, 1.0];\n", "file_path": "src/colors.rs", "rank": 58, "score": 4.650176083387446 }, { "content": " {\n\n let stats = game.get_game_stats();\n\n Text::new(\n\n &format!(\"Final Score: {}\", stats.get_score().to_string()))\n\n .color(color::WHITE)\n\n .middle_of(FINAL_SCORE_CANVAS)\n\n .set(FINAL_SCORE_TEXT, ui);\n\n }\n\n\n\n Button::new()\n\n .label(\"New Game?\")\n\n .label_color(color::WHITE)\n\n .color(color::CHARCOAL)\n\n .middle_of(RESTART_CANVAS)\n\n .w_h(150.0, 30.0)\n\n .react(|| {\n\n game.reset();\n\n })\n\n .set(NEW_GAME_BUTTON, ui);\n\n}\n\n\n", "file_path": "src/ui.rs", "rank": 59, "score": 4.352555883657398 }, { "content": " self.history.push_back(ptype);\n\n if self.history.len() > 4 {\n\n self.history.pop_front();\n\n }\n\n debug_assert!(self.history.len() <= 4);\n\n }\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use tetromino::*;\n\n\n\n #[test]\n\n fn test_add_to_history_ensure_history_length() {\n\n let mut rand = Randomizer::new();\n\n assert_eq!(rand.history.len(), 4);\n\n rand.add_to_history(&L);\n\n let length = rand.history.len();\n\n assert_eq!(length, 4);\n\n assert_eq!(rand.history[length - 1], &L);\n\n }\n\n}\n", "file_path": "src/randomizer.rs", "rank": 60, 
"score": 4.182541475840722 }, { "content": " GameState::Playing => self.handle_playing_input(input),\n\n GameState::Paused => self.handle_paused_input(input),\n\n _ => {}\n\n }\n\n }\n\n\n\n pub fn on_update(&mut self, update_args: UpdateArgs) {\n\n match self.state {\n\n GameState::Playing => {\n\n self.time_since_moved += update_args.dt;\n\n let delay = self.drop_delay();\n\n if self.time_since_moved >= delay {\n\n self.time_since_moved -= delay;\n\n self.update();\n\n }\n\n },\n\n _ => {}\n\n }\n\n }\n\n\n", "file_path": "src/game.rs", "rank": 61, "score": 4.013564001706576 }, { "content": " assert_eq!(result.x, p.x - 1);\n\n }\n\n\n\n #[test]\n\n fn test_piece_moved_right() {\n\n let p = Piece::create(&Z);\n\n let result = p.moved(Direction::Right);\n\n assert_eq!(result.x, p.x + 1);\n\n }\n\n\n\n #[test]\n\n fn test_wall_kick_in_bounds() {\n\n let p = Piece::create(&I);\n\n let kicked_translation = p.wall_kick_translation();\n\n assert_eq!(kicked_translation, 0);\n\n }\n\n\n\n #[test]\n\n fn test_wall_kick_out_of_bounds_right() {\n\n let mut p = Piece::create(&I);\n", "file_path": "src/tetromino.rs", "rank": 62, "score": 3.823633180353399 }, { "content": " rotated = rotated.rotated();\n\n assert_eq!(rotated.rotation, 3);\n\n assert_eq!(original.x, rotated.x);\n\n rotated = rotated.rotated();\n\n assert_eq!(rotated.rotation, 0);\n\n assert_eq!(original.x, rotated.x);\n\n }\n\n\n\n #[test]\n\n fn test_piece_rotated_kicked() {\n\n let mut p = Piece::create(&I);\n\n p.x = 9;\n\n let rotated = p.rotated();\n\n assert_eq!(rotated.x, 7)\n\n }\n\n\n\n #[test]\n\n fn test_piece_moved_left() {\n\n let p = Piece::create(&Z);\n\n let result = p.moved(Direction::Left);\n", "file_path": "src/tetromino.rs", "rank": 63, "score": 3.7150609379315545 }, { "content": "# Rustris\n\n\n\nThis is my attempt at building a Tetris clone in Rust using the Piston game engine. The goal was to\n\ntry to emulate the rules of Standard Tetris as closely as possible. 
The scoring follows these\n\n[guidelines](http://tetris.wikia.com/wiki/Scoring#Recent_guideline_compatible_games), without the\n\nT-Spin rules. I also used [this guide](http://www.colinfahey.com/tetris/tetris.html) as a reference.\n\n\n\n![Rustris](/assets/images/rustris.png?raw=true)\n\n\n\n## Build Instructions\n\nCompiles with Rust 1.8.\n\n\n\nTo build and run the executable:\n\n\n\n cargo run --release\n\n\n\nTo build the executable:\n\n\n\n cargo build --release\n\n\n\nTo run the tests:\n\n\n\n cargo test\n\n\n\n## Key Bindings\n\n* Left and Right arrows move the tetromino left and right respectively\n\n* Up rotates the tetromino clockwise\n\n* Down increases the rate of decent of the tetromino (soft-drop)\n\n* Space snaps the piece immediately to the location of the ghost-piece (hard-drop)\n\n* P pauses the game\n\n* Escape quits\n\n\n", "file_path": "README.md", "rank": 64, "score": 3.6613360962440367 }, { "content": "\n\n #[test]\n\n fn test_score_soft_drop() {\n\n let mut stats = GameStats::new();\n\n stats.score_soft_drop();\n\n assert_eq!(stats.get_score(), 1);\n\n for _ in 0..20 {\n\n stats.score_soft_drop();\n\n }\n\n assert_eq!(stats.get_score(), 21);\n\n }\n\n\n\n #[test]\n\n fn test_score_hard_drop() {\n\n let mut stats = GameStats::new();\n\n stats.score_hard_drop(10);\n\n assert_eq!(stats.get_score(), 20);\n\n }\n\n\n\n #[test]\n", "file_path": "src/stats.rs", "rank": 65, "score": 3.478655731833223 }, { "content": " stats.score_completed_lines(1);\n\n assert_eq!(stats.get_score(), 100);\n\n assert_eq!(stats.get_level(), 1);\n\n assert_eq!(stats.get_lines(), 1);\n\n\n\n stats.score_completed_lines(2);\n\n assert_eq!(stats.get_score(), 400);\n\n assert_eq!(stats.get_level(), 1);\n\n assert_eq!(stats.get_lines(), 3);\n\n\n\n stats.score_completed_lines(3);\n\n assert_eq!(stats.get_score(), 900);\n\n assert_eq!(stats.get_level(), 1);\n\n assert_eq!(stats.get_lines(), 6);\n\n\n\n stats.score_completed_lines(4);\n\n assert_eq!(stats.get_score(), 
1700);\n\n assert_eq!(stats.get_level(), 2);\n\n assert_eq!(stats.get_lines(), 10);\n\n }\n", "file_path": "src/stats.rs", "rank": 66, "score": 2.953360316767368 }, { "content": " .label(\"Resume\")\n\n .label_color(color::WHITE)\n\n .color(color::CHARCOAL)\n\n .middle_of(RESUME_CANVAS)\n\n .w_h(150.0, 30.0)\n\n .react(|| {\n\n game.set_game_state(GameState::Playing);\n\n })\n\n .set(RESUME_BUTTON, ui);\n\n}\n\n\n", "file_path": "src/ui.rs", "rank": 67, "score": 2.4707320854267585 }, { "content": " match event {\n\n Input(input) => {\n\n game.on_input(input);\n\n }\n\n Update(update_args) => {\n\n game.on_update(update_args);\n\n }\n\n Render(_) => {\n\n window.draw_2d(&event, |c, g| {\n\n clear([0.0, 0.0, 0.0, 1.0], g);\n\n ui.draw(c, g);\n\n if game.is_playing() {\n\n game.render(c, g);\n\n }\n\n });\n\n }\n\n _ => {}\n\n }\n\n }\n\n}\n", "file_path": "src/main.rs", "rank": 68, "score": 1.9313311808122702 } ]
Rust
src/text_input.rs
khyperia/scopie
af97b9e1286583c095f3e5f0c2665bdb326f8fe6
use crate::Result; use glutin::{self, event::VirtualKeyCode as Key}; use khygl::{render_text::TextRenderer, render_texture::TextureRenderer, Rect}; use std::{convert::TryInto, mem::replace}; pub struct TextInput { old_inputs: Vec<String>, old_input_index: usize, input_text: String, message: String, exec: bool, okay: bool, } impl TextInput { pub fn new() -> Self { Self { old_inputs: Vec::new(), old_input_index: 0, input_text: String::new(), message: String::new(), okay: true, exec: false, } } pub fn try_get_exec_cmd(&mut self) -> Option<String> { if self.exec { self.exec = false; let result = replace(&mut self.input_text, String::new()); if self.old_inputs.last() != Some(&result) { self.old_inputs.push(result.clone()); if self.old_inputs.len() > 100 { self.old_inputs.remove(0); } } self.old_input_index = self.old_inputs.len(); Some(result) } else { None } } pub fn set_exec_result(&mut self, message: String, okay: bool) { self.message = message; self.okay = okay; } pub fn key_down(&mut self, key: Key) { match key { Key::Back => { self.old_input_index = self.old_inputs.len(); self.input_text.pop(); } Key::Return => self.exec = true, Key::Up => { if self.old_input_index > 0 { self.old_input_index -= 1; } self.set_input_text(); } Key::Down => { self.old_input_index += 1; if self.old_input_index > self.old_inputs.len() { self.old_input_index = self.old_inputs.len(); } self.set_input_text(); } _ => (), } } fn set_input_text(&mut self) { self.input_text = self .old_inputs .get(self.old_input_index) .cloned() .unwrap_or_default(); } pub fn received_character(&mut self, ch: char) { if ch >= ' ' { self.old_input_index = self.old_inputs.len(); self.input_text.push(ch); } } pub fn render( &mut self, texture_renderer: &TextureRenderer, text_renderer: &mut TextRenderer, screen_size: (usize, usize), ) -> Result<usize> { let input_pos_y = screen_size.1 as isize - text_renderer.spacing as isize - 1; let input_pos_y = input_pos_y.try_into().unwrap_or(0); let input_pos = (10, 
input_pos_y); text_renderer.render( texture_renderer, &self.input_text, [1.0, 1.0, 1.0, 1.0], input_pos, screen_size, )?; let error_pos_y = screen_size.1 as isize - 2 * text_renderer.spacing as isize - 1; let error_pos_y = error_pos_y.try_into().unwrap_or(0); let error_pos = (10, error_pos_y); text_renderer.render( texture_renderer, &self.message, [1.0, 1.0, 1.0, 1.0], error_pos, screen_size, )?; let command_color = if self.okay { [0.5, 0.5, 0.5, 1.0] } else { [1.0, 0.5, 0.5, 1.0] }; texture_renderer.rect( Rect::new( input_pos.0, input_pos.1, (screen_size.0 as isize - input_pos.0 as isize * 2) .try_into() .unwrap_or(2), text_renderer.spacing, ), command_color, (screen_size.0 as f32, screen_size.1 as f32), )?; Ok(if self.message.is_empty() { input_pos_y } else { error_pos_y }) } }
use crate::Result; use glutin::{self, event::VirtualKeyCode as Key}; use khygl::{render_text::TextRenderer, render_texture::TextureRenderer, Rect}; use std::{convert::TryInto, mem::replace}; pub struct TextInput { old_inputs: Vec<String>, old_input_index: usize, input_text: String, message: String, exec: bool, okay: bool, } impl TextInput { pub fn new() -> Self { Self { old_inputs: Vec::new(), old_input_index: 0, input_text: String::new(), message: String::new(), okay: true, exec: false, } } pub fn try_get_exec_cmd(&mut self) -> Option<String> { if self.exec { self.exec = false; let result = replace(&mut self.input_text, String::new()); if self.old_inputs.last() != Some(&result) { self.old_inputs.push(result.clone()); if self.old_inputs.len() > 100 { self.old_inputs.remove(0); } } self.old_input_index = self.old_inputs.len(); Some(result) } else { None } } pub fn set_exec_result(&mut self, message: String, okay: bool) { self.message = message; self.okay = okay; } pub fn key_down(&mut self, key: Key) { match key { Key::Back => { self.old_input_index = self.old_inputs.len(); self.input_text.pop(); } Key::Return => self.exec = true, Key::Up => { if self.old_input_index > 0 { self.old_input_index -= 1; } self.set_input_text(); } Key::Down => { self.old_input_index += 1; if self.old_input_index > self.old_inputs.len() { self.old_input_index = self.old_inputs.len(); } self.set_input_text(); } _ => (), } } fn set_input_text(&mut self) { self.input_text = self .old_inputs .get(self.old_input_index) .cloned() .unwrap_or_default(); } pub fn received_character(&mut self, ch: char) { if ch >= ' ' { self.old_input_index = self.old_inputs.len(); self.input_text.push(ch); } }
}
pub fn render( &mut self, texture_renderer: &TextureRenderer, text_renderer: &mut TextRenderer, screen_size: (usize, usize), ) -> Result<usize> { let input_pos_y = screen_size.1 as isize - text_renderer.spacing as isize - 1; let input_pos_y = input_pos_y.try_into().unwrap_or(0); let input_pos = (10, input_pos_y); text_renderer.render( texture_renderer, &self.input_text, [1.0, 1.0, 1.0, 1.0], input_pos, screen_size, )?; let error_pos_y = screen_size.1 as isize - 2 * text_renderer.spacing as isize - 1; let error_pos_y = error_pos_y.try_into().unwrap_or(0); let error_pos = (10, error_pos_y); text_renderer.render( texture_renderer, &self.message, [1.0, 1.0, 1.0, 1.0], error_pos, screen_size, )?; let command_color = if self.okay { [0.5, 0.5, 0.5, 1.0] } else { [1.0, 0.5, 0.5, 1.0] }; texture_renderer.rect( Rect::new( input_pos.0, input_pos.1, (screen_size.0 as isize - input_pos.0 as isize * 2) .try_into() .unwrap_or(2), text_renderer.spacing, ), command_color, (screen_size.0 as f32, screen_size.1 as f32), )?; Ok(if self.message.is_empty() { input_pos_y } else { error_pos_y }) }
function_block-full_function
[ { "content": "pub fn autoconnect(live: bool) -> Result<Camera> {\n\n init_qhyccd_resource();\n\n let mut best = None;\n\n for id in 0..Camera::num_cameras() {\n\n let info = CameraInfo::new(id)?;\n\n let is163 = info.name.contains(\"163\");\n\n if best.is_none() || is163 {\n\n best = Some(info);\n\n if is163 {\n\n break;\n\n }\n\n }\n\n }\n\n if let Some(best) = best {\n\n Ok(best.open(live)?)\n\n } else {\n\n Err(\"No QHY cameras found\".into())\n\n }\n\n}\n\n\n", "file_path": "src/camera/interface.rs", "rank": 0, "score": 165962.91780491415 }, { "content": "pub fn autoconnect() -> Result<Mount> {\n\n for port in Mount::list() {\n\n let mut m = match Mount::new(&port) {\n\n Ok(ok) => ok,\n\n Err(_) => continue,\n\n };\n\n match m.get_ra_dec_mount() {\n\n Ok(_) => (),\n\n Err(_) => continue,\n\n };\n\n m.set_time(MountTime::now())?;\n\n return Ok(m);\n\n }\n\n Err(\"No mount found\".into())\n\n}\n\n\n\npub struct Mount {\n\n port: Box<dyn serialport::SerialPort>,\n\n radec_offset: (Angle, Angle),\n\n}\n", "file_path": "src/mount/interface.rs", "rank": 1, "score": 140132.67827074276 }, { "content": "pub fn mean(seq: impl IntoIterator<Item = f64>) -> f64 {\n\n let seq = seq.into_iter();\n\n let mut sum = 0.0;\n\n let mut count = 0;\n\n for item in seq {\n\n sum += item;\n\n count += 1;\n\n }\n\n sum / count as f64\n\n}\n\n\n", "file_path": "src/alg/mod.rs", "rank": 2, "score": 112630.58943090859 }, { "content": "fn main() -> Result<()> {\n\n let el = EventLoop::with_user_event();\n\n let wb = WindowBuilder::new()\n\n .with_title(\"clam5\")\n\n .with_inner_size(glutin::dpi::LogicalSize::new(800.0, 800.0));\n\n let windowed_context = ContextBuilder::new()\n\n .with_gl_profile(GlProfile::Core)\n\n .with_vsync(true)\n\n .build_windowed(wb, &el)?;\n\n\n\n let windowed_context = unsafe { windowed_context.make_current().map_err(|(_, e)| e)? 
};\n\n\n\n let initial_size = windowed_context.window().inner_size();\n\n\n\n gl::load_with(|symbol| windowed_context.get_proc_address(symbol) as *const _);\n\n\n\n if !gl::GetError::is_loaded() {\n\n return Err(\"glGetError not loaded\".into());\n\n }\n\n\n", "file_path": "src/main.rs", "rank": 3, "score": 111973.2865763978 }, { "content": "fn read_png(path: impl AsRef<Path>) -> Result<CpuTexture<u16>> {\n\n let mut decoder = png::Decoder::new(File::open(path)?);\n\n decoder.set_transformations(png::Transformations::IDENTITY);\n\n let (info, mut reader) = decoder.read_info()?;\n\n assert_eq!(info.bit_depth, png::BitDepth::Sixteen);\n\n assert_eq!(info.color_type, png::ColorType::Grayscale);\n\n let mut buf = vec![0; info.buffer_size()];\n\n reader.next_frame(&mut buf)?;\n\n let mut buf16 = vec![0; info.width as usize * info.height as usize];\n\n for i in 0..buf16.len() {\n\n buf16[i] = u16::from(buf[i * 2]) << 8 | u16::from(buf[i * 2 + 1]);\n\n }\n\n Ok(CpuTexture::new(\n\n buf16,\n\n (info.width as usize, info.height as usize),\n\n ))\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 4, "score": 110008.79566998155 }, { "content": "// Welford's online algorithm\n\npub fn mean_stdev(seq: impl IntoIterator<Item = f64>) -> (f64, f64) {\n\n let seq = seq.into_iter();\n\n let mut count = 0;\n\n let mut mean = 0.0;\n\n let mut M2 = 0.0;\n\n for item in seq {\n\n count += 1;\n\n let delta = item - mean;\n\n mean += delta / count as f64;\n\n let delta2 = item - mean;\n\n M2 += delta * delta2;\n\n }\n\n let variance = M2 / count as f64;\n\n (mean, variance.sqrt())\n\n}\n\n\n", "file_path": "src/alg/mod.rs", "rank": 5, "score": 105288.74478798451 }, { "content": "fn run_update(mount: &mut Mount, send: &SendUserUpdate) -> Result<bool> {\n\n let ra_dec_mount = mount.get_ra_dec_mount()?;\n\n let ra_dec_real = mount.mount_to_real(ra_dec_mount);\n\n let az_alt = mount.get_az_alt()?;\n\n let aligned = mount.aligned()?;\n\n let tracking_mode = mount.tracking_mode()?;\n\n let 
location = mount.location()?;\n\n let time = mount.time()?;\n\n let send_result = send.send_event(UserUpdate::MountUpdate(MountData {\n\n ra_dec_mount,\n\n ra_dec_real,\n\n az_alt,\n\n aligned,\n\n tracking_mode,\n\n location,\n\n time,\n\n }));\n\n match send_result {\n\n Ok(()) => Ok(true),\n\n Err(glutin::event_loop::EventLoopClosed(_)) => Ok(false),\n\n }\n\n}\n", "file_path": "src/mount/thread.rs", "rank": 6, "score": 105285.94749754264 }, { "content": "fn write_png(path: impl AsRef<Path>, img: &CpuTexture<u16>) -> Result<()> {\n\n let mut encoder = png::Encoder::new(File::create(path)?, img.size.0 as u32, img.size.1 as u32);\n\n encoder.set_color(png::ColorType::Grayscale);\n\n encoder.set_depth(png::BitDepth::Sixteen);\n\n let mut writer = encoder.write_header()?;\n\n let mut output = vec![0; img.size.0 * img.size.1 * 2];\n\n let data = img.data();\n\n for i in 0..(img.size.0 * img.size.1) {\n\n output[i * 2] = (data[i] >> 8) as u8;\n\n output[i * 2 + 1] = (data[i]) as u8;\n\n }\n\n writer.write_image_data(&output)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 7, "score": 105276.28281344588 }, { "content": "pub fn stdev(mean: f64, seq: impl IntoIterator<Item = f64>) -> f64 {\n\n let seq = seq.into_iter();\n\n let mut sum = 0.0;\n\n let mut count = 0;\n\n for item in seq {\n\n let diff = mean - item;\n\n sum += diff * diff;\n\n count += 1;\n\n }\n\n (sum / count as f64).sqrt()\n\n}\n\n\n", "file_path": "src/alg/mod.rs", "rank": 8, "score": 103348.47561187118 }, { "content": "pub fn platesolve(tex: &CpuTexture<u16>, send_user_update: SendUserUpdate) -> Result<()> {\n\n let linux_file_location = \"/tmp/image.png\";\n\n let (cmd, args) = if cfg!(windows) {\n\n let local_app_data = var(\"LOCALAPPDATA\")?;\n\n let mut windows_file_location = PathBuf::new();\n\n windows_file_location.push(&local_app_data);\n\n windows_file_location.push(\"cygwin_ansvr\");\n\n windows_file_location.push(\"tmp\");\n\n let ok = windows_file_location.is_dir();\n\n 
windows_file_location.push(\"image.png\");\n\n let mut bash_location = PathBuf::new();\n\n bash_location.push(&local_app_data);\n\n bash_location.push(\"cygwin_ansvr\");\n\n bash_location.push(\"bin\");\n\n bash_location.push(\"bash.exe\");\n\n\n\n if !ok {\n\n return Err(\"ANSVR not installed\".into());\n\n }\n\n\n", "file_path": "src/platesolve.rs", "rank": 9, "score": 102982.9644478139 }, { "content": "fn check(code: u32) -> ::std::result::Result<(), QhyError> {\n\n if code == 0 {\n\n Ok(())\n\n } else {\n\n Err(QhyError { code })\n\n }\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct ROIImage {\n\n pub image: CpuTexture<u16>,\n\n // the ROI, i.e. bounds of image\n\n pub location: Rect<usize>,\n\n // the original sensor size\n\n pub original: Rect<usize>,\n\n}\n\n\n\nimpl From<CpuTexture<u16>> for ROIImage {\n\n fn from(image: CpuTexture<u16>) -> ROIImage {\n\n let original = Rect::new(0, 0, image.size.0, image.size.1);\n\n ROIImage {\n\n image,\n\n location: original.clone(),\n\n original,\n\n }\n\n }\n\n}\n\n\n\nstatic INIT_QHYCCD_RESOURCE: Once = Once::new();\n\n\n", "file_path": "src/camera/interface.rs", "rank": 10, "score": 94637.2760110998 }, { "content": "pub fn floodfind<T: Copy>(\n\n img: &CpuTexture<T>,\n\n condition: impl Fn(T) -> bool,\n\n) -> Vec<Vec<(usize, usize)>> {\n\n let mut mask = CpuTexture::new_val(false, img.size);\n\n let mut results = Vec::new();\n\n for coord in img.iter_index() {\n\n if mask[coord] {\n\n continue;\n\n }\n\n if condition(img[coord]) {\n\n let result = floodfind_one(img, &condition, coord, &mut mask);\n\n results.push(result);\n\n }\n\n }\n\n results\n\n}\n", "file_path": "src/alg/mod.rs", "rank": 11, "score": 93318.91363144277 }, { "content": "pub fn median(seq: &mut [f64]) -> f64 {\n\n seq.sort_unstable_by(|l, r| l.partial_cmp(&r).unwrap());\n\n let idx = seq.len() / 2;\n\n if seq.len() % 2 == 0 {\n\n (seq[idx - 1] + seq[idx]) / 2.0\n\n } else {\n\n seq[idx]\n\n }\n\n}\n\n\n", "file_path": "src/alg/mod.rs", "rank": 12, 
"score": 82764.56768491628 }, { "content": "pub fn u16_to_f64(mut value: u16) -> f64 {\n\n let max_value = f64::from(u16::max_value());\n\n value as f64 / max_value\n\n}\n\n\n", "file_path": "src/alg/mod.rs", "rank": 13, "score": 80787.92102934176 }, { "content": "pub fn f64_to_u8(mut value: f64) -> u8 {\n\n let max_value = f64::from(u8::max_value());\n\n value *= max_value;\n\n if value >= max_value {\n\n u8::max_value()\n\n } else if value > 0.0 {\n\n value as u8\n\n } else {\n\n 0\n\n }\n\n}\n\n\n", "file_path": "src/alg/mod.rs", "rank": 14, "score": 80787.92102934176 }, { "content": "pub fn f64_to_u16(mut value: f64) -> u16 {\n\n let max_value = f64::from(u16::max_value());\n\n value *= max_value;\n\n if value >= max_value {\n\n u16::max_value()\n\n } else if value > 0.0 {\n\n value as u16\n\n } else {\n\n 0\n\n }\n\n}\n\n\n", "file_path": "src/alg/mod.rs", "rank": 15, "score": 80787.92102934176 }, { "content": "fn handle(control_flow: &mut ControlFlow, res: Result<()>) {\n\n match res {\n\n Ok(()) => (),\n\n Err(err) => {\n\n println!(\"{:?}\", err);\n\n *control_flow = ControlFlow::Exit;\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 16, "score": 80773.27646803153 }, { "content": "fn run(recv: mpsc::Receiver<CameraCommand>, send: SendUserUpdate) -> Result<()> {\n\n let mut camera = Some(camera::interface::autoconnect(false)?);\n\n let mut running = false;\n\n let mut exposure_duration = Duration::default();\n\n let mut cmd_status = String::new();\n\n let mut exposure_start = Instant::now();\n\n loop {\n\n let mut should_restart = false;\n\n let mut had_cmd = false;\n\n let mut had_bad_cmd = false;\n\n loop {\n\n match recv.try_recv() {\n\n Ok(cmd) => match run_one(&mut camera, cmd, &mut running, &mut should_restart) {\n\n Ok(()) => had_cmd = true,\n\n Err(err) => {\n\n had_bad_cmd = true;\n\n cmd_status = format!(\"{}\", err);\n\n }\n\n },\n\n Err(mpsc::TryRecvError::Empty) => break,\n", "file_path": "src/camera/thread.rs", "rank": 17, "score": 
70018.28246037767 }, { "content": "fn run(recv: mpsc::Receiver<MountCommand>, send: SendUserUpdate) -> Result<()> {\n\n let mut mount = match autoconnect() {\n\n Ok(ok) => ok,\n\n Err(err) => {\n\n println!(\"Error connecting to mount: {}\", err);\n\n return Ok(());\n\n }\n\n };\n\n let update_rate = Duration::from_secs(1);\n\n let mut next_update = Instant::now() + update_rate;\n\n loop {\n\n let now = Instant::now();\n\n if now > next_update {\n\n next_update += update_rate;\n\n if now > next_update {\n\n // dropped frames\n\n next_update = now + update_rate;\n\n }\n\n if !run_update(&mut mount, &send)? {\n\n break Ok(());\n", "file_path": "src/mount/thread.rs", "rank": 18, "score": 70018.28246037767 }, { "content": "type MountCommand = Box<dyn FnOnce(&mut Mount) -> Result<()> + Send>;\n\n\n\n#[derive(Default, Clone, Debug)]\n\npub struct MountData {\n\n pub ra_dec_real: (Angle, Angle),\n\n pub ra_dec_mount: (Angle, Angle),\n\n pub az_alt: (Angle, Angle),\n\n pub aligned: bool,\n\n pub tracking_mode: TrackingMode,\n\n pub location: (Angle, Angle),\n\n pub time: MountTime,\n\n}\n\n\n\npub struct MountSendError {}\n\n\n\npub struct MountAsync {\n\n send: mpsc::Sender<MountCommand>,\n\n pub data: MountData,\n\n}\n\n\n", "file_path": "src/mount/thread.rs", "rank": 19, "score": 63837.16390101894 }, { "content": "fn main() {\n\n let target = env::var(\"TARGET\").expect(\"TARGET was not set\");\n\n let out_dir = env::var(\"CARGO_MANIFEST_DIR\").unwrap();\n\n println!(\"cargo:rustc-link-search=native={}\", out_dir);\n\n let files = [\n\n (\"lib/qhyccd/x64/ftd2xx.dll\", \"ftd2xx.dll\"),\n\n (\"lib/qhyccd/x64/msvcr90.dll\", \"msvcr90.dll\"),\n\n (\"lib/qhyccd/x64/qhyccd.dll\", \"qhyccd.dll\"),\n\n (\"lib/qhyccd/x64/qhyccd.lib\", \"qhyccd.lib\"),\n\n (\"lib/qhyccd/x64/tbb.dll\", \"tbb.dll\"),\n\n (\"lib/qhyccd/x64/winusb.dll\", \"winusb.dll\"),\n\n ];\n\n if target.contains(\"linux\") {\n\n println!(\"cargo:rustc-link-lib=usb-1.0\");\n\n 
println!(\"cargo:rustc-link-lib=static=stdc++\");\n\n }\n\n for (file, dest) in &files {\n\n println!(\"cargo:rerun-if-changed={}\", file);\n\n let mut path = PathBuf::new();\n\n path.push(&out_dir);\n\n path.push(dest);\n\n eprintln!(\"copying {:?} to {:?}\", file, path);\n\n copy(file, path).unwrap();\n\n }\n\n}\n", "file_path": "build.rs", "rank": 20, "score": 63511.53785366395 }, { "content": "pub fn find_stars(img: &CpuTexture<u16>, mean: f64, stdev: f64) -> Vec<Star> {\n\n let noise_sigma = 3.0;\n\n let floor = mean + stdev * noise_sigma;\n\n let floor_u16 = floor as u16;\n\n let min_size = 5;\n\n let max_size = 100;\n\n let mut stars = Vec::new();\n\n for star in floodfind(img, |v| v > floor_u16) {\n\n if star.len() > max_size {\n\n continue;\n\n }\n\n if star.len() < min_size {\n\n continue;\n\n }\n\n let mut sum = (0.0, 0.0);\n\n let mut total_flux_above = 0.0;\n\n for &pixel in &star {\n\n let flux_above = img[pixel] as f64 - floor;\n\n sum = (\n\n sum.0 + pixel.0 as f64 * flux_above,\n", "file_path": "src/alg/starfinder.rs", "rank": 21, "score": 63293.85386799673 }, { "content": "struct Display {\n\n camera_display: camera::display::CameraDisplay,\n\n mount_display: Option<mount::display::MountDisplay>,\n\n next_frequent_update: Instant,\n\n next_infrequent_update: Instant,\n\n window_size: (usize, usize),\n\n last_mouse: Option<PhysicalPosition<f64>>,\n\n pressed_mouse: HashSet<MouseButton>,\n\n status: String,\n\n old_status: String,\n\n text_input: text_input::TextInput,\n\n texture_renderer: TextureRenderer,\n\n text_renderer: TextRenderer,\n\n wasd_mount_mode: bool,\n\n wasd_camera_mode: bool,\n\n}\n\n\n\nimpl Display {\n\n fn window_size_f32(&self) -> (f32, f32) {\n\n (self.window_size.0 as f32, self.window_size.1 as f32)\n", "file_path": "src/main.rs", "rank": 22, "score": 62541.25572138303 }, { "content": "type Result<T> = std::result::Result<T, Box<dyn std::error::Error>>;\n\n\n\n#[derive(Debug)]\n\npub enum UserUpdate {\n\n 
MountUpdate(mount::thread::MountData),\n\n CameraUpdate(camera::thread::CameraData),\n\n CameraData(Arc<camera::interface::ROIImage>),\n\n SolveFinished(Angle, Angle),\n\n ProcessResult(alg::process::ProcessResult),\n\n}\n", "file_path": "src/main.rs", "rank": 23, "score": 60845.02613699982 }, { "content": "#[derive(Debug)]\n\nstruct QhyError {\n\n code: u32,\n\n}\n\n\n\nimpl fmt::Display for QhyError {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"QHY error: {}\", self.code as i32)\n\n }\n\n}\n\n\n\nimpl Error for QhyError {\n\n fn description(&self) -> &str {\n\n \"QHY error\"\n\n }\n\n}\n\n\n", "file_path": "src/camera/interface.rs", "rank": 24, "score": 59547.65363181314 }, { "content": "fn run_one(\n\n camera: &mut Option<camera::interface::Camera>,\n\n cmd: CameraCommand,\n\n running: &mut bool,\n\n restart: &mut bool,\n\n) -> Result<()> {\n\n match cmd {\n\n CameraCommand::SetControl(id, val) => {\n\n let camera = camera.as_mut().unwrap();\n\n cancel_for_modification(camera, running, restart)?;\n\n for control in camera.controls() {\n\n if control.id() == id {\n\n control.set(val)?;\n\n }\n\n }\n\n }\n\n CameraCommand::Start => {\n\n if !*running {\n\n *running = true;\n\n camera.as_mut().unwrap().start()?;\n", "file_path": "src/camera/thread.rs", "rank": 25, "score": 58863.87461231321 }, { "content": "fn cancel_for_modification(\n\n camera: &camera::interface::Camera,\n\n running: &mut bool,\n\n restart: &mut bool,\n\n) -> Result<()> {\n\n if *running && camera.use_live() {\n\n camera.stop()?;\n\n *restart = true;\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/camera/thread.rs", "rank": 26, "score": 58863.87461231321 }, { "content": "fn init_qhyccd_resource() {\n\n INIT_QHYCCD_RESOURCE.call_once(|| unsafe {\n\n check(qhy::InitQHYCCDResource()).expect(\"Failed to init QHY resources\")\n\n })\n\n}\n\n\n", "file_path": "src/camera/interface.rs", "rank": 27, "score": 57549.91294664411 }, { "content": "fn parse_regex() -> &'static Regex 
{\n\n unsafe {\n\n PARSE_ONCE.call_once(|| {\n\n PARSE_REGEX = Some(Regex::new(REGEX_STR).expect(\"parse_once regex is malformed\"))\n\n });\n\n PARSE_REGEX\n\n .as_ref()\n\n .expect(\"std::sync::Once didn't execute\")\n\n }\n\n}\n\n\n\n// note: missing filename at end, must append\n\nconst COMMAND: &[&str] = &[\n\n \"/usr/bin/solve-field\",\n\n \"-p\",\n\n \"-O\",\n\n \"-U\",\n\n \"none\",\n\n \"-B\",\n\n \"none\",\n", "file_path": "src/platesolve.rs", "rank": 28, "score": 49960.35504563691 }, { "content": "fn floodfind_one<T: Copy>(\n\n img: &CpuTexture<T>,\n\n condition: &impl Fn(T) -> bool,\n\n coord: (usize, usize),\n\n mask: &mut CpuTexture<bool>,\n\n) -> Vec<(usize, usize)> {\n\n let mut set = Vec::new();\n\n let mut next = Vec::new();\n\n mask[coord] = true;\n\n next.push(coord);\n\n set.push(coord);\n\n while let Some(coord) = next.pop() {\n\n let mut go = |dx, dy| {\n\n if let Some(coordnext) = offset(coord, (dx, dy), img.size) {\n\n if mask[coordnext] || !condition(img[coordnext]) {\n\n return;\n\n }\n\n mask[coordnext] = true;\n\n next.push(coordnext);\n\n set.push(coordnext);\n\n }\n\n };\n\n go(-1, 0);\n\n go(1, 0);\n\n go(0, -1);\n\n go(0, 1);\n\n }\n\n set\n\n}\n\n\n", "file_path": "src/alg/mod.rs", "rank": 29, "score": 48742.51121102687 }, { "content": "fn dms_parse_regex() -> &'static Regex {\n\n unsafe {\n\n DMS_PARSE_ONCE.call_once(|| {\n\n DMS_PARSE_REGEX =\n\n Some(Regex::new(DMS_PARSE_REGEX_STR).expect(\"dms_parse_once regex is malformed\"))\n\n });\n\n DMS_PARSE_REGEX\n\n .as_ref()\n\n .expect(\"std::sync::Once didn't execute\")\n\n }\n\n}\n\n\n\nstatic DMS_PARSE_ONCE: Once = Once::new();\n\nstatic mut DMS_PARSE_REGEX: Option<Regex> = None;\n\n\n\nimpl std::ops::Add for Angle {\n\n type Output = Self;\n\n fn add(self, rhs: Self) -> Self {\n\n Self::from_0to1(self.value_0to1() + rhs.value_0to1())\n\n }\n", "file_path": "src/dms.rs", "rank": 30, "score": 48742.51121102687 }, { "content": "fn mean(data: &[u16]) -> f64 {\n\n let mut sum = 0;\n\n 
for &datum in data {\n\n sum += u64::from(datum);\n\n }\n\n sum as f64 / data.len() as f64\n\n}\n\n\n", "file_path": "src/alg/process.rs", "rank": 31, "score": 46487.982989170065 }, { "content": "fn u16_to_f64(val: u16) -> f64 {\n\n val as f64 / f64::from(u16::max_value())\n\n}\n\n\n\nimpl ProcessResult {\n\n fn compute(image: &CpuTexture<u16>) -> Self {\n\n let begin = Instant::now();\n\n let mut sorted = image.data().to_vec();\n\n sorted.sort_unstable();\n\n let mean = mean(&sorted);\n\n let stdev = stdev(&sorted, mean);\n\n // let stars = find_stars(image, mean, stdev);\n\n let duration = Instant::now() - begin;\n\n Self {\n\n sorted,\n\n mean,\n\n stdev,\n\n duration,\n\n // stars,\n\n }\n", "file_path": "src/alg/process.rs", "rank": 32, "score": 45356.082428521666 }, { "content": "fn stdev(data: &[u16], mean: f64) -> f64 {\n\n let mut sum = 0.0;\n\n for &datum in data {\n\n let diff = mean - f64::from(datum);\n\n sum += diff * diff;\n\n }\n\n (sum / data.len() as f64).sqrt()\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct ProcessResult {\n\n sorted: Vec<u16>,\n\n mean: f64,\n\n stdev: f64,\n\n duration: Duration,\n\n // stars: Vec<Star>,\n\n}\n\n\n", "file_path": "src/alg/process.rs", "rank": 33, "score": 41232.686718521276 }, { "content": "fn remap_to_scale_offset(from_start: f64, from_end: f64, to_start: f64, to_end: f64) -> (f64, f64) {\n\n // lerp(to_start, to_end, invlerp(from_start, from_end, t))\n\n // invLerp(a, b, t) = (t - a) / (b - a)\n\n // lerp(a, b, t) = a + (b - a) * t\n\n // lerp(to_start, to_end, (t - from_start) / (from_end - from_start))\n\n // to_start + (to_end - to_start) * (t - from_start) / (from_end - from_start)\n\n // to_start + (to_end - to_start) * -from_start / (from_end - from_start) + (to_end - to_start) * t / (from_end - from_start)\n\n // to_start - from_start * (to_end - to_start) / (from_end - from_start) + t * (to_end - to_start) / (from_end - from_start)\n\n let slope = (to_end - to_start) / (from_end - from_start);\n\n let 
offset = to_start - from_start * slope;\n\n (slope, offset)\n\n}\n", "file_path": "src/image_display.rs", "rank": 34, "score": 31534.71840249805 }, { "content": "EXPORTC void STDCALL EnableQHYCCDMessage(bool enable);\n", "file_path": "lib/qhyccd/include/qhyccd.h", "rank": 35, "score": 30183.69333032419 }, { "content": " }\n\n _ => return Ok(false),\n\n }\n\n Ok(true)\n\n }\n\n\n\n fn camera_op(\n\n &mut self,\n\n op: impl FnOnce(&camera::thread::CameraAsync) -> std::result::Result<(), ()>,\n\n ) {\n\n let ok = match self.camera {\n\n Some(ref mut camera) => op(camera).is_ok(),\n\n None => true,\n\n };\n\n if !ok {\n\n self.camera = None;\n\n }\n\n }\n\n\n\n pub fn update(&mut self) -> bool {\n", "file_path": "src/camera/display.rs", "rank": 39, "score": 20.251513879860152 }, { "content": " pub fn info(&self) -> &CameraInfo {\n\n &self.info\n\n }\n\n\n\n pub fn use_live(&self) -> bool {\n\n self.use_live\n\n }\n\n\n\n pub fn effective_area(&self) -> Rect<usize> {\n\n self.effective_area.clone()\n\n }\n\n\n\n pub fn name(&self) -> &str {\n\n &self.info().name()\n\n }\n\n\n\n pub fn set_roi(&mut self, roi: Rect<usize>) -> Result<()> {\n\n self.current_roi = roi.clone();\n\n unsafe {\n\n Ok(check(qhy::SetQHYCCDResolution(\n", "file_path": "src/camera/interface.rs", "rank": 40, "score": 19.577521270223055 }, { "content": " // result.width < clamp.right() - result.x\n\n Rect::new(\n\n clampedx,\n\n clampedy,\n\n Self::clamp1(area.width, 1, clamp.right() - clampedx),\n\n Self::clamp1(area.height, 1, clamp.bottom() - clampedy),\n\n )\n\n }\n\n\n\n pub fn get_roi_unclamped(&self, reference: &Rect<usize>) -> Rect<isize> {\n\n let zerozero = self.tf_space((0.0, 0.0), reference);\n\n let oneone = self.tf_space((1.0, 1.0), reference);\n\n let size = (oneone.0 - zerozero.0, oneone.1 - zerozero.1);\n\n Rect::new(zerozero.0, zerozero.1, size.0, size.1)\n\n }\n\n\n\n #[allow(clippy::float_cmp)]\n\n pub fn update(&mut self) -> bool {\n\n let mut any_key = false;\n\n for (key, time) 
in &mut self.pressed_keys {\n", "file_path": "src/camera/display.rs", "rank": 41, "score": 18.700911075786678 }, { "content": "use crate::{dms::Angle, mount, Key, Result, UserUpdate};\n\nuse std::{collections::HashSet, fmt::Write};\n\n\n\npub struct MountDisplay {\n\n pub mount: mount::thread::MountAsync,\n\n pressed_keys: HashSet<Key>,\n\n slew_speed: u32,\n\n}\n\n\n\nimpl MountDisplay {\n\n pub fn new(mount: mount::thread::MountAsync) -> Self {\n\n Self {\n\n mount,\n\n pressed_keys: HashSet::new(),\n\n slew_speed: 1,\n\n }\n\n }\n\n\n\n pub fn cmd(\n\n &mut self,\n", "file_path": "src/mount/display.rs", "rank": 42, "score": 18.140807799195727 }, { "content": "use crate::{camera::interface::ROIImage, Result};\n\nuse khygl::{render_texture::TextureRenderer, texture::Texture, Rect};\n\nuse std::sync::Arc;\n\n\n\npub struct ImageDisplay {\n\n raw: Option<Arc<ROIImage>>,\n\n texture: Option<Texture<u16>>,\n\n displayer: TextureRenderer,\n\n pub scale_offset: (f32, f32),\n\n pub cross: bool,\n\n pub bin: bool,\n\n}\n\n\n\npub struct Mapping {\n\n pub scale: (f64, f64),\n\n pub offset: (f64, f64),\n\n}\n\n\n\nimpl ImageDisplay {\n\n pub fn new() -> Self {\n", "file_path": "src/image_display.rs", "rank": 43, "score": 17.669900258860544 }, { "content": " Err(mount::thread::MountSendError {}) => self.mount_display = None,\n\n }\n\n }\n\n if command_okay {\n\n Ok(())\n\n } else {\n\n Err(\"Unknown command\".into())\n\n }\n\n }\n\n\n\n fn try_run_cmd(&mut self, text: &str) {\n\n match self.run_cmd_impl(text) {\n\n Ok(()) => self.text_input.set_exec_result(String::new(), true),\n\n Err(err) => self.text_input.set_exec_result(format!(\"{}\", err), false),\n\n }\n\n }\n\n\n\n fn run_cmd(&mut self) {\n\n if let Some(cmd) = self.text_input.try_get_exec_cmd() {\n\n self.try_run_cmd(&cmd);\n", "file_path": "src/main.rs", "rank": 44, "score": 17.581962823834147 }, { "content": " ControlValue::new(self)\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug)]\n\npub struct ControlValue {\n\n pub 
id: ControlId,\n\n pub value: f64,\n\n pub min: f64,\n\n pub max: f64,\n\n pub step: f64,\n\n pub readonly: bool,\n\n pub interesting: bool,\n\n}\n\n\n\nimpl ControlValue {\n\n fn new(control: &Control) -> Self {\n\n Self {\n\n id: control.control,\n\n value: control.get(),\n", "file_path": "src/camera/interface.rs", "rank": 45, "score": 16.816792563945498 }, { "content": " .unwrap_or(1);\n\n let camera_rect = Rect::new(text_size.right(), 0, width, input_pos_y);\n\n self.camera_display\n\n .draw(camera_rect, &self.texture_renderer, window_size_f32)?;\n\n Ok(())\n\n }\n\n\n\n fn resize(&mut self, size: (usize, usize)) -> Result<()> {\n\n self.window_size = size;\n\n Ok(())\n\n }\n\n\n\n fn key_up(&mut self, key: Key) -> Result<()> {\n\n if self.wasd_mount_mode {\n\n if let Some(ref mut mount_display) = self.mount_display {\n\n match mount_display.key_up(key) {\n\n Ok(()) => (),\n\n Err(mount::thread::MountSendError {}) => self.mount_display = None,\n\n }\n\n }\n", "file_path": "src/main.rs", "rank": 46, "score": 16.568652259582795 }, { "content": " processor: process::Processor::new(send_user_update),\n\n roi_thing: ROIThing::new(),\n\n display_interesting: true,\n\n save: 0,\n\n folder: String::new(),\n\n solve_status: String::new(),\n\n cached_status: String::new(),\n\n }\n\n }\n\n\n\n pub fn cmd(&mut self, command: &[&str]) -> Result<bool> {\n\n if self.processor.cmd(command)? 
{\n\n return Ok(true);\n\n }\n\n match *command {\n\n [\"cross\"] => {\n\n self.image_display.cross = !self.image_display.cross;\n\n }\n\n [\"bin\"] => {\n\n self.image_display.bin = !self.image_display.bin;\n", "file_path": "src/camera/display.rs", "rank": 47, "score": 16.380458954762947 }, { "content": "\n\nimpl ROIThing {\n\n pub fn new() -> Self {\n\n Self {\n\n pressed_keys: HashMap::new(),\n\n position: (0.5, 0.5),\n\n zoom: 1.0,\n\n }\n\n }\n\n\n\n fn tf(&self, point: (f64, f64)) -> (f64, f64) {\n\n (\n\n (point.0 - 0.5) * self.zoom + self.position.0,\n\n (point.1 - 0.5) * self.zoom + self.position.1,\n\n )\n\n }\n\n\n\n fn tf_space(&self, point: (f64, f64), space: &Rect<usize>) -> (isize, isize) {\n\n let point = self.tf(point);\n\n (\n", "file_path": "src/camera/display.rs", "rank": 48, "score": 16.37459193990698 }, { "content": " Camera::open(self, live)\n\n }\n\n\n\n pub fn name(&self) -> &str {\n\n &self.name\n\n }\n\n}\n\n\n\npub struct Camera {\n\n handle: QHYCCD,\n\n info: CameraInfo,\n\n controls: Vec<Control>,\n\n use_live: bool,\n\n effective_area: Rect<usize>,\n\n current_roi: Rect<usize>,\n\n qhyccd_mem_length_u16: usize,\n\n}\n\n\n\nimpl Camera {\n\n pub fn num_cameras() -> u32 {\n", "file_path": "src/camera/interface.rs", "rank": 49, "score": 16.346998975564837 }, { "content": "#[derive(Clone)]\n\npub struct CameraInfo {\n\n name: String,\n\n}\n\n\n\nimpl CameraInfo {\n\n pub fn new(id: u32) -> Result<CameraInfo> {\n\n init_qhyccd_resource();\n\n let result = unsafe {\n\n let mut data = vec![0; 512];\n\n check(qhy::GetQHYCCDId(id, data.as_mut_ptr()))?;\n\n let name = str::from_utf8(&data[..data.iter().position(|&c| c == 0).unwrap()])\n\n .unwrap()\n\n .to_string();\n\n CameraInfo { name }\n\n };\n\n Ok(result)\n\n }\n\n\n\n pub fn open(self, live: bool) -> Result<Camera> {\n", "file_path": "src/camera/interface.rs", "rank": 50, "score": 16.009391663864335 }, { "content": " processor: process::Processor,\n\n roi_thing: ROIThing,\n\n 
display_interesting: bool,\n\n save: usize,\n\n folder: String,\n\n solve_status: String,\n\n cached_status: String,\n\n}\n\n\n\nimpl CameraDisplay {\n\n pub fn new(send_user_update: SendUserUpdate) -> Self {\n\n if let Ok(img) = crate::read_png(\"telescope.2019-11-21.19-39-54.png\") {\n\n send_user_update\n\n .send_event(UserUpdate::CameraData(std::sync::Arc::new(img.into())))\n\n .unwrap();\n\n }\n\n Self {\n\n camera: Some(camera::thread::CameraAsync::new(send_user_update.clone())),\n\n send_user_update: send_user_update.clone(),\n\n image_display: ImageDisplay::new(),\n", "file_path": "src/camera/display.rs", "rank": 51, "score": 15.808385233720868 }, { "content": "use crate::{dms::Angle, Result};\n\nuse std::{ffi::OsStr, fmt, fmt::Display, str, str::FromStr, time::Duration};\n\n\n\n#[derive(Clone, Debug)]\n\npub enum TrackingMode {\n\n Off,\n\n AltAz,\n\n Equatorial,\n\n SiderealPec,\n\n}\n\n\n\nimpl Default for TrackingMode {\n\n fn default() -> Self {\n\n TrackingMode::Off\n\n }\n\n}\n\n\n\nimpl Display for TrackingMode {\n\n fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {\n\n match *self {\n", "file_path": "src/mount/interface.rs", "rank": 52, "score": 15.621156757295154 }, { "content": "use regex::Regex;\n\nuse std::sync::Once;\n\n\n\n#[derive(Default, Clone, Copy, Debug)]\n\npub struct Angle {\n\n value: f64,\n\n}\n\n\n\nimpl Angle {\n\n pub fn from_0to1(value: f64) -> Self {\n\n Self {\n\n value: value.rem_euclid(1.0),\n\n }\n\n }\n\n\n\n pub fn value_0to1(self) -> f64 {\n\n self.value\n\n }\n\n\n\n pub fn from_u32(value: u32) -> Self {\n", "file_path": "src/dms.rs", "rank": 53, "score": 14.888155758585718 }, { "content": " // TODO: Cache new_binning()\n\n Self {\n\n raw: None,\n\n texture: None,\n\n displayer: TextureRenderer::new_binning()\n\n .expect(\"failed to build binning texture renderer\"),\n\n scale_offset: (1.0, 0.0),\n\n cross: false,\n\n bin: true,\n\n }\n\n }\n\n\n\n pub fn raw(&self) -> &Option<Arc<ROIImage>> {\n\n &self.raw\n\n 
}\n\n\n\n pub fn set_raw(&mut self, raw: Arc<ROIImage>) -> Result<()> {\n\n let create = self\n\n .texture\n\n .as_ref()\n", "file_path": "src/image_display.rs", "rank": 54, "score": 14.214647782212861 }, { "content": "pub struct CameraAsync {\n\n send: mpsc::Sender<CameraCommand>,\n\n pub data: CameraData,\n\n}\n\n\n\nimpl CameraAsync {\n\n pub fn new(send_user_update: SendUserUpdate) -> Self {\n\n let (send_cmd, recv_cmd) = mpsc::channel();\n\n spawn(move || match run(recv_cmd, send_user_update) {\n\n Ok(()) => (),\n\n Err(err) => println!(\"Camera thread error: {}\", err),\n\n });\n\n Self {\n\n send: send_cmd,\n\n data: CameraData {\n\n controls: Vec::new(),\n\n name: String::new(),\n\n cmd_status: String::new(),\n\n running: false,\n\n is_live: false,\n", "file_path": "src/camera/thread.rs", "rank": 55, "score": 13.844915658425435 }, { "content": " (point.0 * space.width as f64 + space.x as f64) as isize,\n\n (point.1 * space.height as f64 + space.y as f64) as isize,\n\n )\n\n }\n\n\n\n fn clamp1(val: isize, low: usize, high: usize) -> usize {\n\n if val < low as isize {\n\n low\n\n } else if val >= high as isize {\n\n high - 1\n\n } else {\n\n val as usize\n\n }\n\n }\n\n\n\n pub fn clamp(area: Rect<isize>, clamp: &Rect<usize>) -> Rect<usize> {\n\n let clampedx = Self::clamp1(area.x, clamp.x, clamp.right() - 1);\n\n let clampedy = Self::clamp1(area.y, clamp.y, clamp.bottom() - 1);\n\n // result.right() < clamp.right()\n\n // result.x + result.width < clamp.right()\n", "file_path": "src/camera/display.rs", "rank": 56, "score": 13.72731258255703 }, { "content": " }\n\n\n\n pub fn start(&self) -> Result<()> {\n\n if self.use_live {\n\n self.start_live()\n\n } else {\n\n self.start_single()\n\n }\n\n }\n\n\n\n pub fn stop(&self) -> Result<()> {\n\n if self.use_live {\n\n self.stop_live()\n\n } else {\n\n self.stop_single()\n\n }\n\n }\n\n}\n\n\n\nimpl Drop for Camera {\n", "file_path": "src/camera/interface.rs", "rank": 57, "score": 13.565146242131286 }, { 
"content": " ))?;\n\n let current_roi = Rect::new(x as usize, y as usize, width as usize, height as usize);\n\n\n\n let controls = Self::get_controls(handle);\n\n\n\n let len_u8 = qhy::GetQHYCCDMemLength(handle) as usize;\n\n let len_u16 = len_u8 / 2;\n\n\n\n Ok(Camera {\n\n handle,\n\n info,\n\n controls,\n\n use_live,\n\n effective_area: current_roi.clone(),\n\n current_roi,\n\n qhyccd_mem_length_u16: len_u16,\n\n })\n\n }\n\n }\n\n\n", "file_path": "src/camera/interface.rs", "rank": 58, "score": 13.276104941021167 }, { "content": "\n\nimpl Processor {\n\n pub fn new(send_user_update: SendUserUpdate) -> Self {\n\n let (send, recv) = mpsc::sync_channel::<Arc<ROIImage>>(1);\n\n spawn(move || {\n\n while let Ok(img) = recv.recv() {\n\n let result = UserUpdate::ProcessResult(ProcessResult::compute(&img.image));\n\n if send_user_update.send_event(result).is_err() {\n\n break;\n\n }\n\n }\n\n });\n\n Self {\n\n send,\n\n process_result: None,\n\n processor_type: ProcessorType::Median,\n\n\n\n clip: 0.01,\n\n median_location: 0.2,\n\n\n", "file_path": "src/alg/process.rs", "rank": 59, "score": 13.024537639716032 }, { "content": " &mut self.median_location,\n\n true,\n\n )\n\n || parse(key, value, \"sigma\", &mut self.sigma, false)\n\n || parse(key, value, \"mean_location\", &mut self.mean_location, true)\n\n || parse(key, value, \"scale\", &mut self.scale, false)\n\n || parse(key, value, \"offset\", &mut self.offset, false);\n\n return Ok(ok);\n\n }\n\n _ => return Ok(false),\n\n }\n\n Ok(true)\n\n }\n\n\n\n pub fn status(&self, status: &mut String) -> Result<()> {\n\n match self.processor_type {\n\n ProcessorType::Median => {\n\n writeln!(status, \"process: median (mean, linear)\")?;\n\n writeln!(\n\n status,\n", "file_path": "src/alg/process.rs", "rank": 60, "score": 12.978382153861654 }, { "content": " }\n\n\n\n fn run_cmd_impl(&mut self, text: &str) -> Result<()> {\n\n let cmd = text.split_whitespace().collect::<Vec<_>>();\n\n let mut command_okay = 
cmd.is_empty();\n\n match &cmd as &[&str] {\n\n [\"wasd\"] if self.mount_display.is_some() => {\n\n self.wasd_mount_mode = true;\n\n command_okay |= true;\n\n }\n\n [\"zoom\"] => {\n\n self.wasd_camera_mode = true;\n\n command_okay |= true;\n\n }\n\n _ => (),\n\n }\n\n command_okay |= self.camera_display.cmd(&cmd)?;\n\n if let Some(ref mut mount_display) = self.mount_display {\n\n match mount_display.cmd(&cmd) {\n\n Ok(ok) => command_okay |= ok,\n", "file_path": "src/main.rs", "rank": 61, "score": 12.929498178321696 }, { "content": " }\n\n\n\n fn rect_from_space(object: Rect<f64>, space: Rect<f64>) -> Rect<f64> {\n\n Rect::new(\n\n (object.x - space.x) / space.width,\n\n (object.y - space.y) / space.height,\n\n object.width / space.width,\n\n object.height / space.height,\n\n )\n\n }\n\n\n\n fn space_transform(object: Rect<f64>, from: Rect<f64>, to: Rect<f64>) -> Rect<f64> {\n\n Self::rect_into_space(Self::rect_from_space(object, from), to)\n\n }\n\n\n\n pub fn draw(\n\n &mut self,\n\n pos: Rect<usize>,\n\n displayer: &TextureRenderer,\n\n screen_size: (f32, f32),\n", "file_path": "src/image_display.rs", "rank": 62, "score": 12.720350822526445 }, { "content": " sigma: 3.0,\n\n mean_location: 0.2,\n\n\n\n scale: 1.0,\n\n offset: 0.0,\n\n }\n\n }\n\n\n\n // true if ok, false if dropped frame\n\n pub fn process(&self, image: Arc<ROIImage>) -> Result<bool> {\n\n match self.send.try_send(image) {\n\n Ok(()) => Ok(true),\n\n Err(mpsc::TrySendError::Full(_)) => Ok(false),\n\n Err(mpsc::TrySendError::Disconnected(_)) => {\n\n Err(\"Processing thread disconnected\".into())\n\n }\n\n }\n\n }\n\n\n\n pub fn cmd(&mut self, command: &[&str]) -> Result<bool> {\n", "file_path": "src/alg/process.rs", "rank": 63, "score": 12.598361367632114 }, { "content": " }\n\n\n\n pub fn to_hms(self) -> (bool, u32, u32, u32, f64) {\n\n Self::value_to_xms(self.hours())\n\n }\n\n\n\n pub fn fmt_degrees(self) -> String {\n\n let (sign, degrees, minutes, seconds, _) = self.to_dms();\n\n let sign = 
if sign { \"-\" } else { \"\" };\n\n format!(\"{}{}°{}′{}″\", sign, degrees, minutes, seconds)\n\n }\n\n\n\n pub fn fmt_hours(self) -> String {\n\n let (sign, degrees, minutes, seconds, _) = self.to_hms();\n\n let sign = if sign { \"-\" } else { \"\" };\n\n format!(\"{}{}h{}′{}″\", sign, degrees, minutes, seconds)\n\n }\n\n\n\n pub fn parse(val: &str) -> Option<Self> {\n\n let capture = match dms_parse_regex().captures(val) {\n", "file_path": "src/dms.rs", "rank": 64, "score": 12.016379056860513 }, { "content": "\n\nimpl Mount {\n\n pub fn list() -> Vec<String> {\n\n serialport::available_ports()\n\n .unwrap_or_else(|_| Vec::new())\n\n .into_iter()\n\n .map(|x| x.port_name)\n\n .collect()\n\n }\n\n\n\n pub fn new<T: AsRef<OsStr> + ?Sized>(path: &T) -> Result<Mount> {\n\n let mut port = serialport::open(path)?;\n\n port.set_timeout(Duration::from_secs(3))?;\n\n Ok(Mount {\n\n port,\n\n radec_offset: (Angle::from_0to1(0.0), Angle::from_0to1(0.0)),\n\n })\n\n }\n\n\n\n fn read(&mut self, mut data: impl AsMut<[u8]>) -> Result<()> {\n", "file_path": "src/mount/interface.rs", "rank": 65, "score": 11.909279094108165 }, { "content": "use super::floodfind;\n\nuse khygl::texture::CpuTexture;\n\n\n\n/*\n\nAlternate algorithm:\n\n\n\n1) Find brightest pixel\n\n2) Flood fill out to brightest neighbor, until certain number of pixels in size, or threshhold is reached (median of surroundings?)\n\n3) Mask out star, goto 1, until N stars are found\n\n*/\n\n\n\n#[derive(Debug)]\n\npub struct Star {\n\n pub x: f64,\n\n pub y: f64,\n\n // flux isn't *totally* correct, since the fringes of the star are cut off and not added\n\n pub flux: f64,\n\n pub hfr: f64,\n\n}\n\n\n\nimpl Star {\n\n pub fn new(x: f64, y: f64, flux: f64, hfr: f64) -> Self {\n\n Self { x, y, flux, hfr }\n\n }\n\n}\n\n\n", "file_path": "src/alg/starfinder.rs", "rank": 67, "score": 11.440585854542565 }, { "content": "impl FromStr for ControlId {\n\n type Err = ();\n\n\n\n fn from_str(s: &str) -> 
std::result::Result<Self, Self::Err> {\n\n for &control in VALUES {\n\n if control.to_str().eq_ignore_ascii_case(s) {\n\n return Ok(control);\n\n }\n\n }\n\n Err(())\n\n }\n\n}\n\n\n\nimpl fmt::Display for ControlId {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", self.to_str())\n\n }\n\n}\n\n\n\npub const EXPOSURE_FACTOR: f64 = 1_000_000.0;\n", "file_path": "src/camera/qhycamera.rs", "rank": 68, "score": 11.330589988513667 }, { "content": " &self,\n\n cmd: impl FnOnce(&mut Mount) -> Result<()> + Send + 'static,\n\n ) -> std::result::Result<(), MountSendError> {\n\n match self.send.send(Box::new(cmd)) {\n\n Ok(()) => Ok(()),\n\n Err(mpsc::SendError(_)) => Err(MountSendError {}),\n\n }\n\n }\n\n\n\n pub fn slew_real(&self, ra: Angle, dec: Angle) -> std::result::Result<(), MountSendError> {\n\n self.send(move |mount| mount.slew_ra_dec_real(ra, dec))\n\n }\n\n pub fn sync_real(&self, ra: Angle, dec: Angle) -> std::result::Result<(), MountSendError> {\n\n self.send(move |mount| mount.sync_ra_dec_real(ra, dec))\n\n }\n\n pub fn set_real_to_mount(\n\n &self,\n\n ra: Angle,\n\n dec: Angle,\n\n ) -> std::result::Result<(), MountSendError> {\n", "file_path": "src/mount/thread.rs", "rank": 69, "score": 11.296718124872733 }, { "content": "use crate::{dms::Angle, Result, SendUserUpdate, UserUpdate};\n\nuse khygl::texture::CpuTexture;\n\nuse regex::Regex;\n\nuse std::{env::var, ffi::OsString, path::PathBuf, process::Command, sync::Once, thread};\n\n\n\nstatic PARSE_ONCE: Once = Once::new();\n\nstatic mut PARSE_REGEX: Option<Regex> = None;\n\nstatic REGEX_STR: &str = r\"RA,Dec = \\((\\d+\\.?\\d*),(\\d+\\.?\\d*)\\)\";\n", "file_path": "src/platesolve.rs", "rank": 70, "score": 11.188836533241197 }, { "content": "use crate::{\n\n camera::{\n\n qhycamera as qhy,\n\n qhycamera::{ControlId, QHYCCD},\n\n },\n\n Result,\n\n};\n\nuse khygl::{texture::CpuTexture, Rect};\n\nuse std::{error::Error, ffi::CString, fmt, str, 
sync::Once};\n\n\n\n#[derive(Debug)]\n", "file_path": "src/camera/interface.rs", "rank": 71, "score": 11.158024417944638 }, { "content": " if self.mount_display.is_none() {\n\n self.wasd_mount_mode = false;\n\n }\n\n } else if self.wasd_camera_mode {\n\n self.camera_display.key_up(key);\n\n }\n\n Ok(())\n\n }\n\n\n\n fn key_down(&mut self, key: Key) -> Result<()> {\n\n if self.wasd_mount_mode {\n\n if key == Key::Escape {\n\n self.wasd_mount_mode = false;\n\n } else if let Some(ref mut mount_display) = self.mount_display {\n\n match mount_display.key_down(key) {\n\n Ok(()) => (),\n\n Err(mount::thread::MountSendError {}) => self.mount_display = None,\n\n }\n\n if self.mount_display.is_none() {\n\n self.wasd_mount_mode = false;\n", "file_path": "src/main.rs", "rank": 72, "score": 10.870484854289643 }, { "content": " }\n\n }\n\n\n\n fn setup(\n\n window_size: (usize, usize),\n\n scale_factor: f64,\n\n send_user_update: SendUserUpdate,\n\n ) -> Result<Self> {\n\n let texture_renderer = TextureRenderer::new()?;\n\n let height = 20.0 * scale_factor as f32;\n\n let text_renderer = TextRenderer::new(height)?;\n\n let send_user_update_2 = send_user_update.clone();\n\n let camera_display = CameraDisplay::new(send_user_update_2);\n\n let mount_display = Some(MountDisplay::new(MountAsync::new(send_user_update)));\n\n let text_input = text_input::TextInput::new();\n\n Ok(Self {\n\n camera_display,\n\n mount_display,\n\n next_frequent_update: Instant::now(),\n\n next_infrequent_update: Instant::now(),\n", "file_path": "src/main.rs", "rank": 73, "score": 10.621207865283377 }, { "content": " fn parse(key: &str, value: &str, name: &str, data: &mut f64, perc: bool) -> bool {\n\n if key == name {\n\n if let Ok(v) = value.parse::<f64>() {\n\n *data = if perc { v / 100.0 } else { v };\n\n return true;\n\n } else {\n\n }\n\n }\n\n false\n\n }\n\n match *command {\n\n [\"median\"] => self.processor_type = ProcessorType::Median,\n\n [\"mean\"] => self.processor_type = 
ProcessorType::Mean,\n\n [\"linear\"] => self.processor_type = ProcessorType::Linear,\n\n [key, value] => {\n\n let ok = parse(key, value, \"clip\", &mut self.clip, true)\n\n || parse(\n\n key,\n\n value,\n\n \"median_location\",\n", "file_path": "src/alg/process.rs", "rank": 74, "score": 10.601770293018236 }, { "content": " }\n\n }\n\n}\n\n\n\n#[derive(Default, Clone, Debug)]\n\npub struct MountTime {\n\n hour: u8,\n\n minute: u8,\n\n second: u8,\n\n month: u8,\n\n day: u8,\n\n year: u8, // current year - 2000\n\n time_zone_offset: i8, // hours\n\n dst: bool,\n\n}\n\n\n\nimpl MountTime {\n\n pub fn now() -> Self {\n\n //let tm = time::now();\n\n let tm = time::OffsetDateTime::now_local();\n", "file_path": "src/mount/interface.rs", "rank": 75, "score": 10.579910252169666 }, { "content": " //println!(\"Dropped processing frame\");\n\n }\n\n }\n\n UserUpdate::ProcessResult(process_result) => self.processor.user_update(process_result),\n\n user_update => {\n\n if let Some(ref mut camera) = self.camera {\n\n camera.user_update(user_update);\n\n }\n\n }\n\n }\n\n Ok(())\n\n }\n\n\n\n #[allow(clippy::float_cmp)]\n\n pub fn key_down(&mut self, key: Key) {\n\n if key == Key::G {\n\n self.roi_thing.update();\n\n if self.roi_thing.zoom == 1.0 {\n\n self.camera_op(|c| c.set_roi(None));\n\n } else if let Some(raw) = self.image_display.raw() {\n", "file_path": "src/camera/display.rs", "rank": 76, "score": 10.434192246722734 }, { "content": "\n\n pub fn toggle_live(&self) -> std::result::Result<(), ()> {\n\n self.send.send(CameraCommand::ToggleLive).map_err(|_| ())\n\n }\n\n\n\n pub fn set_roi(&self, roi: Option<Rect<usize>>) -> std::result::Result<(), ()> {\n\n self.send.send(CameraCommand::SetROI(roi)).map_err(|_| ())\n\n }\n\n\n\n pub fn user_update(&mut self, user_update: UserUpdate) {\n\n if let UserUpdate::CameraUpdate(data) = user_update {\n\n self.data = data;\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/camera/thread.rs", "rank": 77, "score": 10.41236940495662 }, { 
"content": " window_size,\n\n last_mouse: None,\n\n pressed_mouse: HashSet::new(),\n\n status: String::new(),\n\n old_status: String::new(),\n\n text_input,\n\n texture_renderer,\n\n text_renderer,\n\n wasd_mount_mode: false,\n\n wasd_camera_mode: false,\n\n })\n\n }\n\n\n\n // (wait until, redraw now)\n\n fn update(&mut self) -> Result<(Instant, bool)> {\n\n let mut redraw = false;\n\n let now = Instant::now();\n\n let frequent_update_rate = Duration::from_millis(50);\n\n let is_next_frequent_update = now >= self.next_frequent_update;\n\n if is_next_frequent_update {\n", "file_path": "src/main.rs", "rank": 78, "score": 10.385913289460795 }, { "content": " }\n\n\n\n pub fn to_dms(self) -> (bool, u32, u32, u32, f64) {\n\n Self::value_to_xms(self.degrees())\n\n }\n\n\n\n pub fn from_hms(\n\n is_negative: bool,\n\n hours: f64,\n\n minutes: f64,\n\n seconds: f64,\n\n remainder_seconds: f64,\n\n ) -> Self {\n\n Self::from_hours(Self::merge_xms(\n\n is_negative,\n\n hours,\n\n minutes,\n\n seconds,\n\n remainder_seconds,\n\n ))\n", "file_path": "src/dms.rs", "rank": 79, "score": 10.170588960778677 }, { "content": " Ok(Self::parse_time(response))\n\n }\n\n\n\n pub fn set_time(&mut self, time: MountTime) -> Result<()> {\n\n let to_write = Self::format_time(b'H', time);\n\n self.interact0(to_write)?;\n\n Ok(())\n\n }\n\n\n\n pub fn aligned(&mut self) -> Result<bool> {\n\n self.write([b'J'])?;\n\n let mut response = [0];\n\n self.read(&mut response)?;\n\n Ok(response[0] != 0)\n\n }\n\n\n\n fn fixed_slew_command(&mut self, one: u8, two: u8, three: u8, rate: u8) -> Result<()> {\n\n let cmd = [b'P', one, two, three, rate, 0, 0, 0];\n\n self.interact0(cmd)?;\n\n Ok(())\n", "file_path": "src/mount/interface.rs", "rank": 80, "score": 9.715215224717486 }, { "content": "impl MountAsync {\n\n pub fn new(send_user_update: SendUserUpdate) -> Self {\n\n let (send_cmd, recv_cmd) = mpsc::channel();\n\n spawn(move || match run(recv_cmd, send_user_update) {\n\n Ok(()) => (),\n\n Err(err) => 
panic!(\"Mount thread error: {}\", err),\n\n });\n\n Self {\n\n send: send_cmd,\n\n data: MountData::default(),\n\n }\n\n }\n\n\n\n pub fn user_update(&mut self, user_update: UserUpdate) {\n\n if let UserUpdate::MountUpdate(mount_update) = user_update {\n\n self.data = mount_update;\n\n }\n\n }\n\n\n\n fn send(\n", "file_path": "src/mount/thread.rs", "rank": 81, "score": 9.518722099892548 }, { "content": " &mut height,\n\n &mut bpp,\n\n &mut channels,\n\n data.as_mut_ptr() as _,\n\n );\n\n if res != 0 {\n\n // function will fail if image isn't ready yet\n\n None\n\n } else {\n\n assert_eq!(bpp, 16);\n\n assert_eq!(channels, 1);\n\n assert_eq!(width as usize, self.current_roi.width);\n\n assert_eq!(height as usize, self.current_roi.height);\n\n Some(ROIImage {\n\n image: CpuTexture::new(data, (width as usize, height as usize)),\n\n location: self.current_roi.clone(),\n\n original: self.effective_area.clone(),\n\n })\n\n }\n\n }\n", "file_path": "src/camera/interface.rs", "rank": 82, "score": 9.484732853455442 }, { "content": " Key::D => self.mount.fixed_slew_ra(self.slew_speed as i32)?,\n\n Key::A => self.mount.fixed_slew_ra(-(self.slew_speed as i32))?,\n\n Key::W => self.mount.fixed_slew_dec(self.slew_speed as i32)?,\n\n Key::S => self.mount.fixed_slew_dec(-(self.slew_speed as i32))?,\n\n Key::R => self.slew_speed = (self.slew_speed + 1).min(9),\n\n Key::F => self.slew_speed = (self.slew_speed - 1).max(1),\n\n _ => (),\n\n }\n\n Ok(())\n\n }\n\n\n\n pub fn key_up(&mut self, key: Key) -> std::result::Result<(), mount::thread::MountSendError> {\n\n if !self.pressed_keys.remove(&key) {\n\n return Ok(());\n\n }\n\n match key {\n\n Key::D | Key::A => self.mount.fixed_slew_ra(0)?,\n\n Key::W | Key::S => self.mount.fixed_slew_dec(0)?,\n\n _ => (),\n\n }\n\n Ok(())\n\n }\n\n\n\n pub fn user_update(&mut self, user_update: UserUpdate) {\n\n self.mount.user_update(user_update)\n\n }\n\n}\n", "file_path": "src/mount/display.rs", "rank": 83, "score": 9.41165057715271 }, { 
"content": " is_live: camera.use_live(),\n\n exposure_start,\n\n exposure_duration,\n\n effective_area: Some(camera.effective_area()),\n\n };\n\n\n\n match send.send_event(UserUpdate::CameraUpdate(data)) {\n\n Ok(()) => (),\n\n Err(_) => return Ok(()),\n\n }\n\n if running {\n\n if camera.use_live() {\n\n loop {\n\n match camera.get_live() {\n\n Some(frame) => {\n\n send.send_event(UserUpdate::CameraData(Arc::new(frame)))?;\n\n break;\n\n }\n\n None => {\n\n let limit = Duration::from_millis(10);\n", "file_path": "src/camera/thread.rs", "rank": 84, "score": 9.072515202949825 }, { "content": "use crate::{camera, camera::qhycamera::ControlId, Result, SendUserUpdate, UserUpdate};\n\nuse khygl::Rect;\n\nuse std::{\n\n sync::{mpsc, Arc},\n\n thread::spawn,\n\n time::{Duration, Instant},\n\n};\n\n\n", "file_path": "src/camera/thread.rs", "rank": 85, "score": 8.948135977984837 }, { "content": " lat.fmt_degrees(),\n\n lon.fmt_degrees()\n\n )?;\n\n writeln!(status, \"time: {}\", data.time)?;\n\n writeln!(status, \"slew speed: {}\", self.slew_speed)?;\n\n writeln!(status, \"syncpos [ra] [dec]\")?;\n\n writeln!(status, \"slew [ra] [dec]\")?;\n\n writeln!(status, \"azaltslew [az] [alt]\")?;\n\n writeln!(status, \"cancel\")?;\n\n writeln!(status, \"mode [Off|AltAz|Equatorial|SiderealPec]\")?;\n\n writeln!(status, \"location [lat] [lon]\")?;\n\n writeln!(status, \"time now\")?;\n\n Ok(())\n\n }\n\n\n\n pub fn key_down(&mut self, key: Key) -> std::result::Result<(), mount::thread::MountSendError> {\n\n if !self.pressed_keys.insert(key) {\n\n return Ok(());\n\n }\n\n match key {\n", "file_path": "src/mount/display.rs", "rank": 86, "score": 8.805341283192332 }, { "content": " process_result.duration\n\n )?;\n\n }\n\n Ok(())\n\n }\n\n\n\n pub fn user_update(&mut self, process_result: ProcessResult) {\n\n self.process_result = Some(process_result);\n\n }\n\n\n\n pub fn get_scale_offset(&self) -> Option<(f64, f64)> {\n\n let process_result = self.process_result.as_ref()?;\n\n let 
result = match self.processor_type {\n\n ProcessorType::Median => {\n\n process_result.median_scale_offset(self.clip, self.median_location)\n\n }\n\n ProcessorType::Mean => process_result.mean_scale_offset(self.sigma, self.mean_location),\n\n ProcessorType::Linear => (self.scale, self.offset),\n\n };\n\n Some(result)\n\n }\n\n\n\n // pub fn get_stars(&self) -> Option<&[Star]> {\n\n // Some(&self.process_result.as_ref()?.stars)\n\n // }\n\n}\n", "file_path": "src/alg/process.rs", "rank": 87, "score": 8.728634403355624 }, { "content": " exposure_start: Instant::now(),\n\n exposure_duration: Duration::from_secs(0),\n\n effective_area: None,\n\n },\n\n }\n\n }\n\n\n\n pub fn set_control(&self, id: ControlId, value: f64) -> std::result::Result<(), ()> {\n\n self.send\n\n .send(CameraCommand::SetControl(id, value))\n\n .map_err(|_| ())\n\n }\n\n\n\n pub fn start(&self) -> std::result::Result<(), ()> {\n\n self.send.send(CameraCommand::Start).map_err(|_| ())\n\n }\n\n\n\n pub fn stop(&self) -> std::result::Result<(), ()> {\n\n self.send.send(CameraCommand::Stop).map_err(|_| ())\n\n }\n", "file_path": "src/camera/thread.rs", "rank": 88, "score": 8.711195057832894 }, { "content": "EXPORTC uint32_t STDCALL SendTwoLine2QHYCCDInterCamOled(qhyccd_handle *handle,char *messageTop,char *messageBottom);\n", "file_path": "lib/qhyccd/include/qhyccd.h", "rank": 89, "score": 8.679635213005012 }, { "content": " init_qhyccd_resource();\n\n unsafe { qhy::ScanQHYCCD() }\n\n }\n\n\n\n fn open(info: CameraInfo, use_live: bool) -> Result<Camera> {\n\n unsafe {\n\n let cstring = CString::new(&info.name as &str)?;\n\n let handle = qhy::OpenQHYCCD(cstring.as_ptr());\n\n if handle.is_null() {\n\n return Err(\"OpenQHYCCD returned null\".into());\n\n }\n\n\n\n check(qhy::SetQHYCCDStreamMode(\n\n handle,\n\n if use_live { 1 } else { 0 },\n\n ))?; // 0 == single, 1 == stream\n\n check(qhy::InitQHYCCD(handle))?;\n\n check(qhy::IsQHYCCDControlAvailable(\n\n handle,\n\n 
ControlId::ControlTransferbit,\n", "file_path": "src/camera/interface.rs", "rank": 90, "score": 8.667811362983734 }, { "content": " any_key\n\n }\n\n\n\n pub fn key_down(&mut self, key: Key) {\n\n self.pressed_keys.entry(key).or_insert_with(Instant::now);\n\n }\n\n\n\n pub fn key_up(&mut self, key: Key) {\n\n self.update();\n\n self.pressed_keys.remove(&key);\n\n }\n\n}\n", "file_path": "src/camera/display.rs", "rank": 91, "score": 8.640274306034481 }, { "content": " fn drop(&mut self) {\n\n check(unsafe { qhy::CloseQHYCCD(self.handle) }).expect(\"Failed to close QHY camera in Drop\")\n\n }\n\n}\n\n\n\npub struct Control {\n\n handle: QHYCCD,\n\n control: ControlId,\n\n min: f64,\n\n max: f64,\n\n step: f64,\n\n constant_value: f64,\n\n readonly: bool,\n\n interesting: bool,\n\n}\n\n\n\nimpl Control {\n\n fn new(handle: QHYCCD, control: ControlId) -> Control {\n\n unsafe {\n\n let mut min = 0.0;\n", "file_path": "src/camera/interface.rs", "rank": 92, "score": 8.444479199706784 }, { "content": "use crate::{\n\n alg::process,\n\n camera,\n\n camera::qhycamera::{ControlId, EXPOSURE_FACTOR},\n\n image_display::ImageDisplay,\n\n mount,\n\n platesolve::platesolve,\n\n Key, Result, SendUserUpdate, UserUpdate,\n\n};\n\nuse khygl::{render_texture::TextureRenderer, texture::CpuTexture, Rect};\n\nuse std::{\n\n collections::HashMap, convert::TryInto, fmt::Write, fs::create_dir_all, path::PathBuf,\n\n time::Instant,\n\n};\n\n\n\n// TODO: make saving images async\n\npub struct CameraDisplay {\n\n camera: Option<camera::thread::CameraAsync>,\n\n send_user_update: SendUserUpdate,\n\n image_display: ImageDisplay,\n", "file_path": "src/camera/display.rs", "rank": 93, "score": 8.33211999246845 }, { "content": " roi: &crate::camera::display::ROIThing,\n\n ) -> Result<(Rect<usize>, Mapping)> {\n\n if let (Some(texture), Some(raw)) = (self.texture.as_ref(), self.raw.as_ref()) {\n\n let roi_unclamped = roi.get_roi_unclamped(&raw.original);\n\n let roi_clamped =\n\n 
crate::camera::display::ROIThing::clamp(roi_unclamped.clone(), &raw.location);\n\n\n\n let scale = ((pos.width as f64) / (roi_unclamped.width as f64))\n\n .min((pos.height as f64) / (roi_unclamped.height as f64));\n\n let screenspace_width = (roi_unclamped.width as f64) * scale;\n\n let screenspace_height = (roi_unclamped.height as f64) * scale;\n\n let screenspace_area = Rect::new(\n\n (pos.x + pos.width) as f64 - screenspace_width,\n\n pos.y as f64,\n\n screenspace_width,\n\n screenspace_height,\n\n );\n\n\n\n let roi_clamped_f64 = Rect::new(\n\n roi_clamped.x as f64,\n", "file_path": "src/image_display.rs", "rank": 94, "score": 8.200124479029103 }, { "content": " Self::from_0to1(f64::from(value) / (f64::from(u32::max_value()) + 1.0))\n\n }\n\n\n\n pub fn u32(self) -> u32 {\n\n (self.value_0to1() * (f64::from(u32::max_value()) + 1.0)) as u32\n\n }\n\n\n\n pub fn from_degrees(deg: f64) -> Self {\n\n Self::from_0to1(deg / 360.0)\n\n }\n\n\n\n pub fn degrees(self) -> f64 {\n\n self.value * 360.0\n\n }\n\n\n\n pub fn from_hours(hours: f64) -> Self {\n\n Self::from_0to1(hours / 24.0)\n\n }\n\n\n\n pub fn hours(self) -> f64 {\n", "file_path": "src/dms.rs", "rank": 95, "score": 8.183395408957107 }, { "content": " let now = Instant::now();\n\n let dt = (now - *time).as_secs_f64();\n\n let mut handled_key = true;\n\n match *key {\n\n Key::D => self.position.0 += self.zoom * dt * 0.25,\n\n Key::A => self.position.0 -= self.zoom * dt * 0.25,\n\n Key::S => self.position.1 += self.zoom * dt * 0.25,\n\n Key::W => self.position.1 -= self.zoom * dt * 0.25,\n\n Key::R => self.zoom *= (-dt).exp2(),\n\n Key::F => {\n\n self.zoom = (self.zoom * dt.exp2()).min(1.0);\n\n if self.zoom == 1.0 {\n\n self.position = (0.5, 0.5);\n\n }\n\n }\n\n _ => handled_key = false,\n\n }\n\n any_key |= handled_key;\n\n *time = now;\n\n }\n", "file_path": "src/camera/display.rs", "rank": 96, "score": 8.116019560150303 }, { "content": " TrackingMode::Off => write!(f, \"Off\"),\n\n TrackingMode::AltAz => 
write!(f, \"AltAz\"),\n\n TrackingMode::Equatorial => write!(f, \"Equatorial\"),\n\n TrackingMode::SiderealPec => write!(f, \"SiderealPec\"),\n\n }\n\n }\n\n}\n\n\n\nimpl FromStr for TrackingMode {\n\n type Err = &'static str;\n\n fn from_str(s: &str) -> ::std::result::Result<Self, Self::Err> {\n\n Ok(match s {\n\n \"Off\" => TrackingMode::Off,\n\n \"AltAz\" => TrackingMode::AltAz,\n\n \"Equatorial\" => TrackingMode::Equatorial,\n\n \"SiderealPec\" => TrackingMode::SiderealPec,\n\n _ => return Err(\"Invalid TrackingMode\"),\n\n })\n\n }\n\n}\n", "file_path": "src/mount/interface.rs", "rank": 97, "score": 8.104307577949756 }, { "content": "}\n\n\n\nimpl std::ops::Sub for Angle {\n\n type Output = Self;\n\n fn sub(self, rhs: Self) -> Self {\n\n Self::from_0to1(self.value_0to1() - rhs.value_0to1())\n\n }\n\n}\n\n\n\nimpl std::ops::AddAssign for Angle {\n\n fn add_assign(&mut self, rhs: Self) {\n\n *self = *self + rhs;\n\n }\n\n}\n\n\n\nimpl std::ops::SubAssign for Angle {\n\n fn sub_assign(&mut self, rhs: Self) {\n\n *self = *self - rhs;\n\n }\n\n}\n", "file_path": "src/dms.rs", "rank": 98, "score": 7.8289535953363885 }, { "content": " crate::write_png(self.get_filename()?, data)?;\n\n Ok(())\n\n }\n\n\n\n pub fn draw(\n\n &mut self,\n\n pos: Rect<usize>,\n\n displayer: &TextureRenderer,\n\n screen_size: (f32, f32),\n\n ) -> Result<()> {\n\n if let Some(scale_offset) = self.processor.get_scale_offset() {\n\n self.image_display.scale_offset = (scale_offset.0 as f32, scale_offset.1 as f32);\n\n };\n\n self.roi_thing.update();\n\n self.image_display\n\n .draw(pos, displayer, screen_size, &self.roi_thing)?;\n\n Ok(())\n\n }\n\n\n\n pub fn user_update(\n", "file_path": "src/camera/display.rs", "rank": 99, "score": 7.78649419805957 } ]
Rust
src/app/lua/render.rs
nokevair/nokevair
40494bbe843394f3757f0f525bdcac5acfda4538
use hyper::{Response, Body}; use rlua::Value as LV; use std::fs; use std::path::PathBuf; use crate::conv; use crate::utils::SourceChain; use super::{Ctx, Version, Result, AppState}; pub fn with_entries<F: FnMut(String, PathBuf)>(app_ctx: &Ctx, mut f: F) { let dir = match fs::read_dir(&app_ctx.cfg.paths.render) { Ok(dir) => dir, Err(e) => { app_ctx.log.err(format_args!("failed to read render dir: {}", e)); return } }; for entry in dir { let entry = match entry { Ok(entry) => entry, Err(e) => { app_ctx.log.err(format_args!("failed while reading render dir: {}", e)); continue } }; let path = entry.path(); if !path.is_dir() { continue } let name = match entry.file_name().to_str() { Some(s) => s.to_string(), None => { app_ctx.log.err(format_args!( "failed to load focus at '{}': invalid UTF-8", path.display())); continue } }; f(name, path); } } impl super::Backend { pub(super) fn unload_focuses(&mut self) { self.focuses.clear(); self.lua.context(|ctx| ctx.expire_registry_values()); } pub(super) fn load_focuses(&mut self, app_ctx: &Ctx) { with_entries(app_ctx, |name, mut path| { path.push("focus.lua"); let code = match fs::read_to_string(&path) { Ok(code) => code, Err(e) => { app_ctx.log.err(format_args!( "failed to read file '{}': {}", path.display(), e )); return } }; let focuses = &mut self.focuses; let res = self.lua.context(|ctx| { let focus_fn = ctx.load(&code) .eval::<rlua::Function>()?; let key = ctx.create_registry_value(focus_fn)?; focuses.insert(name, key); Ok::<(), rlua::Error>(()) }); if let Err(e) = res { app_ctx.log.err(format_args!( "lua ('{}' -> focus):\n{}", path.display(), SourceChain(e), )); } }); let len = self.focuses.len(); app_ctx.log.info(format_args!( "loaded {} focus function{}", len, if len == 1 { "" } else { "s" } )); } pub(super) fn render( &mut self, ver: Version, name: &str, query_param: Option<String>, app_state: &AppState, ) -> Result<Response<Body>> { macro_rules! 
render_call { () => { match &query_param { Some(param) => format!("'{}' with arg '{}'", name, param), None => format!("'{}'", name), } } } self.ensure_loaded(ver, &app_state.ctx); self.lua.context(|ctx| { let focus_fn_key = self.focuses.get(name) .ok_or(()) .or_else(|_| app_state.error_404())?; let focus_fn: rlua::Function = ctx.registry_value(focus_fn_key) .or_else(|_| app_state.error_500("invalid focus fn key"))?; let state_key = self.state_versions.get(ver.as_usize()) .ok_or(()) .or_else(|_| app_state.error_404_no_state(ver))?; let state: LV = ctx.registry_value(state_key) .or_else(|_| app_state.error_500("invalid state key"))?; let ctx: Option<rlua::Table> = focus_fn.call((state, query_param.clone())) .or_else(|e| app_state.error_500(format_args!( "lua (focus {}):\n{}", render_call!(), SourceChain(e), )))?; let ctx = ctx.ok_or(()).or_else(|_| app_state.error_404())?; let ctx = conv::lua_to_json(LV::Table(ctx)) .or_else(|e| app_state.error_500(format_args!( "lua (focus {} -> JSON):\n{}", render_call!(), SourceChain(e) )))?; let ctx = tera::Context::from_serialize(ctx) .or_else(|e| app_state.error_500(format_args!( "tera (focus {} -> Tera ctx):\n{}", render_call!(), SourceChain(e), )))?; let template = format!("render/{}.html", name); app_state.render(&template, &ctx) }) } }
use hyper::{Response, Body}; use rlua::Value as LV; use std::fs; use std::path::PathBuf; use crate::conv; use crate::utils::SourceChain; use super::{Ctx, Version, Result, AppState}; pub fn with_entries<F: FnMut(String, PathBuf)>(app_ctx: &Ctx, mut f: F) { let dir = match fs::read_dir(&app_ctx.cfg.paths.render) { Ok(dir) => dir, Err(e) => { app_ctx.log.err(format_args!("failed to read render dir: {}", e)); return } }; for entry in dir { let entry = match entry { Ok(entry) => entry, Err(e) => { app_ctx.log.err(format_args!("failed while reading render dir: {}", e)); continue } }; let path = entry.path(); if !path.is_dir() { continue } let name = match entry.file_name().to_str() { Some(s) => s.to_string(), None => { app_ctx.log.err(format_args!( "failed to load focus at '{}': invalid UTF-8", path.display())); continue } }; f(name, path); } } impl super::Backend { pub(super) fn unload_focuses(&mut self) { self.focuses.clear()
ocus.lua"); let code = match fs::read_to_string(&path) { Ok(code) => code, Err(e) => { app_ctx.log.err(format_args!( "failed to read file '{}': {}", path.display(), e )); return } }; let focuses = &mut self.focuses; let res = self.lua.context(|ctx| { let focus_fn = ctx.load(&code) .eval::<rlua::Function>()?; let key = ctx.create_registry_value(focus_fn)?; focuses.insert(name, key); Ok::<(), rlua::Error>(()) }); if let Err(e) = res { app_ctx.log.err(format_args!( "lua ('{}' -> focus):\n{}", path.display(), SourceChain(e), )); } }); let len = self.focuses.len(); app_ctx.log.info(format_args!( "loaded {} focus function{}", len, if len == 1 { "" } else { "s" } )); } pub(super) fn render( &mut self, ver: Version, name: &str, query_param: Option<String>, app_state: &AppState, ) -> Result<Response<Body>> { macro_rules! render_call { () => { match &query_param { Some(param) => format!("'{}' with arg '{}'", name, param), None => format!("'{}'", name), } } } self.ensure_loaded(ver, &app_state.ctx); self.lua.context(|ctx| { let focus_fn_key = self.focuses.get(name) .ok_or(()) .or_else(|_| app_state.error_404())?; let focus_fn: rlua::Function = ctx.registry_value(focus_fn_key) .or_else(|_| app_state.error_500("invalid focus fn key"))?; let state_key = self.state_versions.get(ver.as_usize()) .ok_or(()) .or_else(|_| app_state.error_404_no_state(ver))?; let state: LV = ctx.registry_value(state_key) .or_else(|_| app_state.error_500("invalid state key"))?; let ctx: Option<rlua::Table> = focus_fn.call((state, query_param.clone())) .or_else(|e| app_state.error_500(format_args!( "lua (focus {}):\n{}", render_call!(), SourceChain(e), )))?; let ctx = ctx.ok_or(()).or_else(|_| app_state.error_404())?; let ctx = conv::lua_to_json(LV::Table(ctx)) .or_else(|e| app_state.error_500(format_args!( "lua (focus {} -> JSON):\n{}", render_call!(), SourceChain(e) )))?; let ctx = tera::Context::from_serialize(ctx) .or_else(|e| app_state.error_500(format_args!( "tera (focus {} -> Tera ctx):\n{}", 
render_call!(), SourceChain(e), )))?; let template = format!("render/{}.html", name); app_state.render(&template, &ctx) }) } }
; self.lua.context(|ctx| ctx.expire_registry_values()); } pub(super) fn load_focuses(&mut self, app_ctx: &Ctx) { with_entries(app_ctx, |name, mut path| { path.push("f
random
[]
Rust
src/bin/debugger.rs
Hexilee/tifs
184363cf1cc8ece62421d376d8cd97eb001b25a8
use std::fmt::Debug; use std::io::{stdin, stdout, BufRead, BufReader, Write}; use anyhow::{anyhow, Result}; use clap::{crate_version, App, Arg}; use tifs::fs::inode::Inode; use tifs::fs::key::{ScopedKey, ROOT_INODE}; use tifs::fs::tikv_fs::TiFs; use tifs::fs::transaction::Txn; use tikv_client::TransactionClient; use tracing_subscriber::EnvFilter; #[async_std::main] async fn main() -> Result<()> { let matches = App::new("TiFS Debugger") .version(crate_version!()) .author("Hexi Lee") .arg( Arg::with_name("pd") .long("pd-endpoints") .multiple(true) .value_name("ENDPOINTS") .default_value("127.0.0.1:2379") .help("set all pd endpoints of the tikv cluster") .takes_value(true), ) .get_matches(); tracing_subscriber::fmt() .with_env_filter(EnvFilter::from_default_env()) .try_init() .unwrap(); let endpoints: Vec<&str> = matches .values_of("pd") .unwrap_or_default() .to_owned() .collect(); let console = Console::construct(endpoints).await?; loop { match console.interact().await { Ok(true) => break Ok(()), Err(err) => eprintln!("{}", err), _ => continue, } } } struct Console { pd_endpoints: Vec<String>, client: TransactionClient, } impl Console { async fn construct<S>(pd_endpoints: Vec<S>) -> Result<Self> where S: Clone + Debug + Into<String>, { let client = TransactionClient::new_with_config(pd_endpoints.clone(), Default::default()) .await .map_err(|err| anyhow!("{}", err))?; Ok(Self { client, pd_endpoints: pd_endpoints.into_iter().map(Into::into).collect(), }) } async fn interact(&self) -> Result<bool> { let mut txn = Txn::begin_optimistic( &self.client, TiFs::DEFAULT_BLOCK_SIZE, None, TiFs::MAX_NAME_LEN, ) .await?; match self.interact_with_txn(&mut txn).await { Ok(exit) => { txn.commit().await?; Ok(exit) } Err(err) => { txn.rollback().await?; Err(err) } } } async fn interact_with_txn(&self, txn: &mut Txn) -> Result<bool> { print!("{:?}> ", &self.pd_endpoints); stdout().flush()?; let mut buffer = String::new(); BufReader::new(stdin()).read_line(&mut buffer)?; let commands: 
Vec<&str> = buffer.split(" ").map(|seg| seg.trim()).collect(); if commands.len() == 0 { return Ok(false); } match commands[0] { "exit" => return Ok(true), "reset" => self.reset(txn).await?, "get" => self.get_block(txn, &commands[1..]).await?, "get_str" => self.get_block_str(txn, &commands[1..]).await?, "get_attr" => self.get_attr(txn, &commands[1..]).await?, "get_raw" => self.get_attr_raw(txn, &commands[1..]).await?, "get_inline" => self.get_inline(txn, &commands[1..]).await?, "rm" => self.delete_block(txn, &commands[1..]).await?, cmd => return Err(anyhow!("unknow command `{}`", cmd)), } Ok(false) } async fn reset(&self, txn: &mut Txn) -> Result<()> { let next_inode = txn .read_meta() .await? .map(|meta| meta.inode_next) .unwrap_or(ROOT_INODE); for inode in txn .scan( ScopedKey::inode_range(ROOT_INODE..next_inode), (next_inode - ROOT_INODE) as u32, ) .await? .map(|pair| Inode::deserialize(pair.value())) { let inode = inode?; txn.clear_data(inode.ino).await?; txn.remove_inode(inode.ino).await?; } txn.delete(ScopedKey::meta()).await?; Ok(()) } async fn get_block(&self, txn: &mut Txn, args: &[&str]) -> Result<()> { if args.len() < 2 { return Err(anyhow!("invalid arguments `{:?}`", args)); } match txn .get(ScopedKey::block(args[0].parse()?, args[1].parse()?)) .await? { Some(value) => println!("{:?}", &value[args.get(2).unwrap_or(&"0").parse()?..]), None => println!("Not Found"), } Ok(()) } async fn get_block_str(&self, txn: &mut Txn, args: &[&str]) -> Result<()> { if args.len() < 2 { return Err(anyhow!("invalid arguments `{:?}`", args)); } match txn .get(ScopedKey::block(args[0].parse()?, args[1].parse()?)) .await? { Some(value) => println!("{:?}", String::from_utf8_lossy(&value)), None => println!("Not Found"), } Ok(()) } async fn get_attr(&self, txn: &mut Txn, args: &[&str]) -> Result<()> { if args.len() < 1 { return Err(anyhow!("invalid arguments `{:?}`", args)); } match txn.get(ScopedKey::inode(args[0].parse()?)).await? 
{ Some(value) => println!("{:?}", Inode::deserialize(&value)?), None => println!("Not Found"), } Ok(()) } async fn get_attr_raw(&self, txn: &mut Txn, args: &[&str]) -> Result<()> { if args.len() < 1 { return Err(anyhow!("invalid arguments `{:?}`", args)); } match txn.get(ScopedKey::inode(args[0].parse()?)).await? { Some(value) => println!("{}", &*String::from_utf8_lossy(&value)), None => println!("Not Found"), } Ok(()) } async fn get_inline(&self, txn: &mut Txn, args: &[&str]) -> Result<()> { if args.len() < 1 { return Err(anyhow!("invalid arguments `{:?}`", args)); } match txn.get(ScopedKey::inode(args[0].parse()?)).await? { Some(value) => { let inline = Inode::deserialize(&value)? .inline_data .unwrap_or_else(Vec::new); println!("{}", String::from_utf8_lossy(&inline)); } None => println!("Not Found"), } Ok(()) } async fn delete_block(&self, txn: &mut Txn, args: &[&str]) -> Result<()> { if args.len() < 2 { return Err(anyhow!("invalid arguments `{:?}`", args)); } txn.delete(ScopedKey::block(args[0].parse()?, args[1].parse()?)) .await?; Ok(()) } }
use std::fmt::Debug; use std::io::{stdin, stdout, BufRead, BufReader, Write}; use anyhow::{anyhow, Result}; use clap::{crate_version, App, Arg}; use tifs::fs::inode::Inode; use tifs::fs::key::{ScopedKey, ROOT_INODE}; use tifs::fs::tikv_fs::TiFs; use tifs::fs::transaction::Txn; use tikv_client::TransactionClient; use tracing_subscriber::EnvFilter; #[async_std::main] async fn main() -> Result<()> { let matches = App::new("TiFS Debugger") .version(crate_version!()) .author("Hexi Lee") .arg( Arg::with_name("pd") .long("pd-endpoints") .multiple(true) .value
=> return Ok(true), "reset" => self.reset(txn).await?, "get" => self.get_block(txn, &commands[1..]).await?, "get_str" => self.get_block_str(txn, &commands[1..]).await?, "get_attr" => self.get_attr(txn, &commands[1..]).await?, "get_raw" => self.get_attr_raw(txn, &commands[1..]).await?, "get_inline" => self.get_inline(txn, &commands[1..]).await?, "rm" => self.delete_block(txn, &commands[1..]).await?, cmd => return Err(anyhow!("unknow command `{}`", cmd)), } Ok(false) } async fn reset(&self, txn: &mut Txn) -> Result<()> { let next_inode = txn .read_meta() .await? .map(|meta| meta.inode_next) .unwrap_or(ROOT_INODE); for inode in txn .scan( ScopedKey::inode_range(ROOT_INODE..next_inode), (next_inode - ROOT_INODE) as u32, ) .await? .map(|pair| Inode::deserialize(pair.value())) { let inode = inode?; txn.clear_data(inode.ino).await?; txn.remove_inode(inode.ino).await?; } txn.delete(ScopedKey::meta()).await?; Ok(()) } async fn get_block(&self, txn: &mut Txn, args: &[&str]) -> Result<()> { if args.len() < 2 { return Err(anyhow!("invalid arguments `{:?}`", args)); } match txn .get(ScopedKey::block(args[0].parse()?, args[1].parse()?)) .await? { Some(value) => println!("{:?}", &value[args.get(2).unwrap_or(&"0").parse()?..]), None => println!("Not Found"), } Ok(()) } async fn get_block_str(&self, txn: &mut Txn, args: &[&str]) -> Result<()> { if args.len() < 2 { return Err(anyhow!("invalid arguments `{:?}`", args)); } match txn .get(ScopedKey::block(args[0].parse()?, args[1].parse()?)) .await? { Some(value) => println!("{:?}", String::from_utf8_lossy(&value)), None => println!("Not Found"), } Ok(()) } async fn get_attr(&self, txn: &mut Txn, args: &[&str]) -> Result<()> { if args.len() < 1 { return Err(anyhow!("invalid arguments `{:?}`", args)); } match txn.get(ScopedKey::inode(args[0].parse()?)).await? 
{ Some(value) => println!("{:?}", Inode::deserialize(&value)?), None => println!("Not Found"), } Ok(()) } async fn get_attr_raw(&self, txn: &mut Txn, args: &[&str]) -> Result<()> { if args.len() < 1 { return Err(anyhow!("invalid arguments `{:?}`", args)); } match txn.get(ScopedKey::inode(args[0].parse()?)).await? { Some(value) => println!("{}", &*String::from_utf8_lossy(&value)), None => println!("Not Found"), } Ok(()) } async fn get_inline(&self, txn: &mut Txn, args: &[&str]) -> Result<()> { if args.len() < 1 { return Err(anyhow!("invalid arguments `{:?}`", args)); } match txn.get(ScopedKey::inode(args[0].parse()?)).await? { Some(value) => { let inline = Inode::deserialize(&value)? .inline_data .unwrap_or_else(Vec::new); println!("{}", String::from_utf8_lossy(&inline)); } None => println!("Not Found"), } Ok(()) } async fn delete_block(&self, txn: &mut Txn, args: &[&str]) -> Result<()> { if args.len() < 2 { return Err(anyhow!("invalid arguments `{:?}`", args)); } txn.delete(ScopedKey::block(args[0].parse()?, args[1].parse()?)) .await?; Ok(()) } }
_name("ENDPOINTS") .default_value("127.0.0.1:2379") .help("set all pd endpoints of the tikv cluster") .takes_value(true), ) .get_matches(); tracing_subscriber::fmt() .with_env_filter(EnvFilter::from_default_env()) .try_init() .unwrap(); let endpoints: Vec<&str> = matches .values_of("pd") .unwrap_or_default() .to_owned() .collect(); let console = Console::construct(endpoints).await?; loop { match console.interact().await { Ok(true) => break Ok(()), Err(err) => eprintln!("{}", err), _ => continue, } } } struct Console { pd_endpoints: Vec<String>, client: TransactionClient, } impl Console { async fn construct<S>(pd_endpoints: Vec<S>) -> Result<Self> where S: Clone + Debug + Into<String>, { let client = TransactionClient::new_with_config(pd_endpoints.clone(), Default::default()) .await .map_err(|err| anyhow!("{}", err))?; Ok(Self { client, pd_endpoints: pd_endpoints.into_iter().map(Into::into).collect(), }) } async fn interact(&self) -> Result<bool> { let mut txn = Txn::begin_optimistic( &self.client, TiFs::DEFAULT_BLOCK_SIZE, None, TiFs::MAX_NAME_LEN, ) .await?; match self.interact_with_txn(&mut txn).await { Ok(exit) => { txn.commit().await?; Ok(exit) } Err(err) => { txn.rollback().await?; Err(err) } } } async fn interact_with_txn(&self, txn: &mut Txn) -> Result<bool> { print!("{:?}> ", &self.pd_endpoints); stdout().flush()?; let mut buffer = String::new(); BufReader::new(stdin()).read_line(&mut buffer)?; let commands: Vec<&str> = buffer.split(" ").map(|seg| seg.trim()).collect(); if commands.len() == 0 { return Ok(false); } match commands[0] { "exit"
random
[ { "content": "fn default_tls_config_path() -> anyhow::Result<PathBuf> {\n\n Ok(DEFAULT_TLS_CONFIG_PATH.parse()?)\n\n}\n\n\n\nmacro_rules! define_options {\n\n {\n\n $name: ident ($type: ident) {\n\n $(builtin $($optname: literal)? $opt: ident,)*\n\n $(define $($newoptname: literal)? $newopt: ident $( ( $optval: ident ) )? ,)*\n\n }\n\n } =>\n\n {\n\n #[derive(Debug,Clone,PartialEq)]\n\n pub enum $name {\n\n Unknown(String),\n\n $($opt,)*\n\n $($newopt $(($optval))?,)*\n\n }\n\n impl $name {\n\n pub fn to_vec<'a, I: Iterator<Item=&'a str>>(iter: I) -> Vec<Self> {\n", "file_path": "src/lib.rs", "rank": 0, "score": 66566.55959683115 }, { "content": "pub fn decode(bytes: &[u8]) -> Result<Directory> {\n\n deserialize(bytes).map_err(|err| FsError::Serialize {\n\n target: \"directory\",\n\n typ: ENCODING,\n\n msg: err.to_string(),\n\n })\n\n}\n\n\n", "file_path": "src/fs/dir.rs", "rank": 1, "score": 65768.87956845023 }, { "content": "pub fn encode(dir: &Directory) -> Result<Vec<u8>> {\n\n serialize(dir).map_err(|err| FsError::Serialize {\n\n target: \"directory\",\n\n typ: ENCODING,\n\n msg: err.to_string(),\n\n })\n\n}\n\n\n", "file_path": "src/fs/dir.rs", "rank": 2, "score": 62930.42011276583 }, { "content": "pub fn decode_item(bytes: &[u8]) -> Result<DirItem> {\n\n deserialize(bytes).map_err(|err| FsError::Serialize {\n\n target: \"dir item\",\n\n typ: ENCODING,\n\n msg: err.to_string(),\n\n })\n\n}\n", "file_path": "src/fs/dir.rs", "rank": 3, "score": 62367.265106168896 }, { "content": "pub fn encode_item(item: &DirItem) -> Result<Vec<u8>> {\n\n serialize(item).map_err(|err| FsError::Serialize {\n\n target: \"dir item\",\n\n typ: ENCODING,\n\n msg: err.to_string(),\n\n })\n\n}\n\n\n", "file_path": "src/fs/dir.rs", "rank": 4, "score": 59775.655736610104 }, { "content": "pub fn spawn_reply<F, R, V>(id: u64, reply: R, f: F)\n\nwhere\n\n F: Future<Output = Result<V>> + Send + 'static,\n\n R: FsReply<V> + Send + 'static,\n\n V: Debug,\n\n{\n\n spawn(async move {\n\n 
trace!(\"reply to request({})\", id);\n\n let result = f.await;\n\n reply.reply(id, result);\n\n });\n\n}\n\n\n", "file_path": "src/fs/async_fs.rs", "rank": 5, "score": 42833.64087726685 }, { "content": "pub fn get_time() -> Duration {\n\n SystemTime::now().duration_since(UNIX_EPOCH).unwrap()\n\n}\n\n\n\n#[derive(Debug)]\n\npub struct Entry {\n\n pub time: Duration,\n\n pub stat: FileAttr,\n\n pub generation: u64,\n\n}\n\n\n\nimpl Entry {\n\n pub fn new(stat: FileAttr, generation: u64) -> Self {\n\n Self {\n\n time: get_time(),\n\n stat,\n\n generation,\n\n }\n\n }\n\n}\n", "file_path": "src/fs/reply.rs", "rank": 6, "score": 34945.12932038944 }, { "content": "use clap::{crate_version, App, Arg};\n\nuse tifs::{mount_tifs, MountOption};\n\nuse tracing_subscriber::EnvFilter;\n\n\n\n#[async_std::main]\n\nasync fn main() {\n\n let matches = App::new(\"TiFS\")\n\n .version(crate_version!())\n\n .author(\"Hexi Lee\")\n\n .arg(\n\n Arg::with_name(\"pd\")\n\n .long(\"pd-endpoints\")\n\n .short(\"p\")\n\n .multiple(true)\n\n .value_name(\"ENDPOINTS\")\n\n .default_value(\"127.0.0.1:2379\")\n\n .help(\"set all pd endpoints of the tikv cluster\")\n\n .takes_value(true),\n\n )\n\n .arg(\n", "file_path": "src/main.rs", "rank": 7, "score": 32148.75215834814 }, { "content": " Arg::with_name(\"mount-point\")\n\n .long(\"mount-point\")\n\n .short(\"m\")\n\n .value_name(\"MOUNT_POINT\")\n\n .required(true)\n\n .help(\"Act as a client, and mount FUSE at given path\")\n\n .takes_value(true),\n\n )\n\n .arg(\n\n Arg::with_name(\"options\")\n\n .value_name(\"OPTION\")\n\n .long(\"option\")\n\n .short(\"o\")\n\n .multiple(true)\n\n .help(\"filesystem mount options\"),\n\n )\n\n .get_matches();\n\n\n\n tracing_subscriber::fmt()\n\n .with_env_filter(EnvFilter::from_default_env())\n", "file_path": "src/main.rs", "rank": 8, "score": 32134.405882324605 }, { "content": " .try_init()\n\n .unwrap();\n\n\n\n let endpoints: Vec<&str> = matches\n\n .values_of(\"pd\")\n\n .unwrap_or_default()\n\n 
.to_owned()\n\n .collect();\n\n\n\n let mountpoint: String = matches.value_of(\"mount-point\").unwrap().to_string();\n\n let options = MountOption::to_vec(matches.values_of(\"options\").unwrap_or_default());\n\n\n\n mount_tifs(mountpoint, endpoints, options).await.unwrap();\n\n}\n", "file_path": "src/main.rs", "rank": 9, "score": 32132.63528933334 }, { "content": "#[cfg(target_os = \"linux\")]\n\npub fn as_file_kind(mode: u32) -> FileType {\n\n use FileType::*;\n\n\n\n match mode & libc::S_IFMT as u32 {\n\n libc::S_IFREG => RegularFile,\n\n libc::S_IFLNK => Symlink,\n\n libc::S_IFDIR => Directory,\n\n libc::S_IFIFO => NamedPipe,\n\n libc::S_IFBLK => BlockDevice,\n\n libc::S_IFCHR => CharDevice,\n\n libc::S_IFSOCK => Socket,\n\n _ => unimplemented!(\"{}\", mode),\n\n }\n\n}\n\n\n", "file_path": "src/fs/mode.rs", "rank": 10, "score": 30854.679864920312 }, { "content": "pub fn empty_block(block_size: u64) -> Block {\n\n vec![0; block_size as usize]\n\n}\n", "file_path": "src/fs/block.rs", "rank": 11, "score": 30854.679864920312 }, { "content": "#[async_trait]\n\npub trait AsyncFileSystem: Send + Sync {\n\n /// Initialize filesystem.\n\n /// Called before any other filesystem method.\n\n /// The kernel module connection can be configured using the KernelConfig object\n\n async fn init(&self, _gid: u32, _uid: u32, _config: &mut KernelConfig) -> Result<()> {\n\n Ok(())\n\n }\n\n\n\n /// Clean up filesystem.\n\n /// Called on filesystem exit.\n\n async fn destroy(&self) {}\n\n\n\n /// Look up a directory entry by name and get its attributes.\n\n async fn lookup(&self, _parent: u64, _name: ByteString) -> Result<Entry> {\n\n Err(FsError::unimplemented())\n\n }\n\n\n\n /// Forget about an inode.\n\n /// The nlookup parameter indicates the number of lookups previously performed on\n\n /// this inode. 
If the filesystem implements inode lifetimes, it is recommended that\n", "file_path": "src/fs/async_fs.rs", "rank": 12, "score": 30115.074438871772 }, { "content": "#[cfg(target_os = \"linux\")]\n\npub fn make_mode(tpy: FileType, perm: u16) -> u32 {\n\n use FileType::*;\n\n\n\n let kind = match tpy {\n\n RegularFile => libc::S_IFREG,\n\n Symlink => libc::S_IFLNK,\n\n Directory => libc::S_IFDIR,\n\n NamedPipe => libc::S_IFIFO,\n\n BlockDevice => libc::S_IFBLK,\n\n CharDevice => libc::S_IFCHR,\n\n Socket => libc::S_IFSOCK,\n\n };\n\n\n\n kind | perm as u32\n\n}\n", "file_path": "src/fs/mode.rs", "rank": 13, "score": 28327.435208808027 }, { "content": " /// which case the return value of the write system call will reflect the return\n\n /// value of this operation. fh will contain the value set by the open method, or\n\n /// will be undefined if the open method didn't set any value.\n\n ///\n\n /// write_flags: will contain FUSE_WRITE_CACHE, if this write is from the page cache. If set,\n\n /// the pid, uid, gid, and fh may not match the value that would have been sent if write cachin\n\n /// is disabled\n\n /// flags: these are the file flags, such as O_SYNC. 
Only supported with ABI >= 7.9\n\n /// lock_owner: only supported with ABI >= 7.9\n\n async fn write(\n\n &self,\n\n _ino: u64,\n\n _fh: u64,\n\n _offset: i64,\n\n _data: Vec<u8>,\n\n _write_flags: u32,\n\n _flags: i32,\n\n _lock_owner: Option<u64>,\n\n ) -> Result<Write> {\n\n Err(FsError::unimplemented())\n", "file_path": "src/fs/async_fs.rs", "rank": 14, "score": 27687.786103375718 }, { "content": "use std::ffi::OsStr;\n\nuse std::fmt::Debug;\n\nuse std::future::Future;\n\nuse std::path::Path;\n\nuse std::sync::Arc;\n\nuse std::time::SystemTime;\n\n\n\nuse async_std::task::{block_on, spawn};\n\nuse async_trait::async_trait;\n\nuse bytestring::ByteString;\n\nuse fuser::{\n\n Filesystem, KernelConfig, ReplyAttr, ReplyBmap, ReplyCreate, ReplyData, ReplyDirectory,\n\n ReplyDirectoryPlus, ReplyEmpty, ReplyEntry, ReplyLock, ReplyLseek, ReplyOpen, ReplyStatfs,\n\n ReplyWrite, ReplyXattr, Request, TimeOrNow,\n\n};\n\nuse tracing::trace;\n\n\n\nuse super::error::{FsError, Result};\n\nuse super::reply::{\n\n Attr, Bmap, Create, Data, Dir, DirPlus, Entry, FsReply, Lock, Lseek, Open, StatFs, Write, Xattr,\n\n};\n\n\n", "file_path": "src/fs/async_fs.rs", "rank": 15, "score": 27685.496429161914 }, { "content": " }\n\n\n\n /// Read directory.\n\n /// Send a buffer filled using buffer.fill(), with size not exceeding the\n\n /// requested size. Send an empty buffer on end of stream. fh will contain the\n\n /// value set by the opendir method, or will be undefined if the opendir method\n\n /// didn't set any value.\n\n async fn readdirplus(&self, _ino: u64, _fh: u64, offset: i64) -> Result<DirPlus> {\n\n Ok(DirPlus::offset(offset as usize))\n\n }\n\n\n\n /// Release an open directory.\n\n /// For every opendir call there will be exactly one releasedir call. 
fh will\n\n /// contain the value set by the opendir method, or will be undefined if the\n\n /// opendir method didn't set any value.\n\n async fn releasedir(&self, _ino: u64, _fh: u64, _flags: i32) -> Result<()> {\n\n Err(FsError::unimplemented())\n\n }\n\n\n\n /// Synchronize directory contents.\n", "file_path": "src/fs/async_fs.rs", "rank": 16, "score": 27684.63217927724 }, { "content": " /// operation. fh will contain the value set by the open method, or will be undefined\n\n /// if the open method didn't set any value.\n\n ///\n\n /// flags: these are the file flags, such as O_SYNC. Only supported with ABI >= 7.9\n\n /// lock_owner: only supported with ABI >= 7.9\n\n async fn read(\n\n &self,\n\n _ino: u64,\n\n _fh: u64,\n\n _offset: i64,\n\n _size: u32,\n\n _flags: i32,\n\n _lock_owner: Option<u64>,\n\n ) -> Result<Data> {\n\n Err(FsError::unimplemented())\n\n }\n\n\n\n /// Write data.\n\n /// Write should return exactly the number of bytes requested except on error. An\n\n /// exception to this is when the file has been opened in 'direct_io' mode, in\n", "file_path": "src/fs/async_fs.rs", "rank": 17, "score": 27684.412114500094 }, { "content": " }\n\n\n\n /// Open a directory.\n\n /// Filesystem may store an arbitrary file handle (pointer, index, etc) in fh, and\n\n /// use this in other all other directory stream operations (readdir, releasedir,\n\n /// fsyncdir). Filesystem may also implement stateless directory I/O and not store\n\n /// anything in fh, though that makes it impossible to implement standard conforming\n\n /// directory stream operations in case the contents of the directory can change\n\n /// between opendir and releasedir.\n\n async fn opendir(&self, _ino: u64, _flags: i32) -> Result<Open> {\n\n Ok(Open::new(0, 0))\n\n }\n\n\n\n /// Read directory.\n\n /// Send a buffer filled using buffer.fill(), with size not exceeding the\n\n /// requested size. Send an empty buffer on end of stream. 
fh will contain the\n\n /// value set by the opendir method, or will be undefined if the opendir method\n\n /// didn't set any value.\n\n async fn readdir(&self, _ino: u64, _fh: u64, offset: i64) -> Result<Dir> {\n\n Ok(Dir::offset(offset as usize))\n", "file_path": "src/fs/async_fs.rs", "rank": 18, "score": 27684.065824109777 }, { "content": " ) -> Result<()> {\n\n Err(FsError::unimplemented())\n\n }\n\n\n\n /// Get an extended attribute.\n\n /// If `size` is 0, the size of the value should be sent with `reply.size()`.\n\n /// If `size` is not 0, and the value fits, send it with `reply.data()`, or\n\n /// `reply.error(ERANGE)` if it doesn't.\n\n async fn getxattr(&self, _ino: u64, _name: ByteString, _size: u32) -> Result<Xattr> {\n\n Err(FsError::unimplemented())\n\n }\n\n\n\n /// List extended attribute names.\n\n /// If `size` is 0, the size of the value should be sent with `reply.size()`.\n\n /// If `size` is not 0, and the value fits, send it with `reply.data()`, or\n\n /// `reply.error(ERANGE)` if it doesn't.\n\n async fn listxattr(&self, _ino: u64, _size: u32) -> Result<Xattr> {\n\n Err(FsError::unimplemented())\n\n }\n\n\n", "file_path": "src/fs/async_fs.rs", "rank": 19, "score": 27684.060336488117 }, { "content": " /// If the datasync parameter is set, then only the directory contents should\n\n /// be flushed, not the meta data. 
fh will contain the value set by the opendir\n\n /// method, or will be undefined if the opendir method didn't set any value.\n\n async fn fsyncdir(&self, _ino: u64, _fh: u64, _datasync: bool) -> Result<()> {\n\n Err(FsError::unimplemented())\n\n }\n\n\n\n /// Get file system statistics.\n\n async fn statfs(&self, _ino: u64) -> Result<StatFs> {\n\n Ok(StatFs::new(0, 0, 0, 0, 0, 512, 255, 0))\n\n }\n\n\n\n /// Set an extended attribute.\n\n async fn setxattr(\n\n &self,\n\n _ino: u64,\n\n _name: ByteString,\n\n _value: Vec<u8>,\n\n _flags: i32,\n\n _position: u32,\n", "file_path": "src/fs/async_fs.rs", "rank": 20, "score": 27683.88047842129 }, { "content": " /// error, but error values are not returned to close() or munmap() which triggered\n\n /// the release. fh will contain the value set by the open method, or will be undefined\n\n /// if the open method didn't set any value. flags will contain the same flags as for\n\n /// open.\n\n async fn release(\n\n &self,\n\n _ino: u64,\n\n _fh: u64,\n\n _flags: i32,\n\n _lock_owner: Option<u64>,\n\n _flush: bool,\n\n ) -> Result<()> {\n\n Ok(())\n\n }\n\n\n\n /// Synchronize file contents.\n\n /// If the datasync parameter is non-zero, then only the user data should be flushed,\n\n /// not the meta data.\n\n async fn fsync(&self, _ino: u64, _fh: u64, _datasync: bool) -> Result<()> {\n\n Err(FsError::unimplemented())\n", "file_path": "src/fs/async_fs.rs", "rank": 21, "score": 27683.387975602625 }, { "content": " _offset_in: i64,\n\n _ino_out: u64,\n\n _fh_out: u64,\n\n _offset_out: i64,\n\n _len: u64,\n\n _flags: u32,\n\n ) -> Result<Write> {\n\n Err(FsError::unimplemented())\n\n }\n\n}\n\n\n\npub struct AsyncFs<T>(Arc<T>);\n\n\n\nimpl<T: AsyncFileSystem> From<T> for AsyncFs<T> {\n\n fn from(inner: T) -> Self {\n\n Self(Arc::new(inner))\n\n }\n\n}\n\n\n\nimpl<T: Debug> Debug for AsyncFs<T> {\n", "file_path": "src/fs/async_fs.rs", "rank": 22, "score": 27682.890909366255 }, { "content": " Err(FsError::unimplemented())\n\n 
}\n\n\n\n /// Open a file.\n\n /// Open flags (with the exception of O_CREAT, O_EXCL, O_NOCTTY and O_TRUNC) are\n\n /// available in flags. Filesystem may store an arbitrary file handle (pointer, index,\n\n /// etc) in fh, and use this in other all other file operations (read, write, flush,\n\n /// release, fsync). Filesystem may also implement stateless file I/O and not store\n\n /// anything in fh. There are also some flags (direct_io, keep_cache) which the\n\n /// filesystem may set, to change the way the file is opened. See fuse_file_info\n\n /// structure in <fuse_common.h> for more details.\n\n async fn open(&self, _ino: u64, _flags: i32) -> Result<Open> {\n\n Ok(Open::new(0, 0))\n\n }\n\n\n\n /// Read data.\n\n /// Read should send exactly the number of bytes requested except on EOF or error,\n\n /// otherwise the rest of the data will be substituted with zeroes. An exception to\n\n /// this is when the file has been opened in 'direct_io' mode, in which case the\n\n /// return value of the read system call will reflect the return value of this\n", "file_path": "src/fs/async_fs.rs", "rank": 23, "score": 27682.839358923324 }, { "content": " &self,\n\n _ino: u64,\n\n _fh: u64,\n\n _offset: i64,\n\n _length: i64,\n\n _mode: i32,\n\n ) -> Result<()> {\n\n Err(FsError::unimplemented())\n\n }\n\n\n\n /// Reposition read/write file offset\n\n async fn lseek(&self, _ino: u64, _fh: u64, _offset: i64, _whence: i32) -> Result<Lseek> {\n\n Err(FsError::unimplemented())\n\n }\n\n\n\n /// Copy the specified range from the source inode to the destination inode\n\n async fn copy_file_range(\n\n &self,\n\n _ino_in: u64,\n\n _fh_in: u64,\n", "file_path": "src/fs/async_fs.rs", "rank": 24, "score": 27682.804412889618 }, { "content": " }\n\n\n\n /// Flush method.\n\n /// This is called on each close() of the opened file. Since file descriptors can\n\n /// be duplicated (dup, dup2, fork), for one open call there may be many flush\n\n /// calls. 
Filesystems shouldn't assume that flush will always be called after some\n\n /// writes, or that if will be called at all. fh will contain the value set by the\n\n /// open method, or will be undefined if the open method didn't set any value.\n\n /// NOTE: the name of the method is misleading, since (unlike fsync) the filesystem\n\n /// is not forced to flush pending writes. One reason to flush data, is if the\n\n /// filesystem wants to return write errors. If the filesystem supports file locking\n\n /// operations (setlk, getlk) it should remove all locks belonging to 'lock_owner'.\n\n async fn flush(&self, _ino: u64, _fh: u64, _lock_owner: u64) -> Result<()> {\n\n Err(FsError::unimplemented())\n\n }\n\n\n\n /// Release an open file.\n\n /// Release is called when there are no more references to an open file: all file\n\n /// descriptors are closed and all memory mappings are unmapped. For every open\n\n /// call there will be exactly one release call. The filesystem may reply with an\n", "file_path": "src/fs/async_fs.rs", "rank": 25, "score": 27682.557508015525 }, { "content": " /// Remove an extended attribute.\n\n async fn removexattr(&self, _ino: u64, _name: ByteString) -> Result<()> {\n\n Err(FsError::unimplemented())\n\n }\n\n\n\n /// Check file access permissions.\n\n /// This will be called for the access() system call. If the 'default_permissions'\n\n /// mount option is given, this method is not called. This method is not called\n\n /// under Linux kernel versions 2.4.x\n\n async fn access(&self, _ino: u64, _mask: i32) -> Result<()> {\n\n Err(FsError::unimplemented())\n\n }\n\n\n\n /// Create and open a file.\n\n /// If the file does not exist, first create it with the specified mode, and then\n\n /// open it. 
Open flags (with the exception of O_NOCTTY) are available in flags.\n\n /// Filesystem may store an arbitrary file handle (pointer, index, etc) in fh,\n\n /// and use this in other all other file operations (read, write, flush, release,\n\n /// fsync). There are also some flags (direct_io, keep_cache) which the\n\n /// filesystem may set, to change the way the file is opened. See fuse_file_info\n", "file_path": "src/fs/async_fs.rs", "rank": 26, "score": 27682.393464487806 }, { "content": " fn write(\n\n &mut self,\n\n req: &Request,\n\n ino: u64,\n\n fh: u64,\n\n offset: i64,\n\n data: &[u8],\n\n write_flags: u32,\n\n flags: i32,\n\n lock_owner: Option<u64>,\n\n reply: ReplyWrite,\n\n ) {\n\n let async_impl = self.0.clone();\n\n let data = data.to_owned();\n\n spawn_reply(req.unique(), reply, async move {\n\n async_impl\n\n .write(ino, fh, offset, data, write_flags, flags, lock_owner)\n\n .await\n\n });\n\n }\n", "file_path": "src/fs/async_fs.rs", "rank": 27, "score": 27682.28463649947 }, { "content": " value: &[u8],\n\n flags: i32,\n\n position: u32,\n\n reply: ReplyEmpty,\n\n ) {\n\n let async_impl = self.0.clone();\n\n let name = name.to_string_lossy().to_string().into();\n\n let value = value.to_owned();\n\n spawn_reply(req.unique(), reply, async move {\n\n async_impl.setxattr(ino, name, value, flags, position).await\n\n });\n\n }\n\n\n\n fn getxattr(&mut self, req: &Request, ino: u64, name: &OsStr, size: u32, reply: ReplyXattr) {\n\n let async_impl = self.0.clone();\n\n let name = name.to_string_lossy().to_string().into();\n\n spawn_reply(req.unique(), reply, async move {\n\n async_impl.getxattr(ino, name, size).await\n\n });\n\n }\n", "file_path": "src/fs/async_fs.rs", "rank": 28, "score": 27682.142160376203 }, { "content": " ) -> Result<Entry> {\n\n Err(FsError::unimplemented())\n\n }\n\n\n\n /// Remove a file.\n\n async fn unlink(&self, _parent: u64, _name: ByteString) -> Result<()> {\n\n Err(FsError::unimplemented())\n\n }\n\n\n\n /// Remove a 
directory.\n\n async fn rmdir(&self, _parent: u64, _name: ByteString) -> Result<()> {\n\n Err(FsError::unimplemented())\n\n }\n\n\n\n /// Create a symbolic link.\n\n async fn symlink(\n\n &self,\n\n _gid: u32,\n\n _uid: u32,\n\n _parent: u64,\n", "file_path": "src/fs/async_fs.rs", "rank": 29, "score": 27681.62457179926 }, { "content": " _fh: u64,\n\n _lock_owner: u64,\n\n _start: u64,\n\n _end: u64,\n\n _typ: i32,\n\n _pid: u32,\n\n ) -> Result<Lock> {\n\n Err(FsError::unimplemented())\n\n }\n\n\n\n /// Acquire, modify or release a POSIX file lock.\n\n /// For POSIX threads (NPTL) there's a 1-1 relation between pid and owner, but\n\n /// otherwise this is not always the case. For checking lock ownership,\n\n /// 'fi->owner' must be used. The l_pid field in 'struct flock' should only be\n\n /// used to fill in this field in getlk(). Note: if the locking methods are not\n\n /// implemented, the kernel will still allow file locking to work locally.\n\n /// Hence these are only interesting for network filesystems and similar.\n\n async fn setlk(\n\n &self,\n\n _ino: u64,\n", "file_path": "src/fs/async_fs.rs", "rank": 30, "score": 27681.24045877271 }, { "content": " _name: ByteString,\n\n _link: ByteString,\n\n ) -> Result<Entry> {\n\n Err(FsError::unimplemented())\n\n }\n\n\n\n /// Rename a file.\n\n async fn rename(\n\n &self,\n\n _parent: u64,\n\n _name: ByteString,\n\n _newparent: u64,\n\n _newname: ByteString,\n\n _flags: u32,\n\n ) -> Result<()> {\n\n Err(FsError::unimplemented())\n\n }\n\n\n\n /// Create a hard link.\n\n async fn link(&self, _ino: u64, _newparent: u64, _newname: ByteString) -> Result<Entry> {\n", "file_path": "src/fs/async_fs.rs", "rank": 31, "score": 27681.142601587308 }, { "content": " fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {\n\n self.0.fmt(f)\n\n }\n\n}\n\n\n\nimpl<T: AsyncFileSystem + 'static> Filesystem for AsyncFs<T> {\n\n fn init(\n\n &mut self,\n\n req: &Request,\n\n config: &mut KernelConfig,\n\n ) -> 
std::result::Result<(), libc::c_int> {\n\n let uid = req.uid();\n\n let gid = req.gid();\n\n\n\n block_on(self.0.init(gid, uid, config)).map_err(|err| err.into())\n\n }\n\n\n\n fn destroy(&mut self, _req: &Request) {\n\n block_on(self.0.destroy())\n\n }\n", "file_path": "src/fs/async_fs.rs", "rank": 32, "score": 27680.947354615575 }, { "content": " });\n\n }\n\n\n\n fn copy_file_range(\n\n &mut self,\n\n req: &Request,\n\n ino_in: u64,\n\n fh_in: u64,\n\n offset_in: i64,\n\n ino_out: u64,\n\n fh_out: u64,\n\n offset_out: i64,\n\n len: u64,\n\n flags: u32,\n\n reply: ReplyWrite,\n\n ) {\n\n let async_impl = self.0.clone();\n\n spawn_reply(req.unique(), reply, async move {\n\n async_impl\n\n .copy_file_range(\n\n ino_in, fh_in, offset_in, ino_out, fh_out, offset_out, len, flags,\n\n )\n\n .await\n\n });\n\n }\n\n}\n", "file_path": "src/fs/async_fs.rs", "rank": 33, "score": 27680.450817743473 }, { "content": " _fh: u64,\n\n _lock_owner: u64,\n\n _start: u64,\n\n _end: u64,\n\n _typ: i32,\n\n _pid: u32,\n\n _sleep: bool,\n\n ) -> Result<()> {\n\n Err(FsError::unimplemented())\n\n }\n\n\n\n /// Map block index within file to block index within device.\n\n /// Note: This makes sense only for block device backed filesystems mounted\n\n /// with the 'blkdev' option\n\n async fn bmap(&self, _ino: u64, _blocksize: u32, _idx: u64) -> Result<Bmap> {\n\n Err(FsError::unimplemented())\n\n }\n\n\n\n /// Preallocate or deallocate space to a file\n\n async fn fallocate(\n", "file_path": "src/fs/async_fs.rs", "rank": 34, "score": 27680.344150313555 }, { "content": " _mtime: Option<TimeOrNow>,\n\n _ctime: Option<SystemTime>,\n\n _fh: Option<u64>,\n\n _crtime: Option<SystemTime>,\n\n _chgtime: Option<SystemTime>,\n\n _bkuptime: Option<SystemTime>,\n\n _flags: Option<u32>,\n\n ) -> Result<Attr> {\n\n Err(FsError::unimplemented())\n\n }\n\n\n\n /// Read symbolic link.\n\n async fn readlink(&self, _ino: u64) -> Result<Data> {\n\n Err(FsError::unimplemented())\n\n }\n\n\n\n /// Create 
file node.\n\n /// Create a regular file, character device, block device, fifo or socket node.\n\n async fn mknod(\n\n &self,\n", "file_path": "src/fs/async_fs.rs", "rank": 35, "score": 27680.282055794814 }, { "content": " /// structure in <fuse_common.h> for more details. If this method is not\n\n /// implemented or under Linux kernel versions earlier than 2.6.15, the mknod()\n\n /// and open() methods will be called instead.\n\n async fn create(\n\n &self,\n\n _uid: u32,\n\n _gid: u32,\n\n _parent: u64,\n\n _name: ByteString,\n\n _mode: u32,\n\n _umask: u32,\n\n _flags: i32,\n\n ) -> Result<Create> {\n\n Err(FsError::unimplemented())\n\n }\n\n\n\n /// Test for a POSIX file lock.\n\n async fn getlk(\n\n &self,\n\n _ino: u64,\n", "file_path": "src/fs/async_fs.rs", "rank": 36, "score": 27679.902187403553 }, { "content": " /// inodes acquire a single reference on each lookup, and lose nlookup references on\n\n /// each forget. The filesystem may ignore forget calls, if the inodes don't need to\n\n /// have a limited lifetime. 
On unmount it is not guaranteed, that all referenced\n\n /// inodes will receive a forget message.\n\n async fn forget(&self, _ino: u64, _nlookup: u64) {}\n\n\n\n /// Get file attributes.\n\n async fn getattr(&self, _ino: u64) -> Result<Attr> {\n\n Err(FsError::unimplemented())\n\n }\n\n\n\n /// Set file attributes.\n\n async fn setattr(\n\n &self,\n\n _ino: u64,\n\n _mode: Option<u32>,\n\n _uid: Option<u32>,\n\n _gid: Option<u32>,\n\n _size: Option<u64>,\n\n _atime: Option<TimeOrNow>,\n", "file_path": "src/fs/async_fs.rs", "rank": 37, "score": 27679.689628760585 }, { "content": " _parent: u64,\n\n _name: ByteString,\n\n _mode: u32,\n\n _gid: u32,\n\n _uid: u32,\n\n _umask: u32,\n\n _rdev: u32,\n\n ) -> Result<Entry> {\n\n Err(FsError::unimplemented())\n\n }\n\n\n\n /// Create a directory.\n\n async fn mkdir(\n\n &self,\n\n _parent: u64,\n\n _name: ByteString,\n\n _mode: u32,\n\n _gid: u32,\n\n _uid: u32,\n\n _umask: u32,\n", "file_path": "src/fs/async_fs.rs", "rank": 38, "score": 27679.47787362592 }, { "content": " let async_impl = self.0.clone();\n\n spawn_reply(req.unique(), reply, async move {\n\n async_impl.fsyncdir(ino, fh, datasync).await\n\n });\n\n }\n\n\n\n fn statfs(&mut self, req: &Request, ino: u64, reply: ReplyStatfs) {\n\n let async_impl = self.0.clone();\n\n spawn_reply(\n\n req.unique(),\n\n reply,\n\n async move { async_impl.statfs(ino).await },\n\n );\n\n }\n\n\n\n fn setxattr(\n\n &mut self,\n\n req: &Request,\n\n ino: u64,\n\n name: &OsStr,\n", "file_path": "src/fs/async_fs.rs", "rank": 39, "score": 27678.74212179403 }, { "content": "\n\n fn listxattr(&mut self, req: &Request, ino: u64, size: u32, reply: ReplyXattr) {\n\n let async_impl = self.0.clone();\n\n spawn_reply(req.unique(), reply, async move {\n\n async_impl.listxattr(ino, size).await\n\n });\n\n }\n\n\n\n fn removexattr(&mut self, req: &Request, ino: u64, name: &OsStr, reply: ReplyEmpty) {\n\n let async_impl = self.0.clone();\n\n let name = 
name.to_string_lossy().to_string().into();\n\n spawn_reply(req.unique(), reply, async move {\n\n async_impl.removexattr(ino, name).await\n\n });\n\n }\n\n fn access(&mut self, req: &Request, ino: u64, mask: i32, reply: ReplyEmpty) {\n\n let async_impl = self.0.clone();\n\n spawn_reply(req.unique(), reply, async move {\n\n async_impl.access(ino, mask).await\n\n });\n", "file_path": "src/fs/async_fs.rs", "rank": 40, "score": 27678.710406597773 }, { "content": " reply: ReplyEmpty,\n\n ) {\n\n let async_impl = self.0.clone();\n\n spawn_reply(req.unique(), reply, async move {\n\n async_impl.fallocate(ino, fh, offset, length, mode).await\n\n });\n\n }\n\n\n\n fn lseek(\n\n &mut self,\n\n req: &Request,\n\n ino: u64,\n\n fh: u64,\n\n offset: i64,\n\n whence: i32,\n\n reply: ReplyLseek,\n\n ) {\n\n let async_impl = self.0.clone();\n\n spawn_reply(req.unique(), reply, async move {\n\n async_impl.lseek(ino, fh, offset, whence).await\n", "file_path": "src/fs/async_fs.rs", "rank": 41, "score": 27678.690389497977 }, { "content": " async_impl.release(ino, fh, flags, lock_owner, flush).await\n\n });\n\n }\n\n\n\n fn fsync(&mut self, req: &Request, ino: u64, fh: u64, datasync: bool, reply: ReplyEmpty) {\n\n let async_impl = self.0.clone();\n\n spawn_reply(req.unique(), reply, async move {\n\n async_impl.fsync(ino, fh, datasync).await\n\n });\n\n }\n\n\n\n fn opendir(&mut self, req: &Request, ino: u64, flags: i32, reply: ReplyOpen) {\n\n let async_impl = self.0.clone();\n\n spawn_reply(req.unique(), reply, async move {\n\n async_impl.opendir(ino, flags).await\n\n });\n\n }\n\n\n\n fn readdir(&mut self, req: &Request, ino: u64, fh: u64, offset: i64, reply: ReplyDirectory) {\n\n let async_impl = self.0.clone();\n", "file_path": "src/fs/async_fs.rs", "rank": 42, "score": 27678.65463976981 }, { "content": "\n\n fn lookup(&mut self, req: &Request, parent: u64, name: &OsStr, reply: ReplyEntry) {\n\n let async_impl = self.0.clone();\n\n let name = 
name.to_string_lossy().to_string().into();\n\n spawn_reply(req.unique(), reply, async move {\n\n async_impl.lookup(parent, name).await\n\n });\n\n }\n\n\n\n fn forget(&mut self, _req: &Request, ino: u64, nlookup: u64) {\n\n let async_impl = self.0.clone();\n\n\n\n // TODO: union the spawn function for request without reply\n\n spawn(async move {\n\n async_impl.forget(ino, nlookup).await;\n\n });\n\n }\n\n\n\n fn getattr(&mut self, req: &Request, ino: u64, reply: ReplyAttr) {\n\n let async_impl = self.0.clone();\n", "file_path": "src/fs/async_fs.rs", "rank": 43, "score": 27678.64410091075 }, { "content": " &mut self,\n\n req: &Request,\n\n ino: u64,\n\n newparent: u64,\n\n newname: &OsStr,\n\n reply: ReplyEntry,\n\n ) {\n\n let async_impl = self.0.clone();\n\n let newname = newname.to_string_lossy().to_string().into();\n\n spawn_reply(req.unique(), reply, async move {\n\n async_impl.link(ino, newparent, newname).await\n\n });\n\n }\n\n\n\n fn open(&mut self, req: &Request, ino: u64, flags: i32, reply: ReplyOpen) {\n\n let async_impl = self.0.clone();\n\n spawn_reply(req.unique(), reply, async move {\n\n async_impl.open(ino, flags).await\n\n });\n\n }\n", "file_path": "src/fs/async_fs.rs", "rank": 44, "score": 27678.620288635546 }, { "content": " async_impl.mkdir(parent, name, mode, gid, uid, umask).await\n\n });\n\n }\n\n\n\n fn unlink(&mut self, req: &Request, parent: u64, name: &OsStr, reply: ReplyEmpty) {\n\n let async_impl = self.0.clone();\n\n let name = name.to_string_lossy().to_string().into();\n\n spawn_reply(req.unique(), reply, async move {\n\n async_impl.unlink(parent, name).await\n\n });\n\n }\n\n\n\n fn rmdir(&mut self, req: &Request, parent: u64, name: &OsStr, reply: ReplyEmpty) {\n\n let async_impl = self.0.clone();\n\n let name = name.to_string_lossy().to_string().into();\n\n spawn_reply(req.unique(), reply, async move {\n\n async_impl.rmdir(parent, name).await\n\n });\n\n }\n\n\n", "file_path": "src/fs/async_fs.rs", "rank": 45, "score": 
27678.610282580892 }, { "content": " chgtime: Option<SystemTime>,\n\n bkuptime: Option<SystemTime>,\n\n flags: Option<u32>,\n\n reply: ReplyAttr,\n\n ) {\n\n let async_impl = self.0.clone();\n\n spawn_reply(req.unique(), reply, async move {\n\n async_impl\n\n .setattr(\n\n ino, mode, uid, gid, size, atime, mtime, ctime, fh, crtime, chgtime, bkuptime,\n\n flags,\n\n )\n\n .await\n\n });\n\n }\n\n\n\n fn readlink(&mut self, req: &Request, ino: u64, reply: ReplyData) {\n\n let async_impl = self.0.clone();\n\n spawn_reply(req.unique(), reply, async move {\n\n async_impl.readlink(ino).await\n", "file_path": "src/fs/async_fs.rs", "rank": 46, "score": 27678.591042497912 }, { "content": " spawn_reply(req.unique(), reply, async move {\n\n async_impl.readdir(ino, fh, offset).await\n\n });\n\n }\n\n\n\n fn readdirplus(\n\n &mut self,\n\n req: &Request,\n\n ino: u64,\n\n fh: u64,\n\n offset: i64,\n\n reply: ReplyDirectoryPlus,\n\n ) {\n\n let async_impl = self.0.clone();\n\n spawn_reply(req.unique(), reply, async move {\n\n async_impl.readdirplus(ino, fh, offset).await\n\n });\n\n }\n\n\n\n fn fsyncdir(&mut self, req: &Request, ino: u64, fh: u64, datasync: bool, reply: ReplyEmpty) {\n", "file_path": "src/fs/async_fs.rs", "rank": 47, "score": 27678.519268091582 }, { "content": "\n\n fn flush(&mut self, req: &Request, ino: u64, fh: u64, lock_owner: u64, reply: ReplyEmpty) {\n\n let async_impl = self.0.clone();\n\n spawn_reply(req.unique(), reply, async move {\n\n async_impl.flush(ino, fh, lock_owner).await\n\n });\n\n }\n\n\n\n fn release(\n\n &mut self,\n\n req: &Request,\n\n ino: u64,\n\n fh: u64,\n\n flags: i32,\n\n lock_owner: Option<u64>,\n\n flush: bool,\n\n reply: ReplyEmpty,\n\n ) {\n\n let async_impl = self.0.clone();\n\n spawn_reply(req.unique(), reply, async move {\n", "file_path": "src/fs/async_fs.rs", "rank": 48, "score": 27678.48612001886 }, { "content": " .await\n\n });\n\n }\n\n\n\n fn setlk(\n\n &mut self,\n\n req: &Request,\n\n ino: u64,\n\n fh: u64,\n\n 
lock_owner: u64,\n\n start: u64,\n\n end: u64,\n\n typ: i32,\n\n pid: u32,\n\n sleep: bool,\n\n reply: ReplyEmpty,\n\n ) {\n\n let async_impl = self.0.clone();\n\n spawn_reply(req.unique(), reply, async move {\n\n async_impl\n", "file_path": "src/fs/async_fs.rs", "rank": 49, "score": 27678.391282015153 }, { "content": " });\n\n }\n\n\n\n fn mknod(\n\n &mut self,\n\n req: &Request,\n\n parent: u64,\n\n name: &OsStr,\n\n mode: u32,\n\n umask: u32,\n\n rdev: u32,\n\n reply: ReplyEntry,\n\n ) {\n\n let async_impl = self.0.clone();\n\n let name = name.to_string_lossy().to_string().into();\n\n let uid = req.uid();\n\n let gid = req.gid();\n\n\n\n spawn_reply(req.unique(), reply, async move {\n\n async_impl\n", "file_path": "src/fs/async_fs.rs", "rank": 50, "score": 27678.30699396927 }, { "content": "\n\n fn read(\n\n &mut self,\n\n req: &Request,\n\n ino: u64,\n\n fh: u64,\n\n offset: i64,\n\n size: u32,\n\n flags: i32,\n\n lock_owner: Option<u64>,\n\n reply: ReplyData,\n\n ) {\n\n let async_impl = self.0.clone();\n\n spawn_reply(req.unique(), reply, async move {\n\n async_impl\n\n .read(ino, fh, offset, size, flags, lock_owner)\n\n .await\n\n });\n\n }\n\n\n", "file_path": "src/fs/async_fs.rs", "rank": 51, "score": 27678.290628835 }, { "content": " .await\n\n });\n\n }\n\n\n\n fn getlk(\n\n &mut self,\n\n req: &Request,\n\n ino: u64,\n\n fh: u64,\n\n lock_owner: u64,\n\n start: u64,\n\n end: u64,\n\n typ: i32,\n\n pid: u32,\n\n reply: ReplyLock,\n\n ) {\n\n let async_impl = self.0.clone();\n\n spawn_reply(req.unique(), reply, async move {\n\n async_impl\n\n .getlk(ino, fh, lock_owner, start, end, typ, pid)\n", "file_path": "src/fs/async_fs.rs", "rank": 52, "score": 27678.290628835 }, { "content": " &mut self,\n\n req: &Request,\n\n parent: u64,\n\n name: &OsStr,\n\n newparent: u64,\n\n newname: &OsStr,\n\n flags: u32,\n\n reply: ReplyEmpty,\n\n ) {\n\n let async_impl = self.0.clone();\n\n let name = name.to_string_lossy().to_string().into();\n\n let newname = 
newname.to_string_lossy().to_string().into();\n\n spawn_reply(req.unique(), reply, async move {\n\n async_impl\n\n .rename(parent, name, newparent, newname, flags)\n\n .await\n\n });\n\n }\n\n\n\n fn link(\n", "file_path": "src/fs/async_fs.rs", "rank": 53, "score": 27678.195689651995 }, { "content": " }\n\n\n\n fn create(\n\n &mut self,\n\n req: &Request,\n\n parent: u64,\n\n name: &OsStr,\n\n mode: u32,\n\n umask: u32,\n\n flags: i32,\n\n reply: ReplyCreate,\n\n ) {\n\n let uid = req.uid();\n\n let gid = req.gid();\n\n\n\n let async_impl = self.0.clone();\n\n let name = name.to_string_lossy().to_string().into();\n\n spawn_reply(req.unique(), reply, async move {\n\n async_impl\n\n .create(uid, gid, parent, name, mode, umask, flags)\n", "file_path": "src/fs/async_fs.rs", "rank": 54, "score": 27678.18038797829 }, { "content": " fn symlink(\n\n &mut self,\n\n req: &Request,\n\n parent: u64,\n\n name: &OsStr,\n\n link: &Path,\n\n reply: ReplyEntry,\n\n ) {\n\n let async_impl = self.0.clone();\n\n let name = name.to_string_lossy().to_string().into();\n\n let link = link.to_string_lossy().to_string().into();\n\n let uid = req.uid();\n\n let gid = req.gid();\n\n\n\n spawn_reply(req.unique(), reply, async move {\n\n async_impl.symlink(gid, uid, parent, name, link).await\n\n });\n\n }\n\n\n\n fn rename(\n", "file_path": "src/fs/async_fs.rs", "rank": 55, "score": 27678.165229724822 }, { "content": " .setlk(ino, fh, lock_owner, start, end, typ, pid, sleep)\n\n .await\n\n });\n\n }\n\n\n\n fn bmap(&mut self, req: &Request, ino: u64, blocksize: u32, idx: u64, reply: ReplyBmap) {\n\n let async_impl = self.0.clone();\n\n spawn_reply(req.unique(), reply, async move {\n\n async_impl.bmap(ino, blocksize, idx).await\n\n });\n\n }\n\n\n\n fn fallocate(\n\n &mut self,\n\n req: &Request,\n\n ino: u64,\n\n fh: u64,\n\n offset: i64,\n\n length: i64,\n\n mode: i32,\n", "file_path": "src/fs/async_fs.rs", "rank": 56, "score": 27678.10599132769 }, { "content": " .mknod(parent, name, mode, 
gid, uid, umask, rdev)\n\n .await\n\n });\n\n }\n\n\n\n fn mkdir(\n\n &mut self,\n\n req: &Request,\n\n parent: u64,\n\n name: &OsStr,\n\n mode: u32,\n\n umask: u32,\n\n reply: ReplyEntry,\n\n ) {\n\n let async_impl = self.0.clone();\n\n let name = name.to_string_lossy().to_string().into();\n\n let uid = req.uid();\n\n let gid = req.gid();\n\n\n\n spawn_reply(req.unique(), reply, async move {\n", "file_path": "src/fs/async_fs.rs", "rank": 57, "score": 27677.81046404554 }, { "content": " spawn_reply(\n\n req.unique(),\n\n reply,\n\n async move { async_impl.getattr(ino).await },\n\n );\n\n }\n\n\n\n fn setattr(\n\n &mut self,\n\n req: &Request,\n\n ino: u64,\n\n mode: Option<u32>,\n\n uid: Option<u32>,\n\n gid: Option<u32>,\n\n size: Option<u64>,\n\n atime: Option<TimeOrNow>,\n\n mtime: Option<TimeOrNow>,\n\n ctime: Option<SystemTime>,\n\n fh: Option<u64>,\n\n crtime: Option<SystemTime>,\n", "file_path": "src/fs/async_fs.rs", "rank": 58, "score": 27677.75770046059 }, { "content": "type BoxedFuture<'a, T> = Pin<Box<dyn 'a + Send + Future<Output = Result<T>>>>;\n\n\n\nimpl TiFs {\n\n pub const SCAN_LIMIT: u32 = 1 << 10;\n\n pub const DEFAULT_BLOCK_SIZE: u64 = 1 << 16;\n\n pub const MAX_NAME_LEN: u32 = 1 << 8;\n\n\n\n #[instrument]\n\n pub async fn construct<S>(\n\n pd_endpoints: Vec<S>,\n\n cfg: Config,\n\n options: Vec<MountOption>,\n\n ) -> anyhow::Result<Self>\n\n where\n\n S: Clone + Debug + Into<String>,\n\n {\n\n let client = TransactionClient::new_with_config(pd_endpoints.clone(), cfg.clone())\n\n .await\n\n .map_err(|err| anyhow!(\"{}\", err))?;\n\n info!(\"connected to pd endpoints: {:?}\", pd_endpoints);\n", "file_path": "src/fs/tikv_fs.rs", "rank": 59, "score": 27142.721223062304 }, { "content": " F: for<'a> FnOnce(&'a TiFs, &'a mut Txn) -> BoxedFuture<'a, T>,\n\n {\n\n match f(self, txn).await {\n\n Ok(v) => {\n\n txn.commit().await?;\n\n trace!(\"transaction committed\");\n\n Ok(v)\n\n }\n\n Err(e) => {\n\n txn.rollback().await?;\n\n debug!(\"transaction 
rollbacked\");\n\n Err(e)\n\n }\n\n }\n\n }\n\n\n\n async fn with_optimistic<F, T>(&self, f: F) -> Result<T>\n\n where\n\n T: 'static + Send,\n\n F: for<'a> FnOnce(&'a TiFs, &'a mut Txn) -> BoxedFuture<'a, T>,\n", "file_path": "src/fs/tikv_fs.rs", "rank": 60, "score": 9.730975877021496 }, { "content": "use std::fmt::{self, Debug};\n\nuse std::future::Future;\n\nuse std::matches;\n\nuse std::pin::Pin;\n\nuse std::time::{Duration, SystemTime};\n\n\n\nuse anyhow::anyhow;\n\nuse async_std::task::sleep;\n\nuse async_trait::async_trait;\n\nuse bytes::Bytes;\n\nuse bytestring::ByteString;\n\nuse fuser::consts::FOPEN_DIRECT_IO;\n\nuse fuser::*;\n\nuse libc::{F_RDLCK, F_UNLCK, F_WRLCK, SEEK_CUR, SEEK_END, SEEK_SET};\n\nuse parse_size::parse_size;\n\nuse tikv_client::{Config, TransactionClient};\n\nuse tracing::{debug, error, info, instrument, trace, warn};\n\n\n\nuse super::async_fs::AsyncFileSystem;\n\nuse super::dir::Directory;\n", "file_path": "src/fs/tikv_fs.rs", "rank": 61, "score": 9.446772580252226 }, { "content": "\n\n pub async fn get_index(&self, parent: u64, name: ByteString) -> Result<Option<u64>> {\n\n let key = ScopedKey::index(parent, &name);\n\n self.get(key)\n\n .await\n\n .map_err(FsError::from)\n\n .and_then(|value| {\n\n value\n\n .map(|data| Ok(Index::deserialize(&data)?.ino))\n\n .transpose()\n\n })\n\n }\n\n\n\n pub async fn set_index(&mut self, parent: u64, name: ByteString, ino: u64) -> Result<()> {\n\n let key = ScopedKey::index(parent, &name);\n\n let value = Index::new(ino).serialize()?;\n\n Ok(self.put(key, value).await?)\n\n }\n\n\n\n pub async fn remove_index(&mut self, parent: u64, name: ByteString) -> Result<()> {\n", "file_path": "src/fs/transaction.rs", "rank": 62, "score": 9.418070612454276 }, { "content": " async fn setxattr(\n\n &self,\n\n _ino: u64,\n\n _name: ByteString,\n\n _value: Vec<u8>,\n\n _flags: i32,\n\n _position: u32,\n\n ) -> Result<()> {\n\n // TODO: implement me\n\n Ok(())\n\n }\n\n\n\n /// Get an extended attribute.\n\n 
/// If `size` is 0, the size of the value should be sent with `reply.size()`.\n\n /// If `size` is not 0, and the value fits, send it with `reply.data()`, or\n\n /// `reply.error(ERANGE)` if it doesn't.\n\n async fn getxattr(&self, _ino: u64, _name: ByteString, size: u32) -> Result<Xattr> {\n\n // TODO: implement me\n\n if size == 0 {\n\n Ok(Xattr::size(0))\n", "file_path": "src/fs/tikv_fs.rs", "rank": 63, "score": 9.08268117721134 }, { "content": " } else {\n\n Ok(Xattr::data(Vec::new()))\n\n }\n\n }\n\n\n\n /// List extended attribute names.\n\n /// If `size` is 0, the size of the value should be sent with `reply.size()`.\n\n /// If `size` is not 0, and the value fits, send it with `reply.data()`, or\n\n /// `reply.error(ERANGE)` if it doesn't.\n\n async fn listxattr(&self, _ino: u64, size: u32) -> Result<Xattr> {\n\n // TODO: implement me\n\n if size == 0 {\n\n Ok(Xattr::size(0))\n\n } else {\n\n Ok(Xattr::data(Vec::new()))\n\n }\n\n }\n\n\n\n /// Remove an extended attribute.\n\n async fn removexattr(&self, _ino: u64, _name: ByteString) -> Result<()> {\n\n // TODO: implement me\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/fs/tikv_fs.rs", "rank": 64, "score": 8.3964777636686 }, { "content": " {\n\n let mut txn = Txn::begin_optimistic(\n\n &self.client,\n\n self.block_size,\n\n self.max_size,\n\n Self::MAX_NAME_LEN,\n\n )\n\n .await?;\n\n self.process_txn(&mut txn, f).await\n\n }\n\n\n\n async fn spin<F, T>(&self, delay: Option<Duration>, mut f: F) -> Result<T>\n\n where\n\n T: 'static + Send,\n\n F: for<'a> FnMut(&'a TiFs, &'a mut Txn) -> BoxedFuture<'a, T>,\n\n {\n\n loop {\n\n match self.with_optimistic(&mut f).await {\n\n Ok(v) => break Ok(v),\n\n Err(FsError::KeyError(err)) => {\n", "file_path": "src/fs/tikv_fs.rs", "rank": 65, "score": 8.031874217155362 }, { "content": " &self,\n\n ino: u64,\n\n fh: u64,\n\n offset: i64,\n\n data: Vec<u8>,\n\n _write_flags: u32,\n\n _flags: i32,\n\n _lock_owner: Option<u64>,\n\n ) -> Result<Write> {\n\n let data: Bytes = 
data.into();\n\n let len = self\n\n .spin_no_delay(move |_, txn| Box::pin(txn.write(ino, fh, offset, data.clone())))\n\n .await?;\n\n Ok(Write::new(len as u32))\n\n }\n\n\n\n /// Create a directory.\n\n #[tracing::instrument]\n\n async fn mkdir(\n\n &self,\n", "file_path": "src/fs/tikv_fs.rs", "rank": 66, "score": 8.023350590552022 }, { "content": "\n\n Ok(true)\n\n }\n\n\n\n fn check_file_name(name: &str) -> Result<()> {\n\n if name.len() <= Self::MAX_NAME_LEN as usize {\n\n Ok(())\n\n } else {\n\n Err(FsError::NameTooLong {\n\n file: name.to_string(),\n\n })\n\n }\n\n }\n\n}\n\n\n\nimpl Debug for TiFs {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.write_fmt(format_args!(\"tifs({:?})\", self.pd_endpoints))\n\n }\n\n}\n", "file_path": "src/fs/tikv_fs.rs", "rank": 67, "score": 7.9712585247215895 }, { "content": " let key = ScopedKey::index(parent, &name);\n\n Ok(self.delete(key).await?)\n\n }\n\n\n\n pub async fn read_inode(&self, ino: u64) -> Result<Inode> {\n\n let value = self\n\n .get(ScopedKey::inode(ino))\n\n .await?\n\n .ok_or_else(|| FsError::InodeNotFound { inode: ino })?;\n\n Ok(Inode::deserialize(&value)?)\n\n }\n\n\n\n pub async fn save_inode(&mut self, inode: &Inode) -> Result<()> {\n\n let key = ScopedKey::inode(inode.ino);\n\n\n\n if inode.nlink == 0 && inode.opened_fh == 0 {\n\n self.delete(key).await?;\n\n } else {\n\n self.put(key, inode.serialize()?).await?;\n\n debug!(\"save inode: {:?}\", inode);\n", "file_path": "src/fs/transaction.rs", "rank": 68, "score": 7.93480953451966 }, { "content": "#![feature(async_closure)]\n\n#![feature(array_chunks)]\n\n\n\npub mod fs;\n\n\n\nuse async_std::fs::read_to_string;\n\nuse async_std::path::PathBuf;\n\nuse fs::async_fs::AsyncFs;\n\nuse fs::client::TlsConfig;\n\nuse fs::tikv_fs::TiFs;\n\nuse fuser::MountOption as FuseMountOption;\n\nuse paste::paste;\n\nuse tracing::debug;\n\n\n\nconst DEFAULT_TLS_CONFIG_PATH: &str = \"~/.tifs/tls.toml\";\n\n\n", "file_path": "src/lib.rs", "rank": 
69, "score": 7.881030544836062 }, { "content": "use super::error::{FsError, Result};\n\nuse super::key::ROOT_INODE;\n\nuse super::mode::make_mode;\n\nuse super::reply::{\n\n get_time, Attr, Create, Data, Dir, DirItem, Entry, Lock, Lseek, Open, StatFs, Write, Xattr,\n\n};\n\nuse super::transaction::Txn;\n\nuse crate::MountOption;\n\n\n\npub struct TiFs {\n\n pub pd_endpoints: Vec<String>,\n\n pub config: Config,\n\n pub client: TransactionClient,\n\n pub direct_io: bool,\n\n pub block_size: u64,\n\n pub max_size: Option<u64>,\n\n}\n\n\n", "file_path": "src/fs/tikv_fs.rs", "rank": 70, "score": 7.649586945957388 }, { "content": " trace!(\"spin because of a key error({})\", err);\n\n if let Some(time) = delay {\n\n sleep(time).await;\n\n }\n\n }\n\n Err(err) => break Err(err),\n\n }\n\n }\n\n }\n\n\n\n async fn spin_no_delay<F, T>(&self, f: F) -> Result<T>\n\n where\n\n T: 'static + Send,\n\n F: for<'a> FnMut(&'a TiFs, &'a mut Txn) -> BoxedFuture<'a, T>,\n\n {\n\n self.spin(None, f).await\n\n }\n\n\n\n async fn read_dir(&self, ino: u64) -> Result<Directory> {\n\n self.spin_no_delay(move |_, txn| Box::pin(txn.read_dir(ino)))\n", "file_path": "src/fs/tikv_fs.rs", "rank": 71, "score": 7.641410443784947 }, { "content": " inode.set_size(inode.size.max(target), self.block_size);\n\n self.save_inode(&inode.into()).await?;\n\n trace!(\"write data: {}\", String::from_utf8_lossy(&data));\n\n Ok(size)\n\n }\n\n\n\n pub async fn write_link(&mut self, inode: &mut Inode, data: Bytes) -> Result<usize> {\n\n debug_assert!(inode.file_attr.kind == FileType::Symlink);\n\n inode.inline_data = None;\n\n inode.set_size(0, self.block_size);\n\n self.write_inline_data(inode, 0, &data).await\n\n }\n\n\n\n pub async fn read_link(&mut self, ino: u64) -> Result<Vec<u8>> {\n\n let mut inode = self.read_inode(ino).await?;\n\n debug_assert!(inode.file_attr.kind == FileType::Symlink);\n\n let size = inode.size;\n\n self.read_inline_data(&mut inode, 0, size).await\n\n }\n\n\n", "file_path": 
"src/fs/transaction.rs", "rank": 72, "score": 7.496652391494366 }, { "content": " }\n\n\n\n #[tracing::instrument]\n\n async fn read(\n\n &self,\n\n ino: u64,\n\n fh: u64,\n\n offset: i64,\n\n size: u32,\n\n _flags: i32,\n\n _lock_owner: Option<u64>,\n\n ) -> Result<Data> {\n\n let data = self\n\n .spin_no_delay(move |_, txn| Box::pin(txn.read(ino, fh, offset, size)))\n\n .await?;\n\n Ok(Data::new(data))\n\n }\n\n\n\n #[tracing::instrument(skip(data))]\n\n async fn write(\n", "file_path": "src/fs/tikv_fs.rs", "rank": 73, "score": 7.443991796277434 }, { "content": " make_mode(FileType::Symlink, 0o777),\n\n gid,\n\n uid,\n\n 0,\n\n )\n\n .await?;\n\n\n\n txn.write_link(&mut attr, link.into_bytes()).await?;\n\n Ok(Entry::new(attr.into(), 0))\n\n })\n\n })\n\n .await\n\n }\n\n\n\n async fn readlink(&self, ino: u64) -> Result<Data> {\n\n self.spin(None, move |_, txn| {\n\n Box::pin(async move { Ok(Data::new(txn.read_link(ino).await?)) })\n\n })\n\n .await\n\n }\n", "file_path": "src/fs/tikv_fs.rs", "rank": 74, "score": 7.443991796277434 }, { "content": " self.save_inode(&inode).await?;\n\n self.put(ScopedKey::block(ino, 0), data).await?;\n\n Ok(inode)\n\n }\n\n\n\n pub async fn statfs(&mut self) -> Result<StatFs> {\n\n let bsize = self.block_size as u32;\n\n let mut meta = self\n\n .read_meta()\n\n .await?\n\n .expect(\"meta should not be none after fs initialized\");\n\n let next_inode = meta.inode_next;\n\n let (used_blocks, files) = self\n\n .scan(\n\n ScopedKey::inode_range(ROOT_INODE..next_inode),\n\n (next_inode - ROOT_INODE) as u32,\n\n )\n\n .await?\n\n .map(|pair| Inode::deserialize(pair.value()))\n\n .try_fold((0, 0), |(blocks, files), inode| {\n", "file_path": "src/fs/transaction.rs", "rank": 75, "score": 7.424117076734882 }, { "content": " self.read_data(ino, start as u64, Some(size as u64)).await\n\n }\n\n\n\n pub async fn write(&mut self, ino: u64, fh: u64, offset: i64, data: Bytes) -> Result<usize> {\n\n let handler = self.read_fh(ino, fh).await?;\n\n let 
start = handler.cursor as i64 + offset;\n\n if start < 0 {\n\n return Err(FsError::InvalidOffset { ino, offset: start });\n\n }\n\n\n\n self.write_data(ino, start as u64, data).await\n\n }\n\n\n\n pub async fn make_inode(\n\n &mut self,\n\n parent: u64,\n\n name: ByteString,\n\n mode: u32,\n\n gid: u32,\n\n uid: u32,\n", "file_path": "src/fs/transaction.rs", "rank": 76, "score": 7.298851804271166 }, { "content": "use std::fmt::Debug;\n\nuse std::time::{Duration, SystemTime, UNIX_EPOCH};\n\n\n\nuse fuser::*;\n\nuse serde::{Deserialize, Serialize};\n\nuse tracing::{debug, error, trace};\n\n\n\nuse super::error::Result;\n\n\n", "file_path": "src/fs/reply.rs", "rank": 77, "score": 7.062586246983384 }, { "content": " Default::default()\n\n };\n\n\n\n debug!(\"use tikv client config: {:?}\", client_cfg);\n\n let fs_impl = TiFs::construct(endpoints, client_cfg, options).await?;\n\n\n\n make_daemon()?;\n\n\n\n fuser::mount2(AsyncFs::from(fs_impl), mountpoint, &fuse_options)?;\n\n\n\n Ok(())\n\n}\n\n\n\npub async fn mount_tifs(\n\n mountpoint: String,\n\n endpoints: Vec<&str>,\n\n options: Vec<MountOption>,\n\n) -> anyhow::Result<()> {\n\n mount_tifs_daemonize(mountpoint, endpoints, options, || Ok(())).await\n\n}\n", "file_path": "src/lib.rs", "rank": 78, "score": 6.90769125203514 }, { "content": "use super::error::{FsError, Result};\n\nuse super::reply::DirItem;\n\nuse super::serialize::{deserialize, serialize, ENCODING};\n\n\n\npub type Directory = Vec<DirItem>;\n\n\n", "file_path": "src/fs/dir.rs", "rank": 79, "score": 6.825602679051174 }, { "content": " }\n\n Ok(())\n\n }\n\n\n\n pub async fn remove_inode(&mut self, ino: u64) -> Result<()> {\n\n self.delete(ScopedKey::inode(ino)).await?;\n\n Ok(())\n\n }\n\n\n\n pub async fn read_meta(&self) -> Result<Option<Meta>> {\n\n let opt_data = self.get(ScopedKey::meta()).await?;\n\n opt_data.map(|data| Meta::deserialize(&data)).transpose()\n\n }\n\n\n\n pub async fn save_meta(&mut self, meta: &Meta) -> Result<()> {\n\n 
self.put(ScopedKey::meta(), meta.serialize()?).await?;\n\n Ok(())\n\n }\n\n\n\n async fn transfer_inline_data_to_block(&mut self, inode: &mut Inode) -> Result<()> {\n", "file_path": "src/fs/transaction.rs", "rank": 80, "score": 6.701625427203219 }, { "content": " Ok::<_, FsError>((blocks + inode?.blocks, files + 1))\n\n })?;\n\n let ffree = std::u64::MAX - next_inode;\n\n let bfree = match self.max_blocks {\n\n Some(max_blocks) if max_blocks > used_blocks => max_blocks - used_blocks,\n\n Some(_) => 0,\n\n None => std::u64::MAX,\n\n };\n\n let blocks = match self.max_blocks {\n\n Some(max_blocks) => max_blocks,\n\n None => used_blocks,\n\n };\n\n\n\n let stat = StatFs::new(\n\n blocks,\n\n bfree,\n\n bfree,\n\n files,\n\n ffree,\n\n bsize,\n", "file_path": "src/fs/transaction.rs", "rank": 81, "score": 6.626970533014061 }, { "content": " assert_eq!(String::from(MountOption::BlkSize(123)), \"blksize=123\");\n\n assert_eq!(String::from(MountOption::BlkSize(0)), \"blksize=0\");\n\n }\n\n}\n\n\n\npub async fn mount_tifs_daemonize<F>(\n\n mountpoint: String,\n\n endpoints: Vec<&str>,\n\n options: Vec<MountOption>,\n\n make_daemon: F,\n\n) -> anyhow::Result<()>\n\nwhere\n\n F: FnOnce() -> anyhow::Result<()>,\n\n{\n\n let mut fuse_options = vec![\n\n FuseMountOption::FSName(format!(\"tifs:{}\", endpoints.join(\",\"))),\n\n FuseMountOption::AllowOther,\n\n FuseMountOption::DefaultPermissions,\n\n ];\n\n\n", "file_path": "src/lib.rs", "rank": 82, "score": 6.606018004816972 }, { "content": " attr.size = 0;\n\n attr.atime = SystemTime::now();\n\n self.save_inode(&attr).await?;\n\n Ok(clear_size)\n\n }\n\n\n\n pub async fn write_data(&mut self, ino: u64, start: u64, data: Bytes) -> Result<usize> {\n\n debug!(\"write data at ({})[{}]\", ino, start);\n\n self.check_space_left(&self.read_meta().await?.unwrap())?;\n\n\n\n let mut inode = self.read_inode(ino).await?;\n\n let size = data.len();\n\n let target = start + size as u64;\n\n\n\n if inode.inline_data.is_some() && target > 
self.block_size {\n\n self.transfer_inline_data_to_block(&mut inode).await?;\n\n }\n\n\n\n if (inode.inline_data.is_some() || inode.size == 0) && target <= self.block_size {\n\n return self.write_inline_data(&mut inode, start, &data).await;\n", "file_path": "src/fs/transaction.rs", "rank": 83, "score": 6.350957470861896 }, { "content": " entry.stat,\n\n entry.generation,\n\n open.fh,\n\n open.flags,\n\n ))\n\n }\n\n\n\n async fn lseek(&self, ino: u64, fh: u64, offset: i64, whence: i32) -> Result<Lseek> {\n\n self.spin_no_delay(move |_, txn| {\n\n Box::pin(async move {\n\n let mut file_handler = txn.read_fh(ino, fh).await?;\n\n let inode = txn.read_inode(ino).await?;\n\n let target_cursor = match whence {\n\n SEEK_SET => offset,\n\n SEEK_CUR => file_handler.cursor as i64 + offset,\n\n SEEK_END => inode.size as i64 + offset,\n\n _ => return Err(FsError::UnknownWhence { whence }),\n\n };\n\n\n\n if target_cursor < 0 {\n", "file_path": "src/fs/tikv_fs.rs", "rank": 84, "score": 6.108023318414068 }, { "content": "use std::ops::{Deref, DerefMut};\n\nuse std::time::SystemTime;\n\n\n\nuse bytes::Bytes;\n\nuse bytestring::ByteString;\n\nuse fuser::{FileAttr, FileType};\n\nuse tikv_client::{Transaction, TransactionClient};\n\nuse tracing::{debug, trace};\n\n\n\nuse super::block::empty_block;\n\nuse super::dir::Directory;\n\nuse super::error::{FsError, Result};\n\nuse super::file_handler::FileHandler;\n\nuse super::index::Index;\n\nuse super::inode::Inode;\n\nuse super::key::{ScopedKey, ROOT_INODE};\n\nuse super::meta::Meta;\n\nuse super::mode::{as_file_kind, as_file_perm, make_mode};\n\nuse super::reply::{DirItem, StatFs};\n\n\n", "file_path": "src/fs/transaction.rs", "rank": 85, "score": 6.0930472553184245 }, { "content": " })\n\n })\n\n .await\n\n }\n\n\n\n #[tracing::instrument]\n\n async fn lookup(&self, parent: u64, name: ByteString) -> Result<Entry> {\n\n Self::check_file_name(&name)?;\n\n self.spin_no_delay(move |_, txn| {\n\n let name = name.clone();\n\n 
Box::pin(async move {\n\n let ino = txn.lookup(parent, name).await?;\n\n Ok(Entry::new(txn.read_inode(ino).await?.into(), 0))\n\n })\n\n })\n\n .await\n\n }\n\n\n\n #[tracing::instrument]\n\n async fn getattr(&self, ino: u64) -> Result<Attr> {\n", "file_path": "src/fs/tikv_fs.rs", "rank": 86, "score": 6.054081906696652 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\nuse super::error::{FsError, Result};\n\nuse super::serialize::{deserialize, serialize, ENCODING};\n\n\n\n#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Hash, Clone, Copy, Deserialize, Serialize)]\n\npub struct Index {\n\n pub ino: u64,\n\n}\n\n\n\nimpl Index {\n\n pub const fn new(ino: u64) -> Self {\n\n Self { ino }\n\n }\n\n\n\n pub fn serialize(&self) -> Result<Vec<u8>> {\n\n serialize(self).map_err(|err| FsError::Serialize {\n\n target: \"index\",\n\n typ: ENCODING,\n\n msg: err.to_string(),\n", "file_path": "src/fs/index.rs", "rank": 87, "score": 6.048314622934301 }, { "content": " })\n\n }\n\n\n\n pub async fn fallocate(&mut self, inode: &mut Inode, offset: i64, length: i64) -> Result<()> {\n\n let target_size = (offset + length) as u64;\n\n if target_size <= inode.size {\n\n return Ok(());\n\n }\n\n\n\n if inode.inline_data.is_some() {\n\n if target_size <= self.inline_data_threshold() {\n\n let original_size = inode.size;\n\n let data = vec![0; (target_size - original_size) as usize];\n\n self.write_inline_data(inode, original_size, &data).await?;\n\n return Ok(());\n\n } else {\n\n self.transfer_inline_data_to_block(inode).await?;\n\n }\n\n }\n\n\n", "file_path": "src/fs/transaction.rs", "rank": 88, "score": 5.992626953649081 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\nuse super::error::{FsError, Result};\n\nuse super::serialize::{deserialize, serialize, ENCODING};\n\n\n\n#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Hash, Clone, Copy, Deserialize, Serialize)]\n\npub struct FileHandler {\n\n // TODO: add open flags\n\n pub cursor: u64,\n\n}\n\n\n\nimpl 
FileHandler {\n\n pub const fn new(cursor: u64) -> Self {\n\n Self { cursor }\n\n }\n\n\n\n pub fn serialize(&self) -> Result<Vec<u8>> {\n\n serialize(self).map_err(|err| FsError::Serialize {\n\n target: \"file handler\",\n\n typ: ENCODING,\n", "file_path": "src/fs/file_handler.rs", "rank": 89, "score": 5.9300204785421595 }, { "content": "use std::collections::HashSet;\n\nuse std::ops::{Deref, DerefMut};\n\n\n\nuse fuser::FileAttr;\n\nuse libc::F_UNLCK;\n\nuse serde::{Deserialize, Serialize};\n\n\n\nuse super::error::{FsError, Result};\n\nuse super::serialize::{deserialize, serialize, ENCODING};\n\n\n\n#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]\n\npub struct LockState {\n\n pub owner_set: HashSet<u64>,\n\n #[cfg(target_os = \"linux\")]\n\n pub lk_type: i32,\n\n #[cfg(any(target_os = \"freebsd\", target_os = \"macos\"))]\n\n pub lk_type: i16,\n\n}\n\n\n\n#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)]\n", "file_path": "src/fs/inode.rs", "rank": 90, "score": 5.670781806701482 }, { "content": "use serde::{Deserialize, Serialize};\n\n\n\nuse super::error::{FsError, Result};\n\nuse super::key::ROOT_INODE;\n\nuse super::reply::StatFs;\n\nuse super::serialize::{deserialize, serialize, ENCODING};\n\n\n\n#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]\n\npub struct Meta {\n\n pub inode_next: u64,\n\n pub block_size: u64,\n\n pub last_stat: Option<StatFs>,\n\n}\n\n\n\nimpl Meta {\n\n pub const fn new(block_size: u64) -> Self {\n\n Self {\n\n inode_next: ROOT_INODE,\n\n block_size,\n\n last_stat: None,\n", "file_path": "src/fs/meta.rs", "rank": 91, "score": 5.613959960086204 }, { "content": "\n\n #[tracing::instrument]\n\n async fn access(&self, ino: u64, mask: i32) -> Result<()> {\n\n Ok(())\n\n }\n\n\n\n async fn create(\n\n &self,\n\n uid: u32,\n\n gid: u32,\n\n parent: u64,\n\n name: ByteString,\n\n mode: u32,\n\n umask: u32,\n\n flags: i32,\n\n ) -> Result<Create> {\n\n Self::check_file_name(&name)?;\n\n let entry = 
self.mknod(parent, name, mode, gid, uid, umask, 0).await?;\n\n let open = self.open(entry.stat.ino, flags).await?;\n\n Ok(Create::new(\n", "file_path": "src/fs/tikv_fs.rs", "rank": 92, "score": 5.560014304109645 }, { "content": " pub async fn read_fh(&self, ino: u64, fh: u64) -> Result<FileHandler> {\n\n let data = self\n\n .get(ScopedKey::handler(ino, fh))\n\n .await?\n\n .ok_or_else(|| FsError::FhNotFound { ino, fh })?;\n\n FileHandler::deserialize(&data)\n\n }\n\n\n\n pub async fn save_fh(&mut self, ino: u64, fh: u64, handler: &FileHandler) -> Result<()> {\n\n Ok(self\n\n .put(ScopedKey::handler(ino, fh), handler.serialize()?)\n\n .await?)\n\n }\n\n\n\n pub async fn read(&mut self, ino: u64, fh: u64, offset: i64, size: u32) -> Result<Vec<u8>> {\n\n let handler = self.read_fh(ino, fh).await?;\n\n let start = handler.cursor as i64 + offset;\n\n if start < 0 {\n\n return Err(FsError::InvalidOffset { ino, offset: start });\n\n }\n", "file_path": "src/fs/transaction.rs", "rank": 93, "score": 5.514334195912497 }, { "content": " .await\n\n }\n\n\n\n async fn read_inode(&self, ino: u64) -> Result<FileAttr> {\n\n let ino = self\n\n .spin_no_delay(move |_, txn| Box::pin(txn.read_inode(ino)))\n\n .await?;\n\n Ok(ino.file_attr)\n\n }\n\n\n\n async fn setlkw(\n\n &self,\n\n ino: u64,\n\n lock_owner: u64,\n\n #[cfg(target_os = \"linux\")] typ: i32,\n\n #[cfg(any(target_os = \"freebsd\", target_os = \"macos\"))] typ: i16,\n\n ) -> Result<bool> {\n\n loop {\n\n let res = self\n\n .spin_no_delay(move |_, txn| {\n", "file_path": "src/fs/tikv_fs.rs", "rank": 94, "score": 5.494165658650233 }, { "content": "#### FileIndex\n\n\n\nKeys in the file index scope are designed to store file index of file, following is the layout of an encoded file index key.\n\n\n\n```\n\n+ 1byte +<----------------- 8bytes ---------------->+<-------------- dynamic size ---------------->+\n\n| | | |\n\n| | | |\n\n| | | |\n\n| | | |\n\n| | | |\n\n| | | |\n\n| v v 
v\n\n+--------------------------------------------------------------------------------------------------+\n\n| | | |\n\n| 4 | inode number of parent directory | file name in utf-8 encoding |\n\n| | | |\n\n+-------+-------------------------------------------+----------------------------------------------+\n\n```\n\n\n\n### Value\n\n\n\n#### Serialize\n\nWe would use the [serde framework](https://github.com/serde-rs/serde) to serialize/deserialize the meta, inodes, directories, file handlers and file indexes. Taking both of human-readablility and performance into consideration, we would use json in development and use bincode in production.\n\n\n", "file_path": "contribution/design.md", "rank": 95, "score": 5.478836666659263 }, { "content": "use std::mem::size_of;\n\nuse std::ops::Range;\n\n\n\nuse tikv_client::Key;\n\n\n\nuse super::error::{FsError, Result};\n\n\n\npub const ROOT_INODE: u64 = fuser::FUSE_ROOT_ID;\n\n\n\n#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Hash, Clone, Copy)]\n\npub enum ScopedKey<'a> {\n\n Meta,\n\n Inode(u64),\n\n Block { ino: u64, block: u64 },\n\n FileHandler { ino: u64, handler: u64 },\n\n FileIndex { parent: u64, name: &'a str },\n\n}\n\n\n\nimpl<'a> ScopedKey<'a> {\n\n const META: u8 = 0;\n", "file_path": "src/fs/key.rs", "rank": 96, "score": 5.467640135130141 }, { "content": " _flush: bool,\n\n ) -> Result<()> {\n\n self.spin_no_delay(move |_, txn| Box::pin(txn.close(ino, fh)))\n\n .await\n\n }\n\n\n\n /// Create a hard link.\n\n async fn link(&self, ino: u64, newparent: u64, newname: ByteString) -> Result<Entry> {\n\n Self::check_file_name(&newname)?;\n\n let inode = self\n\n .spin_no_delay(move |_, txn| Box::pin(txn.link(ino, newparent, newname.clone())))\n\n .await?;\n\n Ok(Entry::new(inode.into(), 0))\n\n }\n\n\n\n async fn unlink(&self, parent: u64, raw_name: ByteString) -> Result<()> {\n\n self.spin_no_delay(move |_, txn| Box::pin(txn.unlink(parent, raw_name.clone())))\n\n .await\n\n }\n\n\n", "file_path": 
"src/fs/tikv_fs.rs", "rank": 97, "score": 5.43217472365637 }, { "content": " debug_assert!(inode.size <= self.inline_data_threshold());\n\n let key = ScopedKey::block(inode.ino, 0);\n\n let mut data = inode.inline_data.clone().unwrap();\n\n data.resize(self.block_size as usize, 0);\n\n self.put(key, data).await?;\n\n inode.inline_data = None;\n\n Ok(())\n\n }\n\n\n\n async fn write_inline_data(\n\n &mut self,\n\n inode: &mut Inode,\n\n start: u64,\n\n data: &[u8],\n\n ) -> Result<usize> {\n\n debug_assert!(inode.size <= self.inline_data_threshold());\n\n let size = data.len() as u64;\n\n debug_assert!(start + size <= self.inline_data_threshold());\n\n\n\n let size = data.len();\n", "file_path": "src/fs/transaction.rs", "rank": 98, "score": 5.408474682786869 }, { "content": " // TODO: Find an api to calculate total and available space on tikv.\n\n async fn statfs(&self, _ino: u64) -> Result<StatFs> {\n\n self.spin_no_delay(|_, txn| Box::pin(txn.statfs())).await\n\n }\n\n\n\n #[tracing::instrument]\n\n async fn setlk(\n\n &self,\n\n ino: u64,\n\n fh: u64,\n\n lock_owner: u64,\n\n start: u64,\n\n end: u64,\n\n typ: i32,\n\n pid: u32,\n\n sleep: bool,\n\n ) -> Result<()> {\n\n #[cfg(any(target_os = \"freebsd\", target_os = \"macos\"))]\n\n let typ = typ as i16;\n\n let not_again = self.spin_no_delay(move |_, txn| {\n", "file_path": "src/fs/tikv_fs.rs", "rank": 99, "score": 5.3982661268752645 } ]
Rust
src/service.rs
wpbrown/ntex-mqtt
be783119479e532705848ee224297a19ec524184
use std::task::{Context, Poll}; use std::{fmt, future::Future, marker::PhantomData, pin::Pin, rc::Rc}; use ntex::codec::{AsyncRead, AsyncWrite, Decoder, Encoder}; use ntex::service::{IntoServiceFactory, Service, ServiceFactory}; use ntex::time::{Millis, Seconds, Sleep}; use ntex::util::{select, Either, Pool}; use super::io::{DispatchItem, Dispatcher, State, Timer}; type ResponseItem<U> = Option<<U as Encoder>::Item>; pub(crate) struct FramedService<St, C, T, Io, Codec> { connect: C, handler: Rc<T>, disconnect_timeout: Seconds, time: Timer, pool: Pool, _t: PhantomData<(St, Io, Codec)>, } impl<St, C, T, Io, Codec> FramedService<St, C, T, Io, Codec> { pub(crate) fn new(connect: C, service: T, pool: Pool, disconnect_timeout: Seconds) -> Self { FramedService { pool, connect, disconnect_timeout, handler: Rc::new(service), time: Timer::new(Millis::ONE_SEC), _t: PhantomData, } } } impl<St, C, T, Io, Codec> ServiceFactory for FramedService<St, C, T, Io, Codec> where Io: AsyncRead + AsyncWrite + Unpin + 'static, C: ServiceFactory<Config = (), Request = Io, Response = (Io, State, Codec, St, Seconds)>, C::Error: fmt::Debug, C::Future: 'static, <C::Service as Service>::Future: 'static, T: ServiceFactory< Config = St, Request = DispatchItem<Codec>, Response = ResponseItem<Codec>, Error = C::Error, InitError = C::Error, > + 'static, <T::Service as Service>::Error: 'static, <T::Service as Service>::Future: 'static, Codec: Decoder + Encoder + Clone + 'static, <Codec as Encoder>::Item: 'static, { type Config = (); type Request = Io; type Response = (); type Error = C::Error; type InitError = C::InitError; type Service = FramedServiceImpl<St, C::Service, T, Io, Codec>; type Future = Pin<Box<dyn Future<Output = Result<Self::Service, Self::InitError>>>>; fn new_service(&self, _: ()) -> Self::Future { let fut = self.connect.new_service(()); let handler = self.handler.clone(); let disconnect_timeout = self.disconnect_timeout; let time = self.time.clone(); let pool = self.pool.clone(); 
Box::pin(async move { Ok(FramedServiceImpl { handler, disconnect_timeout, pool, time, connect: fut.await?, _t: PhantomData, }) }) } } pub(crate) struct FramedServiceImpl<St, C, T, Io, Codec> { connect: C, handler: Rc<T>, disconnect_timeout: Seconds, pool: Pool, time: Timer, _t: PhantomData<(St, Io, Codec)>, } impl<St, C, T, Io, Codec> Service for FramedServiceImpl<St, C, T, Io, Codec> where Io: AsyncRead + AsyncWrite + Unpin + 'static, C: Service<Request = Io, Response = (Io, State, Codec, St, Seconds)>, C::Error: fmt::Debug, C::Future: 'static, T: ServiceFactory< Config = St, Request = DispatchItem<Codec>, Response = ResponseItem<Codec>, Error = C::Error, InitError = C::Error, > + 'static, <T::Service as Service>::Error: 'static, <T::Service as Service>::Future: 'static, Codec: Decoder + Encoder + Clone + 'static, <Codec as Encoder>::Item: 'static, { type Request = Io; type Response = (); type Error = C::Error; type Future = Pin<Box<dyn Future<Output = Result<(), Self::Error>>>>; #[inline] fn poll_ready(&self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> { let ready1 = self.connect.poll_ready(cx)?.is_ready(); let ready2 = self.pool.poll_ready(cx).is_ready(); if ready1 && ready2 { Poll::Ready(Ok(())) } else { Poll::Pending } } #[inline] fn poll_shutdown(&self, cx: &mut Context<'_>, is_error: bool) -> Poll<()> { self.connect.poll_shutdown(cx, is_error) } #[inline] fn call(&self, req: Io) -> Self::Future { log::trace!("Start connection handshake"); let handler = self.handler.clone(); let timeout = self.disconnect_timeout; let handshake = self.connect.call(req); let time = self.time.clone(); Box::pin(async move { let (io, st, codec, session, keepalive) = handshake.await.map_err(|e| { log::trace!("Connection handshake failed: {:?}", e); e })?; log::trace!("Connection handshake succeeded"); let handler = handler.new_service(session).await?; log::trace!("Connection handler is created, starting dispatcher"); Dispatcher::with(io, st, codec, handler, time) 
.keepalive_timeout(keepalive) .disconnect_timeout(timeout) .await }) } } pub(crate) struct FramedService2<St, C, T, Io, Codec> { connect: C, handler: Rc<T>, disconnect_timeout: Seconds, pool: Pool, time: Timer, _t: PhantomData<(St, Io, Codec)>, } impl<St, C, T, Io, Codec> FramedService2<St, C, T, Io, Codec> { pub(crate) fn new(connect: C, service: T, pool: Pool, disconnect_timeout: Seconds) -> Self { FramedService2 { connect, pool, disconnect_timeout, handler: Rc::new(service), time: Timer::new(Millis::ONE_SEC), _t: PhantomData, } } } impl<St, C, T, Io, Codec> ServiceFactory for FramedService2<St, C, T, Io, Codec> where Io: AsyncRead + AsyncWrite + Unpin + 'static, C: ServiceFactory< Config = (), Request = (Io, State), Response = (Io, State, Codec, St, Seconds), >, C::Error: fmt::Debug, C::Future: 'static, <C::Service as Service>::Future: 'static, T: ServiceFactory< Config = St, Request = DispatchItem<Codec>, Response = ResponseItem<Codec>, Error = C::Error, InitError = C::Error, > + 'static, <T::Service as Service>::Error: 'static, <T::Service as Service>::Future: 'static, Codec: Decoder + Encoder + Clone + 'static, <Codec as Encoder>::Item: 'static, { type Config = (); type Request = (Io, State, Option<Sleep>); type Response = (); type Error = C::Error; type InitError = C::InitError; type Service = FramedServiceImpl2<St, C::Service, T, Io, Codec>; type Future = Pin<Box<dyn Future<Output = Result<Self::Service, Self::InitError>>>>; fn new_service(&self, _: ()) -> Self::Future { let fut = self.connect.new_service(()); let handler = self.handler.clone(); let disconnect_timeout = self.disconnect_timeout; let time = self.time.clone(); let pool = self.pool.clone(); Box::pin(async move { Ok(FramedServiceImpl2 { handler, disconnect_timeout, time, pool, connect: fut.await?, _t: PhantomData, }) }) } } pub(crate) struct FramedServiceImpl2<St, C, T, Io, Codec> { connect: C, handler: Rc<T>, pool: Pool, disconnect_timeout: Seconds, time: Timer, _t: PhantomData<(St, Io, 
Codec)>, } impl<St, C, T, Io, Codec> Service for FramedServiceImpl2<St, C, T, Io, Codec> where Io: AsyncRead + AsyncWrite + Unpin + 'static, C: Service<Request = (Io, State), Response = (Io, State, Codec, St, Seconds)>, C::Error: fmt::Debug, C::Future: 'static, T: ServiceFactory< Config = St, Request = DispatchItem<Codec>, Response = ResponseItem<Codec>, Error = C::Error, InitError = C::Error, > + 'static, <T::Service as Service>::Error: 'static, <T::Service as Service>::Future: 'static, Codec: Decoder + Encoder + Clone + 'static, <Codec as Encoder>::Item: 'static, { type Request = (Io, State, Option<Sleep>); type Response = (); type Error = C::Error; type Future = Pin<Box<dyn Future<Output = Result<(), Self::Error>>>>; #[inline] fn poll_ready(&self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> { let ready1 = self.connect.poll_ready(cx)?.is_ready(); let ready2 = self.pool.poll_ready(cx).is_ready(); if ready1 && ready2 { Poll::Ready(Ok(())) } else { Poll::Pending } } #[inline] fn poll_shutdown(&self, cx: &mut Context<'_>, is_error: bool) -> Poll<()> { self.connect.poll_shutdown(cx, is_error) } #[inline] fn call(&self, (req, state, delay): (Io, State, Option<Sleep>)) -> Self::Future { log::trace!("Start connection handshake"); let handler = self.handler.clone(); let timeout = self.disconnect_timeout; let handshake = self.connect.call((req, state)); let time = self.time.clone(); Box::pin(async move { let (io, state, codec, ka, handler) = if let Some(delay) = delay { let res = select( delay, Box::pin(async { let (io, state, codec, st, ka) = handshake.await.map_err(|e| { log::trace!("Connection handshake failed: {:?}", e); e })?; log::trace!("Connection handshake succeeded"); let handler = handler.new_service(st).await?; log::trace!("Connection handler is created, starting dispatcher"); Ok::<_, C::Error>((io, state, codec, ka, handler)) }), ) .await; match res { Either::Left(_) => { log::warn!("Handshake timed out"); return Ok(()); } Either::Right(item) => 
item?, } } else { let (io, state, codec, st, ka) = handshake.await.map_err(|e| { log::trace!("Connection handshake failed: {:?}", e); e })?; log::trace!("Connection handshake succeeded"); let handler = handler.new_service(st).await?; log::trace!("Connection handler is created, starting dispatcher"); (io, state, codec, ka, handler) }; Dispatcher::with(io, state, codec, handler, time) .keepalive_timeout(ka) .disconnect_timeout(timeout) .await }) } }
use std::task::{Context, Poll}; use std::{fmt, future::Future, marker::PhantomData, pin::Pin, rc::Rc}; use ntex::codec::{AsyncRead, AsyncWrite, Decoder, Encoder}; use ntex::service::{IntoServiceFactory, Service, ServiceFactory}; use ntex::time::{Millis, Seconds, Sleep}; use ntex::util::{select, Either, Pool}; use super::io::{DispatchItem, Dispatcher, State, Timer}; type ResponseItem<U> = Option<<U as Encoder>::Item>; pub(crate) struct FramedService<St, C, T, Io, Codec> { connect: C, handler: Rc<T>, disconnect_timeout: Seconds, time: Timer, pool: Pool, _t: PhantomData<(St, Io, Codec)>, } impl<St, C, T, Io, Codec> FramedService<St, C, T, Io, Codec> { pub(crate) fn new(connect: C, service: T, pool: Pool, disconnect_timeout: Seconds) -> Self { FramedService { pool, connect, disconnect_timeout, handler: Rc::new(service), time: Timer::new(Millis::ONE_SEC), _t: PhantomData, } } } impl<St, C, T, Io, Codec> ServiceFactory for FramedService<St, C, T, Io, Codec> where Io: AsyncRead + AsyncWrite + Unpin + 'static, C: ServiceFactory<Config = (), Request = Io, Response = (Io, State, Codec, St, Seconds)>, C::Error: fmt::Debug, C::Future: 'static, <C::Service as Service>::Future: 'static, T: ServiceFactory< Config = St, Request = DispatchItem<Codec>, Response = ResponseItem<Codec>, Error = C::Error, InitError = C::Error, > + 'static, <T::Service as Service>::Error: 'static, <T::Service as Service>::Future: 'static, Codec: Decoder + Encoder + Clone + 'static, <Codec as Encoder>::Item: 'static, { type Config = (); type Request = Io; type Response = (); type Error = C::Error; type InitError = C::InitError; type Service = FramedServiceImpl<St, C::Service, T, Io, Codec>; type Future = Pin<Box<dyn Future<Output = Result<Self::Service, Self::InitError>>>>; fn new_service(&self, _: ()) -> Self::Future { let fut = self.connect.new_service(()); let handler = self.handler.clone(); let disconnect_timeout = self.disconnect_timeout; let time = self.time.clone(); let pool = self.pool.clone(); 
Box::pin(async move { Ok(FramedServiceImpl { handler, disconnect_timeout, pool, time, connect: fut.await?, _t: PhantomData, }) }) } } pub(crate) struct FramedServiceImpl<St, C, T, Io, Codec> { connect: C, handler: Rc<T>, disconnect_timeout: Seconds, pool: Poo
{:?}", e); e })?; log::trace!("Connection handshake succeeded"); let handler = handler.new_service(st).await?; log::trace!("Connection handler is created, starting dispatcher"); (io, state, codec, ka, handler) }; Dispatcher::with(io, state, codec, handler, time) .keepalive_timeout(ka) .disconnect_timeout(timeout) .await }) } }
l, time: Timer, _t: PhantomData<(St, Io, Codec)>, } impl<St, C, T, Io, Codec> Service for FramedServiceImpl<St, C, T, Io, Codec> where Io: AsyncRead + AsyncWrite + Unpin + 'static, C: Service<Request = Io, Response = (Io, State, Codec, St, Seconds)>, C::Error: fmt::Debug, C::Future: 'static, T: ServiceFactory< Config = St, Request = DispatchItem<Codec>, Response = ResponseItem<Codec>, Error = C::Error, InitError = C::Error, > + 'static, <T::Service as Service>::Error: 'static, <T::Service as Service>::Future: 'static, Codec: Decoder + Encoder + Clone + 'static, <Codec as Encoder>::Item: 'static, { type Request = Io; type Response = (); type Error = C::Error; type Future = Pin<Box<dyn Future<Output = Result<(), Self::Error>>>>; #[inline] fn poll_ready(&self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> { let ready1 = self.connect.poll_ready(cx)?.is_ready(); let ready2 = self.pool.poll_ready(cx).is_ready(); if ready1 && ready2 { Poll::Ready(Ok(())) } else { Poll::Pending } } #[inline] fn poll_shutdown(&self, cx: &mut Context<'_>, is_error: bool) -> Poll<()> { self.connect.poll_shutdown(cx, is_error) } #[inline] fn call(&self, req: Io) -> Self::Future { log::trace!("Start connection handshake"); let handler = self.handler.clone(); let timeout = self.disconnect_timeout; let handshake = self.connect.call(req); let time = self.time.clone(); Box::pin(async move { let (io, st, codec, session, keepalive) = handshake.await.map_err(|e| { log::trace!("Connection handshake failed: {:?}", e); e })?; log::trace!("Connection handshake succeeded"); let handler = handler.new_service(session).await?; log::trace!("Connection handler is created, starting dispatcher"); Dispatcher::with(io, st, codec, handler, time) .keepalive_timeout(keepalive) .disconnect_timeout(timeout) .await }) } } pub(crate) struct FramedService2<St, C, T, Io, Codec> { connect: C, handler: Rc<T>, disconnect_timeout: Seconds, pool: Pool, time: Timer, _t: PhantomData<(St, Io, Codec)>, } impl<St, C, T, Io, 
Codec> FramedService2<St, C, T, Io, Codec> { pub(crate) fn new(connect: C, service: T, pool: Pool, disconnect_timeout: Seconds) -> Self { FramedService2 { connect, pool, disconnect_timeout, handler: Rc::new(service), time: Timer::new(Millis::ONE_SEC), _t: PhantomData, } } } impl<St, C, T, Io, Codec> ServiceFactory for FramedService2<St, C, T, Io, Codec> where Io: AsyncRead + AsyncWrite + Unpin + 'static, C: ServiceFactory< Config = (), Request = (Io, State), Response = (Io, State, Codec, St, Seconds), >, C::Error: fmt::Debug, C::Future: 'static, <C::Service as Service>::Future: 'static, T: ServiceFactory< Config = St, Request = DispatchItem<Codec>, Response = ResponseItem<Codec>, Error = C::Error, InitError = C::Error, > + 'static, <T::Service as Service>::Error: 'static, <T::Service as Service>::Future: 'static, Codec: Decoder + Encoder + Clone + 'static, <Codec as Encoder>::Item: 'static, { type Config = (); type Request = (Io, State, Option<Sleep>); type Response = (); type Error = C::Error; type InitError = C::InitError; type Service = FramedServiceImpl2<St, C::Service, T, Io, Codec>; type Future = Pin<Box<dyn Future<Output = Result<Self::Service, Self::InitError>>>>; fn new_service(&self, _: ()) -> Self::Future { let fut = self.connect.new_service(()); let handler = self.handler.clone(); let disconnect_timeout = self.disconnect_timeout; let time = self.time.clone(); let pool = self.pool.clone(); Box::pin(async move { Ok(FramedServiceImpl2 { handler, disconnect_timeout, time, pool, connect: fut.await?, _t: PhantomData, }) }) } } pub(crate) struct FramedServiceImpl2<St, C, T, Io, Codec> { connect: C, handler: Rc<T>, pool: Pool, disconnect_timeout: Seconds, time: Timer, _t: PhantomData<(St, Io, Codec)>, } impl<St, C, T, Io, Codec> Service for FramedServiceImpl2<St, C, T, Io, Codec> where Io: AsyncRead + AsyncWrite + Unpin + 'static, C: Service<Request = (Io, State), Response = (Io, State, Codec, St, Seconds)>, C::Error: fmt::Debug, C::Future: 'static, T: 
ServiceFactory< Config = St, Request = DispatchItem<Codec>, Response = ResponseItem<Codec>, Error = C::Error, InitError = C::Error, > + 'static, <T::Service as Service>::Error: 'static, <T::Service as Service>::Future: 'static, Codec: Decoder + Encoder + Clone + 'static, <Codec as Encoder>::Item: 'static, { type Request = (Io, State, Option<Sleep>); type Response = (); type Error = C::Error; type Future = Pin<Box<dyn Future<Output = Result<(), Self::Error>>>>; #[inline] fn poll_ready(&self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> { let ready1 = self.connect.poll_ready(cx)?.is_ready(); let ready2 = self.pool.poll_ready(cx).is_ready(); if ready1 && ready2 { Poll::Ready(Ok(())) } else { Poll::Pending } } #[inline] fn poll_shutdown(&self, cx: &mut Context<'_>, is_error: bool) -> Poll<()> { self.connect.poll_shutdown(cx, is_error) } #[inline] fn call(&self, (req, state, delay): (Io, State, Option<Sleep>)) -> Self::Future { log::trace!("Start connection handshake"); let handler = self.handler.clone(); let timeout = self.disconnect_timeout; let handshake = self.connect.call((req, state)); let time = self.time.clone(); Box::pin(async move { let (io, state, codec, ka, handler) = if let Some(delay) = delay { let res = select( delay, Box::pin(async { let (io, state, codec, st, ka) = handshake.await.map_err(|e| { log::trace!("Connection handshake failed: {:?}", e); e })?; log::trace!("Connection handshake succeeded"); let handler = handler.new_service(st).await?; log::trace!("Connection handler is created, starting dispatcher"); Ok::<_, C::Error>((io, state, codec, ka, handler)) }), ) .await; match res { Either::Left(_) => { log::warn!("Handshake timed out"); return Ok(()); } Either::Right(item) => item?, } } else { let (io, state, codec, st, ka) = handshake.await.map_err(|e| { log::trace!("Connection handshake failed:
random
[ { "content": "struct DispatcherState<S: Service, U: Encoder + Decoder> {\n\n error: Option<IoDispatcherError<S::Error, <U as Encoder>::Error>>,\n\n base: usize,\n\n queue: VecDeque<ServiceResult<Result<S::Response, S::Error>>>,\n\n}\n\n\n", "file_path": "src/io.rs", "rank": 0, "score": 236579.058474531 }, { "content": "fn handshake_service_factory<Io, St, C>(\n\n factory: C,\n\n max_size: u32,\n\n handshake_timeout: Seconds,\n\n pool: Rc<MqttSinkPool>,\n\n) -> impl ServiceFactory<\n\n Config = (),\n\n Request = Io,\n\n Response = (Io, State, Rc<MqttShared>, Session<St>, Seconds),\n\n Error = MqttError<C::Error>,\n\n>\n\nwhere\n\n Io: AsyncRead + AsyncWrite + Unpin,\n\n C: ServiceFactory<Config = (), Request = Handshake<Io>, Response = HandshakeAck<Io, St>>,\n\n C::Error: fmt::Debug,\n\n{\n\n ntex::service::apply(\n\n Timeout::new(Millis::from(handshake_timeout)),\n\n ntex::service::fn_factory(move || {\n\n let pool = pool.clone();\n", "file_path": "src/v3/server.rs", "rank": 1, "score": 235997.75785951907 }, { "content": "fn handshake_service_factory<Io, St, C>(\n\n factory: C,\n\n max_size: u32,\n\n max_receive: u16,\n\n max_topic_alias: u16,\n\n max_qos: Option<QoS>,\n\n handshake_timeout: Seconds,\n\n pool: Rc<MqttSinkPool>,\n\n) -> impl ServiceFactory<\n\n Config = (),\n\n Request = Io,\n\n Response = (Io, State, Rc<MqttShared>, Session<St>, Seconds),\n\n Error = MqttError<C::Error>,\n\n>\n\nwhere\n\n Io: AsyncRead + AsyncWrite + Unpin + 'static,\n\n C: ServiceFactory<Config = (), Request = Handshake<Io>, Response = HandshakeAck<Io, St>>,\n\n C::Error: fmt::Debug,\n\n{\n\n ntex::service::apply(\n", "file_path": "src/v5/server.rs", "rank": 2, "score": 235997.75785951907 }, { "content": "fn handshake_service_factory2<Io, St, C>(\n\n factory: C,\n\n max_size: u32,\n\n handshake_timeout: Seconds,\n\n pool: Rc<MqttSinkPool>,\n\n) -> impl ServiceFactory<\n\n Config = (),\n\n Request = (Io, State),\n\n Response = (Io, State, Rc<MqttShared>, Session<St>, 
Seconds),\n\n Error = MqttError<C::Error>,\n\n InitError = C::InitError,\n\n>\n\nwhere\n\n Io: AsyncRead + AsyncWrite + Unpin,\n\n C: ServiceFactory<Config = (), Request = Handshake<Io>, Response = HandshakeAck<Io, St>>,\n\n C::Error: fmt::Debug,\n\n{\n\n ntex::service::apply(\n\n Timeout::new(Millis::from(handshake_timeout)),\n\n ntex::service::fn_factory(move || {\n", "file_path": "src/v3/server.rs", "rank": 3, "score": 235997.75785951904 }, { "content": "fn handshake_service_factory2<Io, St, C>(\n\n factory: C,\n\n max_size: u32,\n\n max_receive: u16,\n\n max_topic_alias: u16,\n\n max_qos: Option<QoS>,\n\n handshake_timeout: Seconds,\n\n pool: Rc<MqttSinkPool>,\n\n) -> impl ServiceFactory<\n\n Config = (),\n\n Request = (Io, State),\n\n Response = (Io, State, Rc<MqttShared>, Session<St>, Seconds),\n\n Error = MqttError<C::Error>,\n\n InitError = C::InitError,\n\n>\n\nwhere\n\n Io: AsyncRead + AsyncWrite + Unpin + 'static,\n\n C: ServiceFactory<Config = (), Request = Handshake<Io>, Response = HandshakeAck<Io, St>>,\n\n C::Error: fmt::Debug,\n\n{\n", "file_path": "src/v5/server.rs", "rank": 4, "score": 235997.75785951904 }, { "content": "fn encode_connect(connect: &Connect, dst: &mut BytesMut) -> Result<(), EncodeError> {\n\n let Connect {\n\n clean_session,\n\n keep_alive,\n\n ref last_will,\n\n ref client_id,\n\n ref username,\n\n ref password,\n\n } = *connect;\n\n\n\n MQTT.as_ref().encode(dst)?;\n\n\n\n let mut flags = ConnectFlags::empty();\n\n\n\n if username.is_some() {\n\n flags |= ConnectFlags::USERNAME;\n\n }\n\n if password.is_some() {\n\n flags |= ConnectFlags::PASSWORD;\n\n }\n", "file_path": "src/v3/codec/encode.rs", "rank": 5, "score": 202302.53509931418 }, { "content": "fn decode_connect_packet(src: &mut Bytes) -> Result<Packet, DecodeError> {\n\n ensure!(src.remaining() >= 10, DecodeError::InvalidLength);\n\n let len = src.get_u16();\n\n\n\n ensure!(len == 4 && &src.as_ref()[0..4] == MQTT, DecodeError::InvalidProtocol);\n\n src.advance(4);\n\n\n\n 
let level = src.get_u8();\n\n ensure!(level == MQTT_LEVEL_3, DecodeError::UnsupportedProtocolLevel);\n\n\n\n let flags =\n\n ConnectFlags::from_bits(src.get_u8()).ok_or(DecodeError::ConnectReservedFlagSet)?;\n\n\n\n let keep_alive = u16::decode(src)?;\n\n let client_id = ByteString::decode(src)?;\n\n\n\n ensure!(\n\n !client_id.is_empty() || flags.contains(ConnectFlags::CLEAN_START),\n\n DecodeError::InvalidClientId\n\n );\n", "file_path": "src/v3/codec/decode.rs", "rank": 6, "score": 166300.9422396102 }, { "content": "fn decode_connect_ack_packet(src: &mut Bytes) -> Result<Packet, DecodeError> {\n\n ensure!(src.remaining() >= 2, DecodeError::InvalidLength);\n\n let flags =\n\n ConnectAckFlags::from_bits(src.get_u8()).ok_or(DecodeError::ConnAckReservedFlagSet)?;\n\n\n\n let return_code = src.get_u8().try_into()?;\n\n Ok(Packet::ConnectAck {\n\n session_present: flags.contains(ConnectAckFlags::SESSION_PRESENT),\n\n return_code,\n\n })\n\n}\n\n\n", "file_path": "src/v3/codec/decode.rs", "rank": 7, "score": 162418.04305725102 }, { "content": "struct Inner<C> {\n\n control: C,\n\n sink: MqttSink,\n\n inflight: RefCell<HashSet<NonZeroU16>>,\n\n}\n\n\n\nimpl<St, T, C, E> Dispatcher<St, T, C, E>\n\nwhere\n\n T: Service<Request = Publish, Response = (), Error = E>,\n\n C: Service<Request = ControlMessage<E>, Response = ControlResult, Error = MqttError<E>>,\n\n{\n\n pub(crate) fn new(session: Session<St>, publish: T, control: C) -> Self {\n\n let sink = session.sink().clone();\n\n\n\n Self {\n\n session,\n\n publish,\n\n shutdown: Cell::new(false),\n\n inner: Rc::new(Inner { sink, control, inflight: RefCell::new(HashSet::default()) }),\n\n _t: PhantomData,\n", "file_path": "src/v3/dispatcher.rs", "rank": 8, "score": 155033.6319007203 }, { "content": "struct Inner<C> {\n\n control: C,\n\n sink: MqttSink,\n\n info: RefCell<PublishInfo>,\n\n}\n\n\n", "file_path": "src/v5/dispatcher.rs", "rank": 9, "score": 155033.6319007203 }, { "content": "type Handler<E> = 
boxed::BoxService<Publish, (), E>;\n\n\n\n/// Mqtt client with routing capabilities\n\npub struct ClientRouter<Io, Err, PErr> {\n\n builder: RouterBuilder<usize>,\n\n handlers: Vec<Handler<PErr>>,\n\n io: Io,\n\n shared: Rc<MqttShared>,\n\n keepalive: Seconds,\n\n disconnect_timeout: Seconds,\n\n max_receive: usize,\n\n _t: PhantomData<Err>,\n\n}\n\n\n\nimpl<Io, Err, PErr> fmt::Debug for ClientRouter<Io, Err, PErr> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"v3::ClientRouter\")\n\n .field(\"keepalive\", &self.keepalive)\n\n .field(\"disconnect_timeout\", &self.disconnect_timeout)\n\n .field(\"max_receive\", &self.max_receive)\n", "file_path": "src/v3/client/connection.rs", "rank": 10, "score": 152295.3373000391 }, { "content": "fn decode_last_will(src: &mut Bytes, flags: ConnectFlags) -> Result<LastWill, DecodeError> {\n\n let mut will_delay_interval_sec = None;\n\n let mut correlation_data = None;\n\n let mut message_expiry_interval = None;\n\n let mut content_type = None;\n\n let mut user_properties = Vec::new();\n\n let mut is_utf8_payload = None;\n\n let mut response_topic = None;\n\n let prop_src = &mut utils::take_properties(src)?;\n\n while prop_src.has_remaining() {\n\n match prop_src.get_u8() {\n\n pt::WILL_DELAY_INT => will_delay_interval_sec.read_value(prop_src)?,\n\n pt::CORR_DATA => correlation_data.read_value(prop_src)?,\n\n pt::MSG_EXPIRY_INT => message_expiry_interval.read_value(prop_src)?,\n\n pt::CONTENT_TYPE => content_type.read_value(prop_src)?,\n\n pt::UTF8_PAYLOAD => is_utf8_payload.read_value(prop_src)?,\n\n pt::RESP_TOPIC => response_topic.read_value(prop_src)?,\n\n pt::USER => user_properties.push(UserProperty::decode(prop_src)?),\n\n _ => return Err(DecodeError::MalformedPacket),\n\n }\n", "file_path": "src/v5/codec/packet/connect.rs", "rank": 11, "score": 151710.6049502318 }, { "content": "struct Inner<C> {\n\n control: C,\n\n sink: MqttSink,\n\n inflight: 
RefCell<HashSet<NonZeroU16>>,\n\n}\n\n\n\nimpl<T, C, E> Dispatcher<T, C, E>\n\nwhere\n\n T: Service<Request = Publish, Response = Either<(), Publish>, Error = E>,\n\n C: Service<Request = ControlMessage<E>, Response = ControlResult, Error = MqttError<E>>,\n\n{\n\n pub(crate) fn new(sink: MqttSink, publish: T, control: C) -> Self {\n\n Self {\n\n publish,\n\n sink: sink.clone(),\n\n shutdown: Cell::new(false),\n\n inner: Rc::new(Inner { sink, control, inflight: RefCell::new(HashSet::default()) }),\n\n _t: PhantomData,\n\n }\n\n }\n", "file_path": "src/v3/client/dispatcher.rs", "rank": 12, "score": 150599.19456564347 }, { "content": "struct Inner<C> {\n\n control: C,\n\n sink: MqttSink,\n\n info: RefCell<PublishInfo>,\n\n}\n\n\n", "file_path": "src/v5/client/dispatcher.rs", "rank": 13, "score": 150599.19456564347 }, { "content": "type Handler<E> = boxed::BoxService<Publish, PublishAck, E>;\n\n\n\n/// Mqtt client with routing capabilities\n\npub struct ClientRouter<Io, Err, PErr> {\n\n builder: RouterBuilder<usize>,\n\n handlers: Vec<Handler<PErr>>,\n\n io: Io,\n\n shared: Rc<MqttShared>,\n\n keepalive: Seconds,\n\n disconnect_timeout: Seconds,\n\n max_receive: usize,\n\n _t: marker::PhantomData<Err>,\n\n}\n\n\n\nimpl<Io, Err, PErr> fmt::Debug for ClientRouter<Io, Err, PErr> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_struct(\"v5::ClientRouter\")\n\n .field(\"keepalive\", &self.keepalive)\n\n .field(\"disconnect_timeout\", &self.disconnect_timeout)\n\n .field(\"max_receive\", &self.max_receive)\n", "file_path": "src/v5/client/connection.rs", "rank": 14, "score": 142782.10747081292 }, { "content": "fn dispatch<Err, PErr>(\n\n router: Router<usize>,\n\n handlers: Vec<Handler<PErr>>,\n\n) -> impl Service<Request = Publish, Response = Either<(), Publish>, Error = Err>\n\nwhere\n\n PErr: 'static,\n\n Err: From<PErr>,\n\n{\n\n into_service(move |mut req: Publish| {\n\n if let Some((idx, _info)) = router.recognize(req.topic_mut()) {\n\n // 
exec handler\n\n let fut = call(req, &handlers[*idx]);\n\n Either::Left(async move { fut.await })\n\n } else {\n\n Either::Right(Ready::<_, Err>::Ok(Either::Right(req)))\n\n }\n\n })\n\n}\n\n\n", "file_path": "src/v3/client/connection.rs", "rank": 15, "score": 139882.10581904647 }, { "content": "fn dispatch<Err, PErr>(\n\n router: Router<usize>,\n\n handlers: Vec<Handler<PErr>>,\n\n) -> impl Service<Request = Publish, Response = Either<Publish, PublishAck>, Error = Err>\n\nwhere\n\n PErr: 'static,\n\n PublishAck: TryFrom<PErr, Error = Err>,\n\n{\n\n let aliases: RefCell<HashMap<NonZeroU16, (usize, Path<ByteString>)>> =\n\n RefCell::new(HashMap::default());\n\n\n\n into_service(move |mut req: Publish| {\n\n if !req.publish_topic().is_empty() {\n\n if let Some((idx, _info)) = router.recognize(req.topic_mut()) {\n\n // save info for topic alias\n\n if let Some(alias) = req.packet().properties.topic_alias {\n\n aliases.borrow_mut().insert(alias, (*idx, req.topic().clone()));\n\n }\n\n\n\n // exec handler\n", "file_path": "src/v5/client/connection.rs", "rank": 16, "score": 139882.10581904647 }, { "content": "#[inline]\n\nfn decode_ack(mut src: Bytes, f: impl Fn(NonZeroU16) -> Packet) -> Result<Packet, DecodeError> {\n\n let packet_id = NonZeroU16::decode(&mut src)?;\n\n ensure!(!src.has_remaining(), DecodeError::InvalidLength);\n\n Ok(f(packet_id))\n\n}\n\n\n", "file_path": "src/v3/codec/decode.rs", "rank": 17, "score": 139682.76682003657 }, { "content": "fn decode_unsubscribe_packet(src: &mut Bytes) -> Result<Packet, DecodeError> {\n\n let packet_id = NonZeroU16::decode(src)?;\n\n let mut topic_filters = Vec::new();\n\n while src.remaining() > 0 {\n\n topic_filters.push(ByteString::decode(src)?);\n\n }\n\n Ok(Packet::Unsubscribe { packet_id, topic_filters })\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::utils::decode_variable_length;\n\n use crate::v3::codec::ConnectAckReason;\n\n\n\n macro_rules! 
assert_decode_packet (\n\n ($bytes:expr, $res:expr) => {{\n\n let first_byte = $bytes.as_ref()[0];\n\n let (_len, consumed) = decode_variable_length(&$bytes[1..]).unwrap().unwrap();\n\n let cur = Bytes::from_static(&$bytes[consumed + 1..]);\n", "file_path": "src/v3/codec/decode.rs", "rank": 18, "score": 139593.4824513308 }, { "content": "fn decode_subscribe_packet(src: &mut Bytes) -> Result<Packet, DecodeError> {\n\n let packet_id = NonZeroU16::decode(src)?;\n\n let mut topic_filters = Vec::new();\n\n while src.has_remaining() {\n\n let topic = ByteString::decode(src)?;\n\n ensure!(src.remaining() >= 1, DecodeError::InvalidLength);\n\n let qos = (src.get_u8() & 0b0000_0011).try_into()?;\n\n topic_filters.push((topic, qos));\n\n }\n\n\n\n Ok(Packet::Subscribe { packet_id, topic_filters })\n\n}\n\n\n", "file_path": "src/v3/codec/decode.rs", "rank": 19, "score": 139593.4824513308 }, { "content": "fn decode_subscribe_ack_packet(src: &mut Bytes) -> Result<Packet, DecodeError> {\n\n let packet_id = NonZeroU16::decode(src)?;\n\n let mut status = Vec::with_capacity(src.len());\n\n for code in src.as_ref().iter() {\n\n status.push(if *code == 0x80 {\n\n SubscribeReturnCode::Failure\n\n } else {\n\n SubscribeReturnCode::Success(QoS::try_from(*code)?)\n\n });\n\n }\n\n Ok(Packet::SubscribeAck { packet_id, status })\n\n}\n\n\n", "file_path": "src/v3/codec/decode.rs", "rank": 20, "score": 136498.02397224004 }, { "content": "#[derive(Debug)]\n\nstruct Error;\n\n\n\nimpl std::convert::TryFrom<Error> for v5::PublishAck {\n\n type Error = Error;\n\n\n\n fn try_from(err: Error) -> Result<Self, Self::Error> {\n\n Err(err)\n\n }\n\n}\n\n\n\nasync fn publish(pkt: v5::Publish) -> Result<v5::PublishAck, Error> {\n\n log::info!(\n\n \"incoming publish: {:?} -> {:?} payload {:?}\",\n\n pkt.id(),\n\n pkt.topic(),\n\n pkt.payload()\n\n );\n\n Ok(pkt.ack())\n\n}\n\n\n", "file_path": "examples/client.rs", "rank": 21, "score": 130069.42729103129 }, { "content": "fn decode_publish_packet(src: 
&mut Bytes, packet_flags: u8) -> Result<Packet, DecodeError> {\n\n let topic = ByteString::decode(src)?;\n\n let qos = QoS::try_from((packet_flags & 0b0110) >> 1)?;\n\n let packet_id = if qos == QoS::AtMostOnce {\n\n None\n\n } else {\n\n Some(NonZeroU16::decode(src)?) // packet id = 0 encountered\n\n };\n\n\n\n Ok(Packet::Publish(Publish {\n\n dup: (packet_flags & 0b1000) == 0b1000,\n\n qos,\n\n retain: (packet_flags & 0b0001) == 0b0001,\n\n topic,\n\n packet_id,\n\n payload: src.split_off(0),\n\n }))\n\n}\n\n\n", "file_path": "src/v3/codec/decode.rs", "rank": 22, "score": 129106.44696831987 }, { "content": "#[derive(Debug)]\n\nstruct Error;\n\n\n\nimpl std::convert::TryFrom<Error> for v5::PublishAck {\n\n type Error = Error;\n\n\n\n fn try_from(err: Error) -> Result<Self, Self::Error> {\n\n Err(err)\n\n }\n\n}\n\n\n\n#[ntex::main]\n\nasync fn main() -> std::io::Result<()> {\n\n std::env::set_var(\"RUST_LOG\", \"ntex=info,ntex_mqtt=trace,subs_client=trace\");\n\n env_logger::init();\n\n\n\n // connect to server\n\n let client = v5::client::MqttConnector::new(\"127.0.0.1:1883\")\n\n .client_id(\"my-client-id\")\n\n .keep_alive(Seconds(30))\n\n .connect()\n", "file_path": "examples/subs_client.rs", "rank": 23, "score": 126553.61092207872 }, { "content": "struct St;\n\n\n\nasync fn handshake<Io>(mut packet: Handshake<Io>) -> Result<HandshakeAck<Io, St>, ()> {\n\n packet.packet();\n\n packet.packet_mut();\n\n packet.io();\n\n packet.sink();\n\n Ok(packet.ack(St, false).idle_timeout(Seconds(16)))\n\n}\n\n\n\n#[ntex::test]\n\nasync fn test_simple() -> std::io::Result<()> {\n\n let srv = server::test_server(|| MqttServer::new(handshake).publish(|_t| ok(())).finish());\n\n\n\n // connect to server\n\n let client =\n\n client::MqttConnector::new(srv.addr()).client_id(\"user\").connect().await.unwrap();\n\n\n\n let sink = client.sink();\n\n\n", "file_path": "tests/test_server.rs", "rank": 24, "score": 126116.72659515326 }, { "content": "struct St;\n\n\n", "file_path": 
"tests/test_server_both.rs", "rank": 25, "score": 126116.72659515326 }, { "content": "struct St;\n\n\n", "file_path": "tests/test_server_v5.rs", "rank": 26, "score": 122888.79123433754 }, { "content": "type HandlerService<E> = BoxService<Publish, (), E>;\n\n\n\n/// Router - structure that follows the builder pattern\n\n/// for building publish packet router instances for mqtt server.\n\npub struct Router<S, Err> {\n\n router: RouterBuilder<usize>,\n\n handlers: Vec<Handler<S, Err>>,\n\n default: Handler<S, Err>,\n\n}\n\n\n\nimpl<S, Err> Router<S, Err>\n\nwhere\n\n S: Clone + 'static,\n\n Err: 'static,\n\n{\n\n /// Create mqtt application router.\n\n ///\n\n /// Default service to be used if no matching resource could be found.\n\n pub fn new<F, U: 'static>(default_service: F) -> Self\n\n where\n", "file_path": "src/v3/router.rs", "rank": 27, "score": 121359.76502698836 }, { "content": "#[derive(Copy, Clone, Debug)]\n\nenum IoDispatcherState {\n\n Processing,\n\n Stop,\n\n Shutdown,\n\n}\n\n\n\npub(crate) enum IoDispatcherError<S, U> {\n\n None,\n\n KeepAlive,\n\n Encoder(U),\n\n Service(S),\n\n}\n\n\n\nimpl<S, U> From<Either<S, U>> for IoDispatcherError<S, U> {\n\n fn from(err: Either<S, U>) -> Self {\n\n match err {\n\n Either::Left(err) => IoDispatcherError::Service(err),\n\n Either::Right(err) => IoDispatcherError::Encoder(err),\n\n }\n\n }\n", "file_path": "src/io.rs", "rank": 28, "score": 121256.76955521015 }, { "content": "type Response<U> = <U as Encoder>::Item;\n\n\n\npin_project_lite::pin_project! 
{\n\n /// Dispatcher for mqtt protocol\n\n pub(crate) struct Dispatcher<S, U>\n\n where\n\n S: Service<Request = DispatchItem<U>, Response = Option<Response<U>>>,\n\n S::Error: 'static,\n\n S::Future: 'static,\n\n U: Encoder,\n\n U: Decoder,\n\n <U as Encoder>::Item: 'static,\n\n {\n\n service: S,\n\n codec: U,\n\n state: State,\n\n inner: Rc<RefCell<DispatcherState<S, U>>>,\n\n st: IoDispatcherState,\n\n pool: Pool,\n\n timer: Timer,\n\n updated: time::Instant,\n\n keepalive_timeout: Seconds,\n\n #[pin]\n\n response: Option<S::Future>,\n\n response_idx: usize,\n\n }\n\n}\n\n\n", "file_path": "src/io.rs", "rank": 29, "score": 118889.25646004739 }, { "content": "type ServerFactory<Io, Err, InitErr> = boxed::BoxServiceFactory<\n\n (),\n\n SelectItem<Io>,\n\n Either<SelectItem<Io>, ()>,\n\n MqttError<Err>,\n\n InitErr,\n\n>;\n\n\n", "file_path": "src/v5/selector.rs", "rank": 30, "score": 116900.82397909179 }, { "content": "type ServerFactory<Io, Err, InitErr> = boxed::BoxServiceFactory<\n\n (),\n\n SelectItem<Io>,\n\n Either<SelectItem<Io>, ()>,\n\n MqttError<Err>,\n\n InitErr,\n\n>;\n\n\n", "file_path": "src/v3/selector.rs", "rank": 31, "score": 116900.82397909179 }, { "content": "#[derive(Debug, Clone, Copy)]\n\nenum DecodeState {\n\n FrameHeader,\n\n Frame(FixedHeader),\n\n}\n\n\n\nimpl Codec {\n\n /// Create `Codec` instance\n\n pub fn new() -> Self {\n\n Codec { state: Cell::new(DecodeState::FrameHeader), max_size: Cell::new(0) }\n\n }\n\n\n\n /// Set max inbound frame size.\n\n ///\n\n /// If max size is set to `0`, size is unlimited.\n\n /// By default max size is set to `0`\n\n pub fn max_size(self, size: u32) -> Self {\n\n self.max_size.set(size);\n\n self\n\n }\n\n\n", "file_path": "src/v3/codec/codec.rs", "rank": 32, "score": 115980.35430813857 }, { "content": "#[derive(Debug, Clone, Copy)]\n\nenum DecodeState {\n\n FrameHeader,\n\n Frame(FixedHeader),\n\n}\n\n\n\nimpl Codec {\n\n /// Create `Codec` instance\n\n pub fn new() -> Self {\n\n Codec {\n\n state: 
Cell::new(DecodeState::FrameHeader),\n\n max_in_size: Cell::new(0),\n\n max_out_size: Cell::new(0),\n\n flags: Cell::new(CodecFlags::empty()),\n\n }\n\n }\n\n\n\n /// Set max inbound frame size.\n\n ///\n\n /// If max size is set to `0`, size is unlimited.\n\n /// By default max size is set to `0`\n", "file_path": "src/v5/codec/codec.rs", "rank": 33, "score": 115980.35430813857 }, { "content": "fn parse_publish_properties(src: &mut Bytes) -> Result<PublishProperties, DecodeError> {\n\n let prop_src = &mut utils::take_properties(src)?;\n\n\n\n let mut message_expiry_interval = None;\n\n let mut topic_alias = None;\n\n let mut content_type = None;\n\n let mut correlation_data = None;\n\n let mut subscription_ids = None;\n\n let mut response_topic = None;\n\n let mut is_utf8_payload = None;\n\n let mut user_props = Vec::new();\n\n\n\n while prop_src.has_remaining() {\n\n match prop_src.get_u8() {\n\n pt::UTF8_PAYLOAD => is_utf8_payload.read_value(prop_src)?,\n\n pt::MSG_EXPIRY_INT => message_expiry_interval.read_value(prop_src)?,\n\n pt::CONTENT_TYPE => content_type.read_value(prop_src)?,\n\n pt::RESP_TOPIC => response_topic.read_value(prop_src)?,\n\n pt::CORR_DATA => correlation_data.read_value(prop_src)?,\n\n pt::SUB_ID => {\n", "file_path": "src/v5/codec/packet/publish.rs", "rank": 34, "score": 115428.85665884026 }, { "content": "type HandlerService<E> = BoxService<Publish, PublishAck, E>;\n\n\n\n/// Router - structure that follows the builder pattern\n\n/// for building publish packet router instances for mqtt server.\n\npub struct Router<S, Err> {\n\n router: RouterBuilder<usize>,\n\n handlers: Vec<Handler<S, Err>>,\n\n default: Handler<S, Err>,\n\n}\n\n\n\nimpl<S, Err> Router<S, Err>\n\nwhere\n\n S: Clone + 'static,\n\n Err: 'static,\n\n{\n\n /// Create mqtt application router.\n\n ///\n\n /// Default service to be used if no matching resource could be found.\n\n pub fn new<F, U: 'static>(default_service: F) -> Self\n\n where\n", "file_path": "src/v5/router.rs", 
"rank": 35, "score": 114294.6030171903 }, { "content": "type Handler<S, E> = BoxServiceFactory<S, Publish, (), E, E>;\n", "file_path": "src/v3/router.rs", "rank": 36, "score": 113369.77625968533 }, { "content": "type Server<Io, Err> =\n\n boxed::BoxService<SelectItem<Io>, Either<SelectItem<Io>, ()>, MqttError<Err>>;\n\n\n\n/// Mqtt server selector\n\n///\n\n/// Selector allows to choose different mqtt server impls depends on\n\n/// connectt packet.\n\npub struct Selector<Io, Err, InitErr> {\n\n servers: Vec<ServerFactory<Io, Err, InitErr>>,\n\n max_size: u32,\n\n handshake_timeout: Seconds,\n\n pool: Rc<MqttSinkPool>,\n\n _t: marker::PhantomData<(Io, Err, InitErr)>,\n\n}\n\n\n\nimpl<Io, Err, InitErr> Selector<Io, Err, InitErr> {\n\n #[allow(clippy::new_without_default)]\n\n pub fn new() -> Self {\n\n Selector {\n\n servers: Vec::new(),\n", "file_path": "src/v3/selector.rs", "rank": 37, "score": 113148.45247358337 }, { "content": "type Server<Io, Err> =\n\n boxed::BoxService<SelectItem<Io>, Either<SelectItem<Io>, ()>, MqttError<Err>>;\n\n\n\n/// Mqtt server selector\n\n///\n\n/// Selector allows to choose different mqtt server impls depends on\n\n/// connectt packet.\n\npub struct Selector<Io, Err, InitErr> {\n\n servers: Vec<ServerFactory<Io, Err, InitErr>>,\n\n max_size: u32,\n\n handshake_timeout: Seconds,\n\n pool: Rc<MqttSinkPool>,\n\n _t: marker::PhantomData<(Io, Err, InitErr)>,\n\n}\n\n\n\nimpl<Io, Err, InitErr> Selector<Io, Err, InitErr> {\n\n #[allow(clippy::new_without_default)]\n\n pub fn new() -> Self {\n\n Selector {\n\n servers: Vec::new(),\n", "file_path": "src/v5/selector.rs", "rank": 38, "score": 113148.45247358337 }, { "content": "struct SessionInner<T, St> {\n\n st: St,\n\n sink: T,\n\n max_receive: u16,\n\n max_topic_alias: u16,\n\n}\n\n\n\nimpl<T, St> Clone for Session<T, St> {\n\n #[inline]\n\n fn clone(&self) -> Self {\n\n Session(self.0.clone())\n\n }\n\n}\n\n\n\nimpl<T, St> Session<T, St> {\n\n pub(crate) fn new(st: St, sink: T) -> Self 
{\n\n Session(Rc::new(SessionInner { st, sink, max_receive: 0, max_topic_alias: 0 }))\n\n }\n\n\n\n pub(crate) fn new_v5(st: St, sink: T, max_receive: u16, max_topic_alias: u16) -> Self {\n", "file_path": "src/session.rs", "rank": 39, "score": 111891.37108733083 }, { "content": "type Handler<S, E> = BoxServiceFactory<S, Publish, PublishAck, E, E>;\n", "file_path": "src/v5/router.rs", "rank": 41, "score": 106804.88172480489 }, { "content": "fn pkt_publish() -> codec::Publish {\n\n codec::Publish {\n\n dup: false,\n\n retain: false,\n\n qos: codec::QoS::AtLeastOnce,\n\n topic: ByteString::from(\"test\"),\n\n packet_id: Some(NonZeroU16::new(1).unwrap()),\n\n payload: Bytes::new(),\n\n properties: Default::default(),\n\n }\n\n}\n\n\n\nasync fn handshake<Io>(packet: Handshake<Io>) -> Result<HandshakeAck<Io, St>, TestError> {\n\n Ok(packet.ack(St))\n\n}\n\n\n\n#[ntex::test]\n\nasync fn test_simple() -> std::io::Result<()> {\n\n let srv = server::test_server(|| {\n\n MqttServer::new(handshake).publish(|p: Publish| ok::<_, TestError>(p.ack())).finish()\n", "file_path": "tests/test_server_v5.rs", "rank": 42, "score": 106598.31821831912 }, { "content": "#[derive(Debug)]\n\nstruct MyServerError;\n\n\n\nimpl From<()> for MyServerError {\n\n fn from(_: ()) -> Self {\n\n MyServerError\n\n }\n\n}\n\n\n\nimpl std::convert::TryFrom<MyServerError> for PublishAck {\n\n type Error = MyServerError;\n\n\n\n fn try_from(err: MyServerError) -> Result<Self, Self::Error> {\n\n Err(err)\n\n }\n\n}\n\n\n\nasync fn handshake<Io>(\n\n handshake: v5::Handshake<Io>,\n\n) -> Result<v5::HandshakeAck<Io, MySession>, MyServerError> {\n\n log::info!(\"new connection: {:?}\", handshake);\n", "file_path": "examples/subs.rs", "rank": 43, "score": 100451.22633937323 }, { "content": "#[derive(Debug)]\n\nstruct ServerError;\n\n\n\nimpl From<()> for ServerError {\n\n fn from(_: ()) -> Self {\n\n ServerError\n\n }\n\n}\n\n\n\nimpl std::convert::TryFrom<ServerError> for v5::PublishAck {\n\n type Error = 
ServerError;\n\n\n\n fn try_from(err: ServerError) -> Result<Self, Self::Error> {\n\n Err(err)\n\n }\n\n}\n\n\n\nasync fn handshake_v3(\n\n handshake: v3::Handshake<SslStream<TcpStream>>,\n\n) -> Result<v3::HandshakeAck<SslStream<TcpStream>, Session>, ServerError> {\n\n log::info!(\"new connection: {:?}\", handshake);\n", "file_path": "examples/openssl.rs", "rank": 44, "score": 100451.22633937323 }, { "content": "#[derive(Debug)]\n\nstruct ServerError;\n\n\n\nimpl From<()> for ServerError {\n\n fn from(_: ()) -> Self {\n\n ServerError\n\n }\n\n}\n\n\n\nimpl std::convert::TryFrom<ServerError> for v5::PublishAck {\n\n type Error = ServerError;\n\n\n\n fn try_from(err: ServerError) -> Result<Self, Self::Error> {\n\n Err(err)\n\n }\n\n}\n\n\n\nasync fn handshake_v3<Io>(\n\n handshake: v3::Handshake<Io>,\n\n) -> Result<v3::HandshakeAck<Io, Session>, ServerError> {\n\n log::info!(\"new connection: {:?}\", handshake);\n", "file_path": "examples/basic.rs", "rank": 45, "score": 100451.22633937323 }, { "content": "#[derive(Debug)]\n\nstruct ServerError;\n\n\n\nimpl From<()> for ServerError {\n\n fn from(_: ()) -> Self {\n\n ServerError\n\n }\n\n}\n\n\n\nimpl std::convert::TryFrom<ServerError> for v5::PublishAck {\n\n type Error = ServerError;\n\n\n\n fn try_from(err: ServerError) -> Result<Self, Self::Error> {\n\n Err(err)\n\n }\n\n}\n\n\n\nasync fn handshake_v3(\n\n handshake: v3::Handshake<TlsStream<TcpStream>>,\n\n) -> Result<v3::HandshakeAck<TlsStream<TcpStream>, Session>, ServerError> {\n\n log::info!(\"new connection: {:?}\", handshake);\n", "file_path": "examples/rustls.rs", "rank": 46, "score": 100451.22633937323 }, { "content": "#[derive(Debug)]\n\nstruct MyServerError;\n\n\n\nimpl From<()> for MyServerError {\n\n fn from(_: ()) -> Self {\n\n MyServerError\n\n }\n\n}\n\n\n\nimpl std::convert::TryFrom<MyServerError> for v5::PublishAck {\n\n type Error = MyServerError;\n\n\n\n fn try_from(err: MyServerError) -> Result<Self, Self::Error> {\n\n Err(err)\n\n 
}\n\n}\n\n\n\nasync fn handshake_v3<Io>(\n\n handshake: v3::Handshake<Io>,\n\n) -> Result<v3::HandshakeAck<Io, MySession>, MyServerError> {\n\n log::info!(\"new connection: {:?}\", handshake);\n", "file_path": "examples/session.rs", "rank": 47, "score": 100451.22633937323 }, { "content": "#[derive(Debug)]\n\nstruct TestError;\n\n\n\nimpl From<()> for TestError {\n\n fn from(_: ()) -> Self {\n\n TestError\n\n }\n\n}\n\n\n\nimpl TryFrom<TestError> for v5::PublishAck {\n\n type Error = TestError;\n\n\n\n fn try_from(err: TestError) -> Result<Self, Self::Error> {\n\n Err(err)\n\n }\n\n}\n\n\n\n#[ntex::test]\n\nasync fn test_simple() -> std::io::Result<()> {\n\n let srv = server::test_server(|| {\n\n MqttServer::new()\n", "file_path": "tests/test_server_both.rs", "rank": 48, "score": 97229.4140227704 }, { "content": "fn control_service_factory() -> impl ServiceFactory<\n\n Config = Session<MySession>,\n\n Request = ControlMessage<MyServerError>,\n\n Response = ControlResult,\n\n Error = MyServerError,\n\n InitError = MyServerError,\n\n> {\n\n fn_factory_with_config(|session: Session<MySession>| {\n\n Ready::Ok(fn_service(move |control| match control {\n\n v5::ControlMessage::Auth(a) => Ready::Ok(a.ack(v5::codec::Auth::default())),\n\n v5::ControlMessage::Error(e) => {\n\n Ready::Ok(e.ack(v5::codec::DisconnectReasonCode::UnspecifiedError))\n\n }\n\n v5::ControlMessage::ProtocolError(e) => Ready::Ok(e.ack()),\n\n v5::ControlMessage::Ping(p) => Ready::Ok(p.ack()),\n\n v5::ControlMessage::Disconnect(d) => Ready::Ok(d.ack()),\n\n v5::ControlMessage::Subscribe(mut s) => {\n\n // store subscribed topics in session, publish service uses this list for echos\n\n s.iter_mut().for_each(|mut s| {\n\n session.subscriptions.borrow_mut().push(s.topic().clone());\n", "file_path": "examples/subs.rs", "rank": 49, "score": 97153.73152982925 }, { "content": "struct PublishInfo {\n\n inflight: HashSet<num::NonZeroU16>,\n\n aliases: HashSet<num::NonZeroU16>,\n\n}\n\n\n\nimpl<T, C, E, E2> 
Dispatcher<T, C, E, E2>\n\nwhere\n\n T: Service<Request = Publish, Response = PublishAck, Error = E2>,\n\n PublishAck: TryFrom<E2, Error = E>,\n\n C: Service<Request = ControlMessage<E>, Response = ControlResult, Error = MqttError<E>>,\n\n{\n\n fn new(\n\n sink: MqttSink,\n\n max_receive: usize,\n\n max_topic_alias: u16,\n\n publish: T,\n\n control: C,\n\n ) -> Self {\n\n Self {\n\n publish,\n", "file_path": "src/v5/dispatcher.rs", "rank": 50, "score": 96846.03098583898 }, { "content": "#[derive(Debug)]\n\nstruct TestError;\n\n\n\nimpl From<()> for TestError {\n\n fn from(_: ()) -> Self {\n\n TestError\n\n }\n\n}\n\n\n\nimpl TryFrom<TestError> for PublishAck {\n\n type Error = TestError;\n\n\n\n fn try_from(err: TestError) -> Result<Self, Self::Error> {\n\n Err(err)\n\n }\n\n}\n\n\n", "file_path": "tests/test_server_v5.rs", "rank": 51, "score": 94266.20745038129 }, { "content": "struct PublishInfo {\n\n inflight: HashSet<NonZeroU16>,\n\n aliases: HashSet<NonZeroU16>,\n\n}\n\n\n\nimpl<T, C, E> Dispatcher<T, C, E>\n\nwhere\n\n T: Service<Request = Publish, Response = Either<Publish, PublishAck>, Error = E>,\n\n C: Service<Request = ControlMessage<E>, Response = ControlResult, Error = MqttError<E>>,\n\n{\n\n fn new(\n\n sink: MqttSink,\n\n max_receive: usize,\n\n max_topic_alias: u16,\n\n publish: T,\n\n control: C,\n\n ) -> Self {\n\n Self {\n\n publish,\n\n max_receive,\n", "file_path": "src/v5/client/dispatcher.rs", "rank": 52, "score": 93898.21094839381 }, { "content": "fn call<S, Err>(\n\n req: Publish,\n\n srv: &S,\n\n) -> impl Future<Output = Result<Either<Publish, PublishAck>, Err>>\n\nwhere\n\n S: Service<Request = Publish, Response = PublishAck>,\n\n PublishAck: TryFrom<S::Error, Error = Err>,\n\n{\n\n let fut = srv.call(req);\n\n\n\n async move {\n\n match fut.await {\n\n Ok(ack) => Ok(Either::Right(ack)),\n\n Err(err) => match PublishAck::try_from(err) {\n\n Ok(ack) => Ok(Either::Right(ack)),\n\n Err(err) => Err(err),\n\n },\n\n }\n\n }\n\n}\n", 
"file_path": "src/v5/client/connection.rs", "rank": 53, "score": 88143.55050655198 }, { "content": "fn call<S, Err, PErr>(\n\n req: Publish,\n\n srv: &S,\n\n) -> impl Future<Output = Result<Either<(), Publish>, Err>>\n\nwhere\n\n S: Service<Request = Publish, Response = (), Error = PErr>,\n\n Err: From<PErr>,\n\n{\n\n let fut = srv.call(req);\n\n\n\n async move {\n\n match fut.await {\n\n Ok(_) => Ok(Either::Left(())),\n\n Err(err) => Err(err.into()),\n\n }\n\n }\n\n}\n\n\n\nasync fn keepalive(sink: MqttSink, timeout: Seconds) {\n\n log::debug!(\"start mqtt client keep-alive task\");\n", "file_path": "src/v3/client/connection.rs", "rank": 54, "score": 81744.32065771734 }, { "content": "use ntex::util::{ByteString, Bytes};\n\n\n\nuse super::{packet::*, UserProperty};\n\nuse crate::error::DecodeError;\n\nuse crate::types::packet_type;\n\nuse crate::utils::Decode;\n\n\n\npub(super) fn decode_packet(mut src: Bytes, first_byte: u8) -> Result<Packet, DecodeError> {\n\n match first_byte {\n\n packet_type::PUBLISH_START..=packet_type::PUBLISH_END => {\n\n Ok(Packet::Publish(Publish::decode(src, first_byte & 0b0000_1111)?))\n\n }\n\n packet_type::PUBACK => Ok(Packet::PublishAck(PublishAck::decode(&mut src)?)),\n\n packet_type::PINGREQ => Ok(Packet::PingRequest),\n\n packet_type::PINGRESP => Ok(Packet::PingResponse),\n\n packet_type::SUBSCRIBE => Ok(Packet::Subscribe(Subscribe::decode(&mut src)?)),\n\n packet_type::SUBACK => Ok(Packet::SubscribeAck(SubscribeAck::decode(&mut src)?)),\n\n packet_type::UNSUBSCRIBE => Ok(Packet::Unsubscribe(Unsubscribe::decode(&mut src)?)),\n\n packet_type::UNSUBACK => Ok(Packet::UnsubscribeAck(UnsubscribeAck::decode(&mut src)?)),\n\n packet_type::CONNECT => Ok(Packet::Connect(Box::new(Connect::decode(&mut src)?))),\n", "file_path": "src/v5/codec/decode.rs", "rank": 55, "score": 75738.32714379615 }, { "content": "use std::{convert::TryFrom, convert::TryInto, num::NonZeroU16};\n\n\n\nuse ntex::util::{Buf, ByteString, Bytes};\n\n\n\nuse 
crate::error::DecodeError;\n\nuse crate::types::{packet_type, QoS, MQTT, MQTT_LEVEL_3, WILL_QOS_SHIFT};\n\nuse crate::utils::Decode;\n\n\n\nuse super::packet::{Connect, LastWill, Packet, Publish, SubscribeReturnCode};\n\nuse super::{ConnectAckFlags, ConnectFlags};\n\n\n\npub(crate) fn decode_packet(mut src: Bytes, first_byte: u8) -> Result<Packet, DecodeError> {\n\n match first_byte {\n\n packet_type::CONNECT => decode_connect_packet(&mut src),\n\n packet_type::CONNACK => decode_connect_ack_packet(&mut src),\n\n packet_type::PUBLISH_START..=packet_type::PUBLISH_END => {\n\n decode_publish_packet(&mut src, first_byte & 0b0000_1111)\n\n }\n\n packet_type::PUBACK => decode_ack(src, |packet_id| Packet::PublishAck { packet_id }),\n\n packet_type::PUBREC => {\n", "file_path": "src/v3/codec/decode.rs", "rank": 56, "score": 75737.6735414802 }, { "content": " session_expiry_interval_secs: None,\n\n auth_method: None,\n\n auth_data: None,\n\n request_problem_info: true,\n\n request_response_info: false,\n\n receive_max: None,\n\n topic_alias_max: 0,\n\n user_properties: Vec::new(),\n\n max_packet_size: None,\n\n })\n\n );\n\n\n\n assert_eq!(\n\n Connect::decode(&mut Bytes::from_static(b\"\\x00\\x02MQ00000000000000000000\")),\n\n Err(DecodeError::InvalidProtocol),\n\n );\n\n assert_eq!(\n\n Connect::decode(&mut Bytes::from_static(b\"\\x00\\x04MQAA00000000000000000000\")),\n\n Err(DecodeError::InvalidProtocol),\n\n );\n", "file_path": "src/v5/codec/decode.rs", "rank": 57, "score": 75734.83358499201 }, { "content": " })))\n\n );\n\n\n\n assert_eq!(\n\n decode_connect_packet(&mut Bytes::from_static(b\"\\x00\\x02MQ00000000000000000000\")),\n\n Err(DecodeError::InvalidProtocol),\n\n );\n\n assert_eq!(\n\n decode_connect_packet(&mut Bytes::from_static(b\"\\x00\\x10MQ00000000000000000000\")),\n\n Err(DecodeError::InvalidProtocol),\n\n );\n\n assert_eq!(\n\n decode_connect_packet(&mut Bytes::from_static(b\"\\x00\\x04MQAA00000000000000000000\")),\n\n 
Err(DecodeError::InvalidProtocol),\n\n );\n\n assert_eq!(\n\n decode_connect_packet(&mut Bytes::from_static(\n\n b\"\\x00\\x04MQTT\\x0300000000000000000000\"\n\n )),\n\n Err(DecodeError::UnsupportedProtocolLevel),\n", "file_path": "src/v3/codec/decode.rs", "rank": 58, "score": 75733.95168036834 }, { "content": " packet_type::CONNACK => Ok(Packet::ConnectAck(Box::new(ConnectAck::decode(&mut src)?))),\n\n packet_type::DISCONNECT => Ok(Packet::Disconnect(Disconnect::decode(&mut src)?)),\n\n packet_type::AUTH => Ok(Packet::Auth(Auth::decode(&mut src)?)),\n\n packet_type::PUBREC => Ok(Packet::PublishReceived(PublishAck::decode(&mut src)?)),\n\n packet_type::PUBREL => Ok(Packet::PublishRelease(PublishAck2::decode(&mut src)?)),\n\n packet_type::PUBCOMP => Ok(Packet::PublishComplete(PublishAck2::decode(&mut src)?)),\n\n _ => Err(DecodeError::UnsupportedPacketType),\n\n }\n\n}\n\n\n\nimpl Decode for UserProperty {\n\n fn decode(src: &mut Bytes) -> Result<Self, DecodeError> {\n\n let key = ByteString::decode(src)?;\n\n let val = ByteString::decode(src)?;\n\n Ok((key, val))\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "src/v5/codec/decode.rs", "rank": 59, "score": 75733.67209309404 }, { "content": " assert_eq!(\n\n Connect::decode(&mut Bytes::from_static(b\"\\x00\\x04MQTT\\x0300000000000000000000\")),\n\n Err(DecodeError::UnsupportedProtocolLevel),\n\n );\n\n assert_eq!(\n\n Connect::decode(&mut Bytes::from_static(\n\n b\"\\x00\\x04MQTT\\x05\\xff00000000000000000000\"\n\n )),\n\n Err(DecodeError::ConnectReservedFlagSet)\n\n );\n\n\n\n assert_eq!(\n\n ConnectAck::decode(&mut Bytes::from_static(b\"\\x01\\x86\\x00\")),\n\n Ok(ConnectAck {\n\n session_present: true,\n\n reason_code: ConnectAckReason::BadUserNameOrPassword,\n\n ..ConnectAck::default()\n\n })\n\n );\n\n\n", "file_path": "src/v5/codec/decode.rs", "rank": 60, "score": 75732.89625760251 }, { "content": " );\n\n assert_eq!(\n\n decode_connect_packet(&mut Bytes::from_static(\n\n 
b\"\\x00\\x04MQTT\\x04\\xff00000000000000000000\"\n\n )),\n\n Err(DecodeError::ConnectReservedFlagSet)\n\n );\n\n\n\n assert_eq!(\n\n decode_connect_ack_packet(&mut Bytes::from_static(b\"\\x01\\x04\")),\n\n Ok(Packet::ConnectAck {\n\n session_present: true,\n\n return_code: ConnectAckReason::BadUserNameOrPassword\n\n })\n\n );\n\n\n\n assert_eq!(\n\n decode_connect_ack_packet(&mut Bytes::from_static(b\"\\x03\\x04\")),\n\n Err(DecodeError::ConnAckReservedFlagSet)\n\n );\n", "file_path": "src/v3/codec/decode.rs", "rank": 61, "score": 75732.76444591371 }, { "content": " res.clone(),\n\n &mut tmp,\n\n )\n\n .unwrap();\n\n let decoded = decode_packet(cur, fixed);\n\n let res = Ok(res);\n\n if decoded != res {\n\n panic!(\"decoded packet does not match expectations.\\nexpected: {:?}\\nactual: {:?}\\nencoding output for expected: {:X?}\", res, decoded, tmp.as_ref());\n\n }\n\n //assert_eq!(, Ok(res));\n\n }\n\n\n\n #[test]\n\n fn test_decode_connect_packets() {\n\n assert_eq!(\n\n Connect::decode(&mut Bytes::from_static(\n\n b\"\\x00\\x04MQTT\\x05\\xC0\\x00\\x3C\\x00\\x00\\x0512345\\x00\\x04user\\x00\\x04pass\"\n\n )),\n\n Ok(Connect {\n\n clean_start: false,\n", "file_path": "src/v5/codec/decode.rs", "rank": 62, "score": 75732.66404885318 }, { "content": " use ntex::util::{Bytes, BytesMut};\n\n use std::num::NonZeroU16;\n\n\n\n use super::*;\n\n use crate::types::QoS;\n\n use crate::utils::decode_variable_length;\n\n use crate::v5::codec::*;\n\n\n\n fn packet_id(v: u16) -> NonZeroU16 {\n\n NonZeroU16::new(v).unwrap()\n\n }\n\n\n\n fn assert_decode_packet<B: AsRef<[u8]>>(bytes: B, res: Packet) {\n\n let bytes = bytes.as_ref();\n\n let fixed = bytes[0];\n\n let (_len, consumed) = decode_variable_length(&bytes[1..]).unwrap().unwrap();\n\n let cur = Bytes::copy_from_slice(&bytes[consumed + 1..]);\n\n let mut tmp = BytesMut::with_capacity(4096);\n\n ntex::codec::Encoder::encode(\n\n &mut crate::v5::codec::Codec::new(),\n", "file_path": "src/v5/codec/decode.rs", "rank": 63, 
"score": 75731.81801295144 }, { "content": " keep_alive: 60,\n\n client_id: ByteString::from_static(\"12345\"),\n\n last_will: None,\n\n username: Some(ByteString::from_static(\"user\")),\n\n password: Some(Bytes::from_static(&b\"pass\"[..])),\n\n session_expiry_interval_secs: None,\n\n auth_method: None,\n\n auth_data: None,\n\n request_problem_info: true,\n\n request_response_info: false,\n\n receive_max: None,\n\n topic_alias_max: 0,\n\n user_properties: Vec::new(),\n\n max_packet_size: None,\n\n })\n\n );\n\n\n\n assert_eq!(\n\n Connect::decode(&mut Bytes::from_static(\n\n b\"\\x00\\x04MQTT\\x05\\x14\\x00\\x3C\\x00\\x00\\x0512345\\x00\\x00\\x05topic\\x00\\x07message\"\n", "file_path": "src/v5/codec/decode.rs", "rank": 64, "score": 75730.85336887778 }, { "content": " assert_eq!(decode_packet(cur, first_byte), Ok($res));\n\n }};\n\n );\n\n\n\n fn packet_id(v: u16) -> NonZeroU16 {\n\n NonZeroU16::new(v).unwrap()\n\n }\n\n\n\n #[test]\n\n fn test_decode_connect_packets() {\n\n assert_eq!(\n\n decode_connect_packet(&mut Bytes::from_static(\n\n b\"\\x00\\x04MQTT\\x04\\xC0\\x00\\x3C\\x00\\x0512345\\x00\\x04user\\x00\\x04pass\"\n\n )),\n\n Ok(Packet::Connect(Box::new(Connect {\n\n clean_session: false,\n\n keep_alive: 60,\n\n client_id: ByteString::try_from(Bytes::from_static(b\"12345\")).unwrap(),\n\n last_will: None,\n\n username: Some(ByteString::try_from(Bytes::from_static(b\"user\")).unwrap()),\n", "file_path": "src/v3/codec/decode.rs", "rank": 65, "score": 75730.38281794042 }, { "content": " assert_eq!(\n\n ConnectAck::decode(&mut Bytes::from_static(b\"\\x03\\x86\\x00\")),\n\n Err(DecodeError::ConnAckReservedFlagSet)\n\n );\n\n\n\n assert_decode_packet(\n\n b\"\\x20\\x03\\x01\\x86\\x00\",\n\n Packet::ConnectAck(Box::new(ConnectAck {\n\n session_present: true,\n\n reason_code: ConnectAckReason::BadUserNameOrPassword,\n\n ..ConnectAck::default()\n\n })),\n\n );\n\n\n\n assert_decode_packet([0b1110_0000, 0], Packet::Disconnect(Disconnect::default()));\n\n }\n\n\n\n 
fn default_test_publish() -> Publish {\n\n Publish {\n\n dup: false,\n", "file_path": "src/v5/codec/decode.rs", "rank": 66, "score": 75729.66856223134 }, { "content": " decode_ack(src, |packet_id| Packet::PublishReceived { packet_id })\n\n }\n\n packet_type::PUBREL => {\n\n decode_ack(src, |packet_id| Packet::PublishRelease { packet_id })\n\n }\n\n packet_type::PUBCOMP => {\n\n decode_ack(src, |packet_id| Packet::PublishComplete { packet_id })\n\n }\n\n packet_type::SUBSCRIBE => decode_subscribe_packet(&mut src),\n\n packet_type::SUBACK => decode_subscribe_ack_packet(&mut src),\n\n packet_type::UNSUBSCRIBE => decode_unsubscribe_packet(&mut src),\n\n packet_type::UNSUBACK => {\n\n decode_ack(src, |packet_id| Packet::UnsubscribeAck { packet_id })\n\n }\n\n packet_type::PINGREQ => Ok(Packet::PingRequest),\n\n packet_type::PINGRESP => Ok(Packet::PingResponse),\n\n packet_type::DISCONNECT => Ok(Packet::Disconnect),\n\n _ => Err(DecodeError::UnsupportedPacketType),\n\n }\n\n}\n\n\n\n#[inline]\n", "file_path": "src/v3/codec/decode.rs", "rank": 67, "score": 75729.39014186303 }, { "content": " ByteString::try_from(Bytes::from_static(b\"filter\")).unwrap(),\n\n ],\n\n };\n\n\n\n assert_eq!(\n\n decode_unsubscribe_packet(&mut Bytes::from_static(\n\n b\"\\x12\\x34\\x00\\x04test\\x00\\x06filter\"\n\n )),\n\n Ok(p.clone())\n\n );\n\n assert_decode_packet!(b\"\\xa2\\x10\\x12\\x34\\x00\\x04test\\x00\\x06filter\", p);\n\n\n\n assert_decode_packet!(\n\n b\"\\xb0\\x02\\x43\\x21\",\n\n Packet::UnsubscribeAck { packet_id: packet_id(0x4321) }\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_decode_ping_packets() {\n\n assert_decode_packet!(b\"\\xc0\\x00\", Packet::PingRequest);\n\n assert_decode_packet!(b\"\\xd0\\x00\", Packet::PingResponse);\n\n }\n\n}\n", "file_path": "src/v3/codec/decode.rs", "rank": 68, "score": 75728.90175448036 }, { "content": " password: Some(Bytes::from(&b\"pass\"[..])),\n\n })))\n\n );\n\n\n\n assert_eq!(\n\n decode_connect_packet(&mut Bytes::from_static(\n\n 
b\"\\x00\\x04MQTT\\x04\\x14\\x00\\x3C\\x00\\x0512345\\x00\\x05topic\\x00\\x07message\"\n\n )),\n\n Ok(Packet::Connect(Box::new(Connect {\n\n clean_session: false,\n\n keep_alive: 60,\n\n client_id: ByteString::try_from(Bytes::from_static(b\"12345\")).unwrap(),\n\n last_will: Some(LastWill {\n\n qos: QoS::ExactlyOnce,\n\n retain: false,\n\n topic: ByteString::try_from(Bytes::from_static(b\"topic\")).unwrap(),\n\n message: Bytes::from(&b\"message\"[..]),\n\n }),\n\n username: None,\n\n password: None,\n", "file_path": "src/v3/codec/decode.rs", "rank": 69, "score": 75728.7819211661 }, { "content": " assert_decode_packet(b\"\\x90\\x05\\x12\\x34\\x00\\x01\\x80\\x02\", p);\n\n\n\n let p = Packet::Unsubscribe(Unsubscribe {\n\n packet_id: packet_id(0x1234),\n\n topic_filters: vec![\n\n ByteString::from_static(\"test\"),\n\n ByteString::from_static(\"filter\"),\n\n ],\n\n user_properties: UserProperties::default(),\n\n });\n\n\n\n assert_eq!(\n\n Packet::Unsubscribe(\n\n Unsubscribe::decode(&mut Bytes::from_static(\n\n b\"\\x12\\x34\\x00\\x00\\x04test\\x00\\x06filter\"\n\n ))\n\n .unwrap()\n\n ),\n\n p.clone()\n\n );\n", "file_path": "src/v5/codec/decode.rs", "rank": 70, "score": 75726.06421835879 }, { "content": " #[test]\n\n fn test_decode_subscribe_packets() {\n\n let p = Packet::Subscribe {\n\n packet_id: packet_id(0x1234),\n\n topic_filters: vec![\n\n (ByteString::try_from(Bytes::from_static(b\"test\")).unwrap(), QoS::AtLeastOnce),\n\n (\n\n ByteString::try_from(Bytes::from_static(b\"filter\")).unwrap(),\n\n QoS::ExactlyOnce,\n\n ),\n\n ],\n\n };\n\n\n\n assert_eq!(\n\n decode_subscribe_packet(&mut Bytes::from_static(\n\n b\"\\x12\\x34\\x00\\x04test\\x01\\x00\\x06filter\\x02\"\n\n )),\n\n Ok(p.clone())\n\n );\n\n assert_decode_packet!(b\"\\x82\\x12\\x12\\x34\\x00\\x04test\\x01\\x00\\x06filter\\x02\", p);\n", "file_path": "src/v3/codec/decode.rs", "rank": 71, "score": 75726.04042963682 }, { "content": " retain: true,\n\n qos: QoS::ExactlyOnce,\n\n topic: 
ByteString::from_static(\"topic\"),\n\n packet_id: Some(packet_id(0x4321)),\n\n payload: Bytes::from_static(b\"data\"),\n\n ..default_test_publish()\n\n }),\n\n );\n\n assert_decode_packet(\n\n b\"\\x30\\x0C\\x00\\x05topic\\x00data\",\n\n Packet::Publish(Publish {\n\n dup: false,\n\n retain: false,\n\n qos: QoS::AtMostOnce,\n\n topic: ByteString::from_static(\"topic\"),\n\n packet_id: None,\n\n payload: Bytes::from_static(b\"data\"),\n\n ..default_test_publish()\n\n }),\n\n );\n", "file_path": "src/v5/codec/decode.rs", "rank": 72, "score": 75725.09931954597 }, { "content": "\n\n let p = Packet::SubscribeAck {\n\n packet_id: packet_id(0x1234),\n\n status: vec![\n\n SubscribeReturnCode::Success(QoS::AtLeastOnce),\n\n SubscribeReturnCode::Failure,\n\n SubscribeReturnCode::Success(QoS::ExactlyOnce),\n\n ],\n\n };\n\n\n\n assert_eq!(\n\n decode_subscribe_ack_packet(&mut Bytes::from_static(b\"\\x12\\x34\\x01\\x80\\x02\")),\n\n Ok(p.clone())\n\n );\n\n assert_decode_packet!(b\"\\x90\\x05\\x12\\x34\\x01\\x80\\x02\", p);\n\n\n\n let p = Packet::Unsubscribe {\n\n packet_id: packet_id(0x1234),\n\n topic_filters: vec![\n\n ByteString::try_from(Bytes::from_static(b\"test\")).unwrap(),\n", "file_path": "src/v3/codec/decode.rs", "rank": 73, "score": 75724.5444859896 }, { "content": " )),\n\n Ok(Connect {\n\n clean_start: false,\n\n keep_alive: 60,\n\n client_id: ByteString::from_static(\"12345\"),\n\n last_will: Some(LastWill {\n\n qos: QoS::ExactlyOnce,\n\n retain: false,\n\n topic: ByteString::from_static(\"topic\"),\n\n message: Bytes::from_static(&b\"message\"[..]),\n\n will_delay_interval_sec: None,\n\n correlation_data: None,\n\n message_expiry_interval: None,\n\n content_type: None,\n\n user_properties: Vec::new(),\n\n is_utf8_payload: None,\n\n response_topic: None,\n\n }),\n\n username: None,\n\n password: None,\n", "file_path": "src/v5/codec/decode.rs", "rank": 74, "score": 75724.52469555443 }, { "content": "\n\n let last_will = if flags.contains(ConnectFlags::WILL) 
{\n\n let topic = ByteString::decode(src)?;\n\n let message = Bytes::decode(src)?;\n\n Some(LastWill {\n\n qos: QoS::try_from((flags & ConnectFlags::WILL_QOS).bits() >> WILL_QOS_SHIFT)?,\n\n retain: flags.contains(ConnectFlags::WILL_RETAIN),\n\n topic,\n\n message,\n\n })\n\n } else {\n\n None\n\n };\n\n let username = if flags.contains(ConnectFlags::USERNAME) {\n\n Some(ByteString::decode(src)?)\n\n } else {\n\n None\n\n };\n\n let password =\n\n if flags.contains(ConnectFlags::PASSWORD) { Some(Bytes::decode(src)?) } else { None };\n", "file_path": "src/v3/codec/decode.rs", "rank": 75, "score": 75723.99721575377 }, { "content": "\n\n assert_decode_packet!(\n\n b\"\\x20\\x02\\x01\\x04\",\n\n Packet::ConnectAck {\n\n session_present: true,\n\n return_code: ConnectAckReason::BadUserNameOrPassword,\n\n }\n\n );\n\n\n\n assert_decode_packet!(b\"\\xe0\\x00\", Packet::Disconnect);\n\n }\n\n\n\n #[test]\n\n fn test_decode_publish_packets() {\n\n //assert_eq!(\n\n // decode_publish_packet(b\"\\x00\\x05topic\\x12\\x34\"),\n\n // Done(&b\"\"[..], (\"topic\".to_owned(), 0x1234))\n\n //);\n\n\n\n assert_decode_packet!(\n", "file_path": "src/v3/codec/decode.rs", "rank": 76, "score": 75723.46637979757 }, { "content": " assert_decode_packet(b\"\\xa2\\x11\\x12\\x34\\x00\\x00\\x04test\\x00\\x06filter\", p);\n\n\n\n assert_decode_packet(\n\n b\"\\xb0\\x03\\x43\\x21\\x00\",\n\n Packet::UnsubscribeAck(UnsubscribeAck {\n\n packet_id: packet_id(0x4321),\n\n properties: UserProperties::default(),\n\n reason_string: None,\n\n status: vec![],\n\n }),\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_decode_ping_packets() {\n\n assert_decode_packet(b\"\\xc0\\x00\", Packet::PingRequest);\n\n assert_decode_packet(b\"\\xd0\\x00\", Packet::PingResponse);\n\n }\n\n}\n", "file_path": "src/v5/codec/decode.rs", "rank": 77, "score": 75722.0600296552 }, { "content": " b\"\\x3d\\x0D\\x00\\x05topic\\x43\\x21data\",\n\n Packet::Publish(Publish {\n\n dup: true,\n\n retain: true,\n\n qos: QoS::ExactlyOnce,\n\n 
topic: ByteString::try_from(Bytes::from_static(b\"topic\")).unwrap(),\n\n packet_id: Some(packet_id(0x4321)),\n\n payload: Bytes::from_static(b\"data\"),\n\n })\n\n );\n\n assert_decode_packet!(\n\n b\"\\x30\\x0b\\x00\\x05topicdata\",\n\n Packet::Publish(Publish {\n\n dup: false,\n\n retain: false,\n\n qos: QoS::AtMostOnce,\n\n topic: ByteString::try_from(Bytes::from_static(b\"topic\")).unwrap(),\n\n packet_id: None,\n\n payload: Bytes::from_static(b\"data\"),\n\n })\n", "file_path": "src/v3/codec/decode.rs", "rank": 78, "score": 75721.6590309892 }, { "content": " fn test_decode_subscribe_packets() {\n\n let p = Packet::Subscribe(Subscribe {\n\n packet_id: packet_id(0x1234),\n\n topic_filters: vec![\n\n (\n\n ByteString::from_static(\"test\"),\n\n SubscriptionOptions {\n\n qos: QoS::AtLeastOnce,\n\n no_local: false,\n\n retain_as_published: false,\n\n retain_handling: RetainHandling::AtSubscribe,\n\n },\n\n ),\n\n (\n\n ByteString::from_static(\"filter\"),\n\n SubscriptionOptions {\n\n qos: QoS::ExactlyOnce,\n\n no_local: false,\n\n retain_as_published: false,\n\n retain_handling: RetainHandling::AtSubscribe,\n", "file_path": "src/v5/codec/decode.rs", "rank": 79, "score": 75721.09701638074 }, { "content": " },\n\n ),\n\n ],\n\n id: None,\n\n user_properties: Vec::new(),\n\n });\n\n\n\n assert_decode_packet(b\"\\x82\\x13\\x12\\x34\\x00\\x00\\x04test\\x01\\x00\\x06filter\\x02\", p);\n\n\n\n let p = Packet::SubscribeAck(SubscribeAck {\n\n packet_id: packet_id(0x1234),\n\n status: vec![\n\n SubscribeAckReason::GrantedQos1,\n\n SubscribeAckReason::UnspecifiedError,\n\n SubscribeAckReason::GrantedQos2,\n\n ],\n\n properties: UserProperties::default(),\n\n reason_string: None,\n\n });\n\n\n", "file_path": "src/v5/codec/decode.rs", "rank": 80, "score": 75719.75992123997 }, { "content": " );\n\n\n\n assert_decode_packet!(\n\n b\"\\x40\\x02\\x43\\x21\",\n\n Packet::PublishAck { packet_id: packet_id(0x4321) }\n\n );\n\n assert_decode_packet!(\n\n 
b\"\\x50\\x02\\x43\\x21\",\n\n Packet::PublishReceived { packet_id: packet_id(0x4321) }\n\n );\n\n assert_decode_packet!(\n\n b\"\\x62\\x02\\x43\\x21\",\n\n Packet::PublishRelease { packet_id: packet_id(0x4321) }\n\n );\n\n assert_decode_packet!(\n\n b\"\\x70\\x02\\x43\\x21\",\n\n Packet::PublishComplete { packet_id: packet_id(0x4321) }\n\n );\n\n }\n\n\n", "file_path": "src/v3/codec/decode.rs", "rank": 81, "score": 75718.75523868414 }, { "content": " Ok(Connect {\n\n clean_session: flags.contains(ConnectFlags::CLEAN_START),\n\n keep_alive,\n\n client_id,\n\n last_will,\n\n username,\n\n password,\n\n }\n\n .into())\n\n}\n\n\n", "file_path": "src/v3/codec/decode.rs", "rank": 82, "score": 75718.67430757644 }, { "content": " retain: false,\n\n qos: QoS::AtMostOnce,\n\n topic: ByteString::default(),\n\n packet_id: Some(packet_id(1)),\n\n payload: Bytes::new(),\n\n properties: PublishProperties::default(),\n\n }\n\n }\n\n\n\n #[test]\n\n fn test_decode_publish_packets() {\n\n //assert_eq!(\n\n // decode_publish_packet(b\"\\x00\\x05topic\\x12\\x34\"),\n\n // Done(&b\"\"[..], (\"topic\".to_owned(), 0x1234))\n\n //);\n\n\n\n assert_decode_packet(\n\n b\"\\x3d\\x0E\\x00\\x05topic\\x43\\x21\\x00data\",\n\n Packet::Publish(Publish {\n\n dup: true,\n", "file_path": "src/v5/codec/decode.rs", "rank": 83, "score": 75718.50058962706 }, { "content": "\n\n assert_decode_packet(\n\n b\"\\x40\\x02\\x43\\x21\",\n\n Packet::PublishAck(PublishAck {\n\n packet_id: packet_id(0x4321),\n\n reason_code: PublishAckReason::Success,\n\n properties: UserProperties::default(),\n\n reason_string: None,\n\n }),\n\n );\n\n assert_decode_packet(\n\n b\"\\x50\\x02\\x43\\x21\",\n\n Packet::PublishReceived(PublishAck {\n\n packet_id: packet_id(0x4321),\n\n reason_code: PublishAckReason::Success,\n\n properties: UserProperties::default(),\n\n reason_string: None,\n\n }),\n\n );\n\n assert_decode_packet(\n", "file_path": "src/v5/codec/decode.rs", "rank": 84, "score": 75718.35367542757 }, { "content": " 
b\"\\x62\\x02\\x43\\x21\",\n\n Packet::PublishRelease(PublishAck2 {\n\n packet_id: packet_id(0x4321),\n\n reason_code: PublishAck2Reason::Success,\n\n properties: UserProperties::default(),\n\n reason_string: None,\n\n }),\n\n );\n\n assert_decode_packet(\n\n b\"\\x70\\x02\\x43\\x21\",\n\n Packet::PublishComplete(PublishAck2 {\n\n packet_id: packet_id(0x4321),\n\n reason_code: PublishAck2Reason::Success,\n\n properties: UserProperties::default(),\n\n reason_string: None,\n\n }),\n\n );\n\n }\n\n\n\n #[test]\n", "file_path": "src/v5/codec/decode.rs", "rank": 85, "score": 75716.7421588068 }, { "content": "use ntex::util::{BufMut, ByteString, BytesMut};\n\n\n\nuse super::packet::{property_type as pt, *};\n\nuse super::{UserProperties, UserProperty};\n\nuse crate::error::EncodeError;\n\nuse crate::types::packet_type;\n\nuse crate::utils::{write_variable_length, Encode};\n\n\n\npub(super) trait EncodeLtd {\n\n fn encoded_size(&self, limit: u32) -> usize;\n\n\n\n fn encode(&self, buf: &mut BytesMut, size: u32) -> Result<(), EncodeError>;\n\n}\n\n\n\nimpl EncodeLtd for Packet {\n\n fn encoded_size(&self, limit: u32) -> usize {\n\n // limit -= 5; // fixed header = 1, var_len(remaining.max_value()) = 4\n\n match self {\n\n Packet::Connect(connect) => connect.encoded_size(limit),\n\n Packet::Publish(publish) => publish.encoded_size(limit),\n", "file_path": "src/v5/codec/encode.rs", "rank": 86, "score": 75697.31123028006 }, { "content": "use ntex::util::{BufMut, BytesMut};\n\n\n\nuse crate::error::EncodeError;\n\nuse crate::types::{packet_type, ConnectFlags, QoS, MQTT, MQTT_LEVEL_3, WILL_QOS_SHIFT};\n\nuse crate::utils::{write_variable_length, Encode};\n\n\n\nuse super::packet::*;\n\n\n\npub(crate) fn get_encoded_size(packet: &Packet) -> usize {\n\n match *packet {\n\n Packet::Connect ( ref connect ) => {\n\n let Connect {ref last_will, ref client_id, ref username, ref password, ..} = **connect;\n\n\n\n // Protocol Name + Protocol Level + Connect Flags + Keep Alive\n\n let 
mut n = 2 + 4 + 1 + 1 + 2;\n\n\n\n // Client Id\n\n n += 2 + client_id.len();\n\n\n\n // Will Topic + Will Message\n", "file_path": "src/v3/codec/encode.rs", "rank": 87, "score": 75695.42224339569 }, { "content": " Packet::PingRequest | Packet::PingResponse | Packet::Disconnect => 0,\n\n }\n\n}\n\n\n\npub(crate) fn encode(\n\n packet: &Packet,\n\n dst: &mut BytesMut,\n\n content_size: u32,\n\n) -> Result<(), EncodeError> {\n\n match packet {\n\n Packet::Connect(connect) => {\n\n dst.put_u8(packet_type::CONNECT);\n\n write_variable_length(content_size, dst);\n\n encode_connect(connect, dst)?;\n\n }\n\n Packet::ConnectAck { session_present, return_code } => {\n\n dst.put_u8(packet_type::CONNACK);\n\n write_variable_length(content_size, dst);\n\n let flags_byte = if *session_present { 0x01 } else { 0x00 };\n\n let code: u8 = From::from(*return_code);\n", "file_path": "src/v3/codec/encode.rs", "rank": 88, "score": 75693.75438835045 }, { "content": " fn test_encode_connect_packets() {\n\n assert_encode_packet(\n\n &Packet::Connect(Box::new(Connect {\n\n clean_session: false,\n\n keep_alive: 60,\n\n client_id: ByteString::from_static(\"12345\"),\n\n last_will: None,\n\n username: Some(ByteString::from_static(\"user\")),\n\n password: Some(Bytes::from_static(b\"pass\")),\n\n })),\n\n &b\"\\x10\\x1D\\x00\\x04MQTT\\x04\\xC0\\x00\\x3C\\x00\\\n\n\\x0512345\\x00\\x04user\\x00\\x04pass\"[..],\n\n );\n\n\n\n assert_encode_packet(\n\n &Packet::Connect(Box::new(Connect {\n\n clean_session: false,\n\n keep_alive: 60,\n\n client_id: ByteString::from_static(\"12345\"),\n\n last_will: Some(LastWill {\n", "file_path": "src/v3/codec/encode.rs", "rank": 89, "score": 75691.79231016804 }, { "content": " Packet::ConnectAck(ack) => ack.encoded_size(limit),\n\n Packet::PublishAck(ack) | Packet::PublishReceived(ack) => ack.encoded_size(limit),\n\n Packet::PublishRelease(ack) | Packet::PublishComplete(ack) => {\n\n ack.encoded_size(limit)\n\n }\n\n Packet::Subscribe(sub) => 
sub.encoded_size(limit),\n\n Packet::SubscribeAck(ack) => ack.encoded_size(limit),\n\n Packet::Unsubscribe(unsub) => unsub.encoded_size(limit),\n\n Packet::UnsubscribeAck(ack) => ack.encoded_size(limit),\n\n Packet::PingRequest | Packet::PingResponse => 0,\n\n Packet::Disconnect(disconnect) => disconnect.encoded_size(limit),\n\n Packet::Auth(auth) => auth.encoded_size(limit),\n\n }\n\n }\n\n\n\n fn encode(&self, buf: &mut BytesMut, check_size: u32) -> Result<(), EncodeError> {\n\n match self {\n\n Packet::Connect(connect) => {\n\n buf.put_u8(packet_type::CONNECT);\n\n write_variable_length(check_size, buf);\n", "file_path": "src/v5/codec/encode.rs", "rank": 90, "score": 75691.19951760049 }, { "content": " assert_encode_packet(\n\n &Packet::Connect(Box::new(Connect {\n\n clean_start: false,\n\n keep_alive: 60,\n\n client_id: ByteString::from_static(\"12345\"),\n\n last_will: Some(LastWill {\n\n qos: QoS::ExactlyOnce,\n\n retain: false,\n\n topic: ByteString::from_static(\"topic\"),\n\n message: Bytes::from_static(b\"message\"),\n\n will_delay_interval_sec: None,\n\n correlation_data: None,\n\n message_expiry_interval: None,\n\n content_type: None,\n\n user_properties: vec![],\n\n is_utf8_payload: None,\n\n response_topic: None,\n\n }),\n\n username: None,\n\n password: None,\n", "file_path": "src/v5/codec/encode.rs", "rank": 91, "score": 75689.0240341896 }, { "content": ") -> Result<(), EncodeError> {\n\n if let Some(v) = v {\n\n buf.put_u8(prop_type);\n\n v.encode(buf)\n\n } else {\n\n Ok(())\n\n }\n\n}\n\n\n\npub(super) fn encoded_bool_property_size(v: bool, skip_if: bool) -> usize {\n\n if v == skip_if {\n\n 0\n\n } else {\n\n 2\n\n }\n\n}\n\n\n\npub(super) fn encode_bool_property(\n\n v: bool,\n\n prop_type: u8,\n", "file_path": "src/v5/codec/encode.rs", "rank": 92, "score": 75688.20196053282 }, { "content": " connect.encode(buf, check_size)\n\n }\n\n Packet::ConnectAck(ack) => {\n\n buf.put_u8(packet_type::CONNACK);\n\n write_variable_length(check_size, 
buf);\n\n ack.encode(buf, check_size)\n\n }\n\n Packet::Publish(publish) => {\n\n buf.put_u8(\n\n packet_type::PUBLISH_START\n\n | (u8::from(publish.qos) << 1)\n\n | ((publish.dup as u8) << 3)\n\n | (publish.retain as u8),\n\n );\n\n write_variable_length(check_size, buf);\n\n publish.encode(buf, check_size)\n\n }\n\n Packet::PublishAck(ack) => {\n\n buf.put_u8(packet_type::PUBACK);\n\n write_variable_length(check_size, buf);\n", "file_path": "src/v5/codec/encode.rs", "rank": 93, "score": 75687.43607653516 }, { "content": " sub.encode(buf, check_size)\n\n }\n\n Packet::SubscribeAck(ack) => {\n\n buf.put_u8(packet_type::SUBACK);\n\n write_variable_length(check_size, buf);\n\n ack.encode(buf, check_size)\n\n }\n\n Packet::Unsubscribe(unsub) => {\n\n buf.put_u8(packet_type::UNSUBSCRIBE);\n\n write_variable_length(check_size, buf);\n\n unsub.encode(buf, check_size)\n\n }\n\n Packet::UnsubscribeAck(ack) => {\n\n buf.put_u8(packet_type::UNSUBACK);\n\n write_variable_length(check_size, buf);\n\n ack.encode(buf, check_size)\n\n }\n\n Packet::PingRequest => {\n\n buf.put_slice(&[packet_type::PINGREQ, 0]);\n\n Ok(())\n", "file_path": "src/v5/codec/encode.rs", "rank": 94, "score": 75686.90812548048 }, { "content": " qos: QoS::ExactlyOnce,\n\n retain: false,\n\n topic: ByteString::from_static(\"topic\"),\n\n message: Bytes::from_static(b\"message\"),\n\n }),\n\n username: None,\n\n password: None,\n\n })),\n\n &b\"\\x10\\x21\\x00\\x04MQTT\\x04\\x14\\x00\\x3C\\x00\\\n\n\\x0512345\\x00\\x05topic\\x00\\x07message\"[..],\n\n );\n\n\n\n assert_encode_packet(&Packet::Disconnect, b\"\\xe0\\x00\");\n\n }\n\n\n\n #[test]\n\n fn test_encode_publish_packets() {\n\n assert_encode_packet(\n\n &Packet::Publish(Publish {\n\n dup: true,\n", "file_path": "src/v3/codec/encode.rs", "rank": 95, "score": 75686.64629974953 }, { "content": " .collect();\n\n dst.put_slice(&buf);\n\n }\n\n Packet::Unsubscribe { packet_id, ref topic_filters } => {\n\n dst.put_u8(packet_type::UNSUBSCRIBE);\n\n 
write_variable_length(content_size, dst);\n\n packet_id.encode(dst)?;\n\n for filter in topic_filters {\n\n filter.encode(dst)?;\n\n }\n\n }\n\n Packet::UnsubscribeAck { packet_id } => {\n\n dst.put_u8(packet_type::UNSUBACK);\n\n write_variable_length(content_size, dst);\n\n packet_id.encode(dst)?;\n\n }\n\n Packet::PingRequest => dst.put_slice(&[packet_type::PINGREQ, 0]),\n\n Packet::PingResponse => dst.put_slice(&[packet_type::PINGRESP, 0]),\n\n Packet::Disconnect => dst.put_slice(&[packet_type::DISCONNECT, 0]),\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/v3/codec/encode.rs", "rank": 96, "score": 75686.5860122571 }, { "content": "\n\n use super::*;\n\n\n\n fn packet_id(v: u16) -> NonZeroU16 {\n\n NonZeroU16::new(v).unwrap()\n\n }\n\n\n\n #[test]\n\n fn test_encode_fixed_header() {\n\n let mut v = BytesMut::with_capacity(271);\n\n let p = Packet::PingRequest;\n\n\n\n assert_eq!(get_encoded_size(&p), 0);\n\n encode(&p, &mut v, 0).unwrap();\n\n assert_eq!(v, b\"\\xc0\\x00\".as_ref());\n\n\n\n v.clear();\n\n\n\n let p = Packet::Publish(Publish {\n\n dup: true,\n", "file_path": "src/v3/codec/encode.rs", "rank": 97, "score": 75686.20613993292 }, { "content": " dst.put_slice(&[flags_byte, code]);\n\n }\n\n Packet::Publish(publish) => {\n\n dst.put_u8(\n\n packet_type::PUBLISH_START\n\n | (u8::from(publish.qos) << 1)\n\n | ((publish.dup as u8) << 3)\n\n | (publish.retain as u8),\n\n );\n\n write_variable_length(content_size, dst);\n\n publish.topic.encode(dst)?;\n\n if publish.qos == QoS::AtMostOnce {\n\n if publish.packet_id.is_some() {\n\n return Err(EncodeError::MalformedPacket); // packet id must not be set\n\n }\n\n } else {\n\n publish.packet_id.ok_or(EncodeError::PacketIdRequired)?.encode(dst)?;\n\n }\n\n dst.put(publish.payload.as_ref());\n\n }\n", "file_path": "src/v3/codec/encode.rs", "rank": 98, "score": 75686.02284514824 }, { "content": " clean_start: false,\n\n keep_alive: 60,\n\n client_id: ByteString::from_static(\"12345\"),\n\n last_will: 
None,\n\n username: Some(ByteString::from_static(\"user\")),\n\n password: Some(Bytes::from_static(b\"pass\")),\n\n session_expiry_interval_secs: None,\n\n auth_method: None,\n\n auth_data: None,\n\n request_problem_info: true,\n\n request_response_info: false,\n\n receive_max: None,\n\n topic_alias_max: 0,\n\n user_properties: vec![],\n\n max_packet_size: None,\n\n })),\n\n &b\"\\x10\\x1E\\x00\\x04MQTT\\x05\\xC0\\x00\\x3C\\x00\\x00\\\n\n\\x0512345\\x00\\x04user\\x00\\x04pass\"[..],\n\n );\n\n\n", "file_path": "src/v5/codec/encode.rs", "rank": 99, "score": 75685.7355109134 } ]
Rust
imxrt1062-pac/imxrt1062-dmamux/src/chcfg.rs
Shock-1/teensy4-rs
effc3b290f1be3c7aef62a78e82dbfbc27aa6370
#[doc = "Reader of register CHCFG[%s]"] pub type R = crate::R<u32, super::CHCFG>; #[doc = "Writer for register CHCFG[%s]"] pub type W = crate::W<u32, super::CHCFG>; #[doc = "Register CHCFG[%s] `reset()`'s with value 0"] impl crate::ResetValue for super::CHCFG { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `SOURCE`"] pub type SOURCE_R = crate::R<u8, u8>; #[doc = "Write proxy for field `SOURCE`"] pub struct SOURCE_W<'a> { w: &'a mut W, } impl<'a> SOURCE_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x7f) | ((value as u32) & 0x7f); self.w } } #[doc = "DMA Channel Always Enable\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum A_ON_A { #[doc = "0: DMA Channel Always ON function is disabled"] A_ON_0 = 0, #[doc = "1: DMA Channel Always ON function is enabled"] A_ON_1 = 1, } impl From<A_ON_A> for bool { #[inline(always)] fn from(variant: A_ON_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `A_ON`"] pub type A_ON_R = crate::R<bool, A_ON_A>; impl A_ON_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> A_ON_A { match self.bits { false => A_ON_A::A_ON_0, true => A_ON_A::A_ON_1, } } #[doc = "Checks if the value of the field is `A_ON_0`"] #[inline(always)] pub fn is_a_on_0(&self) -> bool { *self == A_ON_A::A_ON_0 } #[doc = "Checks if the value of the field is `A_ON_1`"] #[inline(always)] pub fn is_a_on_1(&self) -> bool { *self == A_ON_A::A_ON_1 } } #[doc = "Write proxy for field `A_ON`"] pub struct A_ON_W<'a> { w: &'a mut W, } impl<'a> A_ON_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: A_ON_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "DMA Channel Always ON function is disabled"] #[inline(always)] pub fn a_on_0(self) -> &'a mut W { self.variant(A_ON_A::A_ON_0) } #[doc = "DMA Channel Always 
ON function is enabled"] #[inline(always)] pub fn a_on_1(self) -> &'a mut W { self.variant(A_ON_A::A_ON_1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 29)) | (((value as u32) & 0x01) << 29); self.w } } #[doc = "DMA Channel Trigger Enable\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum TRIG_A { #[doc = "0: Triggering is disabled. If triggering is disabled and ENBL is set, the DMA Channel will simply route the specified source to the DMA channel. (Normal mode)"] TRIG_0 = 0, #[doc = "1: Triggering is enabled. If triggering is enabled and ENBL is set, the DMA_CH_MUX is in Periodic Trigger mode."] TRIG_1 = 1, } impl From<TRIG_A> for bool { #[inline(always)] fn from(variant: TRIG_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `TRIG`"] pub type TRIG_R = crate::R<bool, TRIG_A>; impl TRIG_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> TRIG_A { match self.bits { false => TRIG_A::TRIG_0, true => TRIG_A::TRIG_1, } } #[doc = "Checks if the value of the field is `TRIG_0`"] #[inline(always)] pub fn is_trig_0(&self) -> bool { *self == TRIG_A::TRIG_0 } #[doc = "Checks if the value of the field is `TRIG_1`"] #[inline(always)] pub fn is_trig_1(&self) -> bool { *self == TRIG_A::TRIG_1 } } #[doc = "Write proxy for field `TRIG`"] pub struct TRIG_W<'a> { w: &'a mut W, } impl<'a> TRIG_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: TRIG_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Triggering is disabled. If triggering is disabled and ENBL is set, the DMA Channel will simply route the specified source to the DMA channel. 
(Normal mode)"] #[inline(always)] pub fn trig_0(self) -> &'a mut W { self.variant(TRIG_A::TRIG_0) } #[doc = "Triggering is enabled. If triggering is enabled and ENBL is set, the DMA_CH_MUX is in Periodic Trigger mode."] #[inline(always)] pub fn trig_1(self) -> &'a mut W { self.variant(TRIG_A::TRIG_1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30); self.w } } #[doc = "DMA Mux Channel Enable\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum ENBL_A { #[doc = "0: DMA Mux channel is disabled"] ENBL_0 = 0, #[doc = "1: DMA Mux channel is enabled"] ENBL_1 = 1, } impl From<ENBL_A> for bool { #[inline(always)] fn from(variant: ENBL_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `ENBL`"] pub type ENBL_R = crate::R<bool, ENBL_A>; impl ENBL_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> ENBL_A { match self.bits { false => ENBL_A::ENBL_0, true => ENBL_A::ENBL_1, } } #[doc = "Checks if the value of the field is `ENBL_0`"] #[inline(always)] pub fn is_enbl_0(&self) -> bool { *self == ENBL_A::ENBL_0 } #[doc = "Checks if the value of the field is `ENBL_1`"] #[inline(always)] pub fn is_enbl_1(&self) -> bool { *self == ENBL_A::ENBL_1 } } #[doc = "Write proxy for field `ENBL`"] pub struct ENBL_W<'a> { w: &'a mut W, } impl<'a> ENBL_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: ENBL_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "DMA Mux channel is disabled"] #[inline(always)] pub fn enbl_0(self) -> &'a mut W { self.variant(ENBL_A::ENBL_0) } #[doc = "DMA Mux channel is enabled"] #[inline(always)] pub fn 
enbl_1(self) -> &'a mut W { self.variant(ENBL_A::ENBL_1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31); self.w } } impl R { #[doc = "Bits 0:6 - DMA Channel Source (Slot Number)"] #[inline(always)] pub fn source(&self) -> SOURCE_R { SOURCE_R::new((self.bits & 0x7f) as u8) } #[doc = "Bit 29 - DMA Channel Always Enable"] #[inline(always)] pub fn a_on(&self) -> A_ON_R { A_ON_R::new(((self.bits >> 29) & 0x01) != 0) } #[doc = "Bit 30 - DMA Channel Trigger Enable"] #[inline(always)] pub fn trig(&self) -> TRIG_R { TRIG_R::new(((self.bits >> 30) & 0x01) != 0) } #[doc = "Bit 31 - DMA Mux Channel Enable"] #[inline(always)] pub fn enbl(&self) -> ENBL_R { ENBL_R::new(((self.bits >> 31) & 0x01) != 0) } } impl W { #[doc = "Bits 0:6 - DMA Channel Source (Slot Number)"] #[inline(always)] pub fn source(&mut self) -> SOURCE_W { SOURCE_W { w: self } } #[doc = "Bit 29 - DMA Channel Always Enable"] #[inline(always)] pub fn a_on(&mut self) -> A_ON_W { A_ON_W { w: self } } #[doc = "Bit 30 - DMA Channel Trigger Enable"] #[inline(always)] pub fn trig(&mut self) -> TRIG_W { TRIG_W { w: self } } #[doc = "Bit 31 - DMA Mux Channel Enable"] #[inline(always)] pub fn enbl(&mut self) -> ENBL_W { ENBL_W { w: self } } }
#[doc = "Reader of register CHCFG[%s]"] pub type R = crate::R<u32, super::CHCFG>; #[doc = "Writer for register CHCFG[%s]"] pub type W = crate::W<u32, super::CHCFG>; #[doc = "Register CHCFG[%s] `reset()`'s with value 0"] impl crate::ResetValue for super::CHCFG { type Type = u32; #[inline(always)] fn reset_value() -> Self::Type { 0 } } #[doc = "Reader of field `SOURCE`"] pub type SOURCE_R = crate::R<u8, u8>; #[doc = "Write proxy for field `SOURCE`"] pub struct SOURCE_W<'a> { w: &'a mut W, } impl<'a> SOURCE_W<'a> { #[doc = r"Writes raw bits to the field"] #[inline(always)] pub unsafe fn bits(self, value: u8) -> &'a mut W { self.w.bits = (self.w.bits & !0x7f) | ((value as u32) & 0x7f); self.w } } #[doc = "DMA Channel Always Enable\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum A_ON_A { #[doc = "0: DMA Channel Always ON function is disabled"] A_ON_0 = 0, #[doc = "1: DMA Channel Always ON function is enabled"] A_ON_1 = 1, } impl From<A_ON_A> for bool { #[inline(always)] fn from(variant: A_ON_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `A_ON`"] pub type A_ON_R = crate::R<bool, A_ON_A>; impl A_ON_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> A_ON_A { match self.bits { false => A_ON_A::A_ON_0, true => A_ON_A::A_ON_1, } } #[doc = "Checks if the value of the field is `A_ON_0`"] #[inline(always)] pub fn is_a_on_0(&self) -> bool { *self == A_ON_A::A_ON_0 } #[doc = "Checks if the value of the field is `A_ON_1`"] #[inline(always)] pub fn is_a_on_1(&self) -> bool { *self == A_ON_A::A_ON_1 } } #[doc = "Write proxy for field `A_ON`"] pub struct A_ON_W<'a> { w: &'a mut W, } impl<'a> A_ON_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: A_ON_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "DMA Channel Always ON function is disabled"] #[inline(always)] pub fn a_on_0(self) -> &'a mut W { self.variant(A_ON_A::A_ON_0) } #[doc = "DMA Channel Always 
ON function is enabled"] #[inline(always)] pub fn a_on_1(self) -> &'a mut W { self.variant(A_ON_A::A_ON_1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 29)) | (((value as u32) & 0x01) << 29); self.w } } #[doc = "DMA Channel Trigger Enable\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum TRIG_A { #[doc = "0: Triggering is disabled. If triggering is disabled and ENBL is set, the DMA Channel will simply route the specified source to the DMA channel. (Normal mode)"] TRIG_0 = 0, #[doc = "1: Triggering is enabled. If triggering is enabled and ENBL is set, the DMA_CH_MUX is in Periodic Trigger mode."] TRIG_1 = 1, } impl From<TRIG_A> for bool { #[inline(always)] fn from(variant: TRIG_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `TRIG`"] pub type TRIG_R = crate::R<bool, TRIG_A>; impl TRIG_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> TRIG_A { match self.bits { false => TRIG_A::TRIG_0, true => TRIG_A::TRIG_1, } } #[doc = "Checks if the value of the field is `TRIG_0`"] #[inline(always)] pub fn is_trig_0(&self) -> bool { *self == TRIG_A::TRIG_0 } #[doc = "Checks if the value of the field is `TRIG_1`"] #[inline(always)] pub fn is_trig_1(&self) -> bool { *self == TRIG_A::TRIG_1 } } #[doc = "Write proxy for field `TRIG`"] pub struct TRIG_W<'a> { w: &'a mut W, } impl<'a> TRIG_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: TRIG_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "Triggering is disabled. If triggering is disabled and ENBL is set, the DMA Channel will simply route the specified source to the DMA channel.
L_0) } #[doc = "DMA Mux channel is enabled"] #[inline(always)] pub fn enbl_1(self) -> &'a mut W { self.variant(ENBL_A::ENBL_1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31); self.w } } impl R { #[doc = "Bits 0:6 - DMA Channel Source (Slot Number)"] #[inline(always)] pub fn source(&self) -> SOURCE_R { SOURCE_R::new((self.bits & 0x7f) as u8) } #[doc = "Bit 29 - DMA Channel Always Enable"] #[inline(always)] pub fn a_on(&self) -> A_ON_R { A_ON_R::new(((self.bits >> 29) & 0x01) != 0) } #[doc = "Bit 30 - DMA Channel Trigger Enable"] #[inline(always)] pub fn trig(&self) -> TRIG_R { TRIG_R::new(((self.bits >> 30) & 0x01) != 0) } #[doc = "Bit 31 - DMA Mux Channel Enable"] #[inline(always)] pub fn enbl(&self) -> ENBL_R { ENBL_R::new(((self.bits >> 31) & 0x01) != 0) } } impl W { #[doc = "Bits 0:6 - DMA Channel Source (Slot Number)"] #[inline(always)] pub fn source(&mut self) -> SOURCE_W { SOURCE_W { w: self } } #[doc = "Bit 29 - DMA Channel Always Enable"] #[inline(always)] pub fn a_on(&mut self) -> A_ON_W { A_ON_W { w: self } } #[doc = "Bit 30 - DMA Channel Trigger Enable"] #[inline(always)] pub fn trig(&mut self) -> TRIG_W { TRIG_W { w: self } } #[doc = "Bit 31 - DMA Mux Channel Enable"] #[inline(always)] pub fn enbl(&mut self) -> ENBL_W { ENBL_W { w: self } } }
(Normal mode)"] #[inline(always)] pub fn trig_0(self) -> &'a mut W { self.variant(TRIG_A::TRIG_0) } #[doc = "Triggering is enabled. If triggering is enabled and ENBL is set, the DMA_CH_MUX is in Periodic Trigger mode."] #[inline(always)] pub fn trig_1(self) -> &'a mut W { self.variant(TRIG_A::TRIG_1) } #[doc = r"Sets the field bit"] #[inline(always)] pub fn set_bit(self) -> &'a mut W { self.bit(true) } #[doc = r"Clears the field bit"] #[inline(always)] pub fn clear_bit(self) -> &'a mut W { self.bit(false) } #[doc = r"Writes raw bits to the field"] #[inline(always)] pub fn bit(self, value: bool) -> &'a mut W { self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30); self.w } } #[doc = "DMA Mux Channel Enable\n\nValue on reset: 0"] #[derive(Clone, Copy, Debug, PartialEq)] pub enum ENBL_A { #[doc = "0: DMA Mux channel is disabled"] ENBL_0 = 0, #[doc = "1: DMA Mux channel is enabled"] ENBL_1 = 1, } impl From<ENBL_A> for bool { #[inline(always)] fn from(variant: ENBL_A) -> Self { variant as u8 != 0 } } #[doc = "Reader of field `ENBL`"] pub type ENBL_R = crate::R<bool, ENBL_A>; impl ENBL_R { #[doc = r"Get enumerated values variant"] #[inline(always)] pub fn variant(&self) -> ENBL_A { match self.bits { false => ENBL_A::ENBL_0, true => ENBL_A::ENBL_1, } } #[doc = "Checks if the value of the field is `ENBL_0`"] #[inline(always)] pub fn is_enbl_0(&self) -> bool { *self == ENBL_A::ENBL_0 } #[doc = "Checks if the value of the field is `ENBL_1`"] #[inline(always)] pub fn is_enbl_1(&self) -> bool { *self == ENBL_A::ENBL_1 } } #[doc = "Write proxy for field `ENBL`"] pub struct ENBL_W<'a> { w: &'a mut W, } impl<'a> ENBL_W<'a> { #[doc = r"Writes `variant` to the field"] #[inline(always)] pub fn variant(self, variant: ENBL_A) -> &'a mut W { { self.bit(variant.into()) } } #[doc = "DMA Mux channel is disabled"] #[inline(always)] pub fn enbl_0(self) -> &'a mut W { self.variant(ENBL_A::ENB
random
[ { "content": "/// Migrate the `lib.rs` of the PAC subscrate, adding\n\n/// our necessary header to the top of the file.\n\nfn write_lib<R: Read>(crate_path: &Path, mut src: R) {\n\n static LIB_PRELUDE: &str = r#\"#![deny(warnings)]\n\n#![allow(non_camel_case_types)]\n\n#![allow(clippy::all)]\n\n#![no_std]\n\n\n\nmod generic;\n\npub use generic::*;\n\n\n\n\"#;\n\n let mut crate_lib =\n\n fs::File::create(crate_path.join(\"src\").join(\"lib.rs\")).expect(\"Unable to create lib.rs\");\n\n crate_lib\n\n .write_all(LIB_PRELUDE.as_bytes())\n\n .expect(\"Unable to write lib.rs prelude\");\n\n io::copy(&mut src, &mut crate_lib).unwrap();\n\n}\n\n\n", "file_path": "tools/import/src/main.rs", "rank": 0, "score": 294576.1012728157 }, { "content": "/// Writes the buffer of data to the USB host\n\n///\n\n/// TODO error handling, return the number of bytes written, etc.\n\npub fn serial_write<B: AsRef<[u8]>>(buffer: &B) {\n\n unsafe {\n\n let buffer = buffer.as_ref();\n\n usb_serial_write(buffer.as_ptr(), buffer.len() as u32);\n\n }\n\n}\n", "file_path": "teensy4-bsp/teensy4-usb-sys/src/lib.rs", "rank": 1, "score": 268487.44199800125 }, { "content": "/// Chain two timers together, returning a `ChainedPIT` timer that can\n\n/// count twice as many ticks.\n\n///\n\n/// The API enforces that channel 1 is chained to channel 0, or channel 2 is\n\n/// chained to channel 1, or channel 3 is chained to channel 2. 
Any other\n\n/// combination of chaining is prevented by the compiler.\n\n///\n\n/// We do not support chaining more than two timers.\n\npub fn chain<C1: channel::Channel>(\n\n lower: PIT<<C1 as channel::Channel>::ChainedTo>,\n\n upper: PIT<C1>,\n\n) -> ChainedPIT<<C1 as channel::Channel>::ChainedTo, C1> {\n\n ChainedPIT { lower, upper }\n\n}\n\n\n\nimpl<C0, C1> CountDown for ChainedPIT<C0, C1>\n\nwhere\n\n C0: channel::Channel,\n\n C1: channel::Channel,\n\n{\n\n type Time = core::time::Duration;\n\n fn start<T: Into<Self::Time>>(&mut self, time: T) {\n\n // clock_hz and divider are equal across all PITs\n\n let ticks: Ticks<u64> = match ticks(time.into(), self.lower.clock_hz, self.lower.divider) {\n\n Ok(ticks) => ticks,\n\n // Saturate the load value\n\n Err(TicksError::TicksOverflow) | Err(TicksError::DurationOverflow) => {\n\n Ticks(core::u64::MAX)\n", "file_path": "imxrt1062-hal/src/pit.rs", "rank": 2, "score": 263957.0552783213 }, { "content": "/// Computes the number of clock ticks that span the provide duration, given\n\n/// the clock frequency and clock divider. If there is no divider, use `Divider::default()`\n\n/// to specify an unused divider. 
Returns `Ok(ticks)` when the computation of\n\n/// clock ticks succeeds, or an error.\n\npub fn ticks<R: TicksRepr>(\n\n dur: Duration,\n\n freq: Frequency,\n\n div: Divider,\n\n) -> Result<Ticks<R>, TicksError> {\n\n // Ticks computed as\n\n //\n\n // ticks = (duration / clock_period) - 1\n\n //\n\n // where `clock_period` is the effective clock period: `freq / div`\n\n let delay_ns = u64::try_from(dur.as_nanos()).map_err(|_| TicksError::DurationOverflow)?;\n\n let effective_freq = freq\n\n .0\n\n .checked_div(div.0)\n\n .ok_or(TicksError::DurationOverflow)?;\n\n let clock_period_ns = 1_000_000_000u32\n\n .checked_div(effective_freq)\n\n .map(u64::from)\n\n .ok_or(TicksError::DivideByZero)?;\n\n delay_ns\n", "file_path": "imxrt1062-hal/src/ccm.rs", "rank": 3, "score": 244451.1881476182 }, { "content": "#[inline(always)]\n\nfn reg3_trg(mv: u32) -> u8 {\n\n ((mv - 800) / 25) as u8\n\n}\n\n\n", "file_path": "imxrt1062-hal/src/ccm/arm_clock.rs", "rank": 4, "score": 237595.02083895146 }, { "content": "/// Sets the main system clock to as close to `hz` as possible.\n\n/// Returns the `(ARM, IPG)` clock frequencies based on the input frequency\n\n/// and selected prescalars.\n\npub fn set_arm_clock(\n\n mut hz: u32,\n\n ccm: &pac::CCM,\n\n ccm_analog: &pac::CCM_ANALOG,\n\n dcdc: &pac::DCDC,\n\n) -> (u32, u32) {\n\n let millivolts: u32 = if hz > 528_000_000 {\n\n 1250 // 1.25V\n\n } else if hz <= 24_000_000 {\n\n 950 // 0.95V\n\n } else {\n\n 1150 // 1.15V, default\n\n };\n\n\n\n // Enable clocks to the DCDC module\n\n // Safety: CG3 field is two bits\n\n ccm.ccgr6.modify(|_, w| unsafe { w.cg3().bits(0x3) });\n\n\n\n // Set VDD_SOC, voltage for the chip\n\n if dcdc.reg3.read().trg().bits() < reg3_trg(millivolts) {\n", "file_path": "imxrt1062-hal/src/ccm/arm_clock.rs", "rank": 5, "score": 237127.49235670257 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also 
directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "tools/import/src/generic.rs", "rank": 6, "score": 226075.04879826328 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-csu/src/generic.rs", "rank": 7, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the 
register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-usbphy1/src/generic.rs", "rank": 8, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-semc/src/generic.rs", "rank": 9, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct 
Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-pgc/src/generic.rs", "rank": 10, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-enc1/src/generic.rs", "rank": 11, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for 
Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-iomuxc/src/generic.rs", "rank": 12, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-csi/src/generic.rs", "rank": 13, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of 
`Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-pmu/src/generic.rs", "rank": 14, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-lpspi1/src/generic.rs", "rank": 15, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": 
"imxrt1062-pac/imxrt1062-trng/src/generic.rs", "rank": 16, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-tsc/src/generic.rs", "rank": 17, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-ccm/src/generic.rs", "rank": 18, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the 
register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-flexio1/src/generic.rs", "rank": 19, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-lcdif/src/generic.rs", "rank": 20, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly 
writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-usb1/src/generic.rs", "rank": 21, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-tempmon/src/generic.rs", "rank": 22, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset 
value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-flexram/src/generic.rs", "rank": 23, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-lpi2c1/src/generic.rs", "rank": 24, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to 
register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-xtalosc24m/src/generic.rs", "rank": 25, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-xbarb2/src/generic.rs", "rank": 26, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: 
marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-kpp/src/generic.rs", "rank": 27, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-adc1/src/generic.rs", "rank": 28, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: 
Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-dmamux/src/generic.rs", "rank": 29, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-spdif/src/generic.rs", "rank": 30, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read 
the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-aipstz1/src/generic.rs", "rank": 31, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-can1/src/generic.rs", "rank": 32, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-dcp/src/generic.rs", "rank": 33, "score": 
218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-lpuart1/src/generic.rs", "rank": 34, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-wdog1/src/generic.rs", "rank": 35, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial 
value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-usdhc1/src/generic.rs", "rank": 36, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-bee/src/generic.rs", "rank": 37, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue 
{\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-ewm/src/generic.rs", "rank": 38, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-gpc/src/generic.rs", "rank": 39, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> 
Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-dma0/src/generic.rs", "rank": 40, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-usbnc1/src/generic.rs", "rank": 41, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: 
vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-pwm1/src/generic.rs", "rank": 42, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-cmp1/src/generic.rs", "rank": 43, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, 
REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-snvs/src/generic.rs", "rank": 44, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-enet/src/generic.rs", "rank": 45, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = 
\"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-gpio1/src/generic.rs", "rank": 46, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-ocotp/src/generic.rs", "rank": 47, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": 
"imxrt1062-pac/imxrt1062-rtwdog/src/generic.rs", "rank": 48, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-tmr1/src/generic.rs", "rank": 49, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-romc/src/generic.rs", "rank": 50, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the 
register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-sai1/src/generic.rs", "rank": 51, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-aoi1/src/generic.rs", "rank": 52, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly 
writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-pxp/src/generic.rs", "rank": 53, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-dcdc/src/generic.rs", "rank": 54, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset 
value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-flexspi/src/generic.rs", "rank": 55, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-can3/src/generic.rs", "rank": 56, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to 
register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-xbara1/src/generic.rs", "rank": 57, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-src/src/generic.rs", "rank": 58, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe 
impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-pit/src/generic.rs", "rank": 59, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-gpt1/src/generic.rs", "rank": 60, "score": 218303.81705612916 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = 
\"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-usb-analog/src/generic.rs", "rank": 61, "score": 214636.3398688799 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-iomuxc-gpr/src/generic.rs", "rank": 62, "score": 214636.3398688799 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in 
such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-ccm-analog/src/generic.rs", "rank": 63, "score": 214636.3398688799 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-adc-etc/src/generic.rs", "rank": 64, "score": 214636.3398688799 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-system-control/src/generic.rs", "rank": 65, "score": 214636.3398688799 }, 
{ "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-iomuxc-snvs/src/generic.rs", "rank": 66, "score": 214636.3398688799 }, { "content": "#[doc = \"Reset value of the register\"]\n\n#[doc = \"\"]\n\n#[doc = \"This value is initial value for `write` method.\"]\n\n#[doc = \"It can be also directly writed to register by `reset` method.\"]\n\npub trait ResetValue {\n\n #[doc = \"Register size\"]\n\n type Type;\n\n #[doc = \"Reset value of the register\"]\n\n fn reset_value() -> Self::Type;\n\n}\n\n#[doc = \"This structure provides volatile access to register\"]\n\npub struct Reg<U, REG> {\n\n register: vcell::VolatileCell<U>,\n\n _marker: marker::PhantomData<REG>,\n\n}\n\nunsafe impl<U: Send, REG> Send for Reg<U, REG> {}\n\nimpl<U, REG> Reg<U, REG>\n\nwhere\n\n Self: Readable,\n\n U: Copy,\n\n{\n\n #[doc = \"Reads the contents of `Readable` register\"]\n\n #[doc = \"\"]\n\n #[doc = \"You can read the contents of a register in such way:\"]\n", "file_path": "imxrt1062-pac/imxrt1062-iomuxc-snvs-gpr/src/generic.rs", "rank": 67, "score": 211103.60281969025 }, { "content": " pub trait Channel {\n\n const IDX: usize;\n\n type ChainedTo: Channel;\n\n }\n\n\n\n impl Channel for 
_X {\n\n const IDX: usize = core::usize::MAX;\n\n type ChainedTo = _X;\n\n }\n\n\n\n impl Channel for _0 {\n\n const IDX: usize = 0;\n\n type ChainedTo = _X;\n\n }\n\n impl Channel for _1 {\n\n const IDX: usize = 1;\n\n type ChainedTo = _0;\n\n }\n\n impl Channel for _2 {\n\n const IDX: usize = 2;\n", "file_path": "imxrt1062-hal/src/pit.rs", "rank": 68, "score": 183588.53028671257 }, { "content": "type CatchAll = toml::value::Value;\n\n\n\n/// Support for Cargo.toml workspaces\n\nmod workspace {\n\n use std::path::Path;\n\n\n\n #[derive(serde::Deserialize, serde::Serialize)]\n\n struct Table {\n\n members: Vec<String>,\n\n exclude: Vec<String>,\n\n }\n\n\n\n /// A `serde` serializable and deserializable definition of a Cargo workspace\n\n #[derive(serde::Deserialize, serde::Serialize)]\n\n pub struct Workspace {\n\n workspace: Table,\n\n }\n\n\n\n impl Workspace {\n\n /// Add a new member to the workspace, then sort the collection of\n", "file_path": "tools/cargo-toml/src/lib.rs", "rank": 69, "score": 162878.75798978022 }, { "content": "#[doc(hidden)]\n\npub trait IntoRegister {\n\n fn into_reg() -> *const crate::pac::gpio1::RegisterBlock;\n\n}\n\n\n\nimpl IntoRegister for GPIO2 {\n\n fn into_reg() -> *const crate::pac::gpio1::RegisterBlock {\n\n crate::pac::GPIO2::ptr()\n\n }\n\n}\n\n\n\nimpl IntoRegister for GPIO7 {\n\n fn into_reg() -> *const crate::pac::gpio1::RegisterBlock {\n\n crate::pac::GPIO7::ptr()\n\n }\n\n}\n\n\n\nmacro_rules! 
_ios_impl {\n\n ($($io:ident)+) => {\n\n $(\n\n pub struct $io<GPIO, Dir> {\n", "file_path": "imxrt1062-hal/src/gpio.rs", "rank": 70, "score": 160874.62562360297 }, { "content": "fn copy_generic_rs(crate_path: &Path) {\n\n static GENERIC_RS: &[u8] = include_bytes!(\"generic.rs\");\n\n let mut generic_rs = fs::File::create(crate_path.join(\"src\").join(\"generic.rs\"))\n\n .expect(\"Unable to create generic.rs\");\n\n generic_rs\n\n .write_all(GENERIC_RS)\n\n .expect(\"Unable to write generic.rs\");\n\n}\n\n\n", "file_path": "tools/import/src/main.rs", "rank": 71, "score": 147151.36134347925 }, { "content": "#[derive(Clone, Copy)]\n\nstruct Reg(&'static pac::pwm1::RegisterBlock);\n\nimpl core::ops::Deref for Reg {\n\n type Target = pac::pwm1::RegisterBlock;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n self.0\n\n }\n\n}\n\n\n\nimpl Reg {\n\n fn reset_ok<S, F, R>(&mut self, mut act: F) -> R\n\n where\n\n F: FnMut(&pac::pwm1::SM) -> R,\n\n S: submodule::Submodule,\n\n {\n\n let idx: usize = <S as submodule::Submodule>::IDX;\n\n self.0.mctrl.modify(|_, w| unsafe {\n\n // Safety, cldok is 4 bits, idx is bound [0, 4)\n\n w.cldok().bits(1 << idx)\n\n });\n", "file_path": "imxrt1062-hal/src/pwm.rs", "rank": 72, "score": 142485.1575862498 }, { "content": "#[doc = \"Reader of register DEBUG_MODE\"]\n\npub type R = crate::R<u32, super::DEBUG_MODE>;\n\n#[doc = \"Writer for register DEBUG_MODE\"]\n\npub type W = crate::W<u32, super::DEBUG_MODE>;\n\n#[doc = \"Register DEBUG_MODE `reset()`'s with value 0\"]\n\nimpl crate::ResetValue for super::DEBUG_MODE {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0\n\n }\n\n}\n\n#[doc = \"Reader of field `ADC_CONV_VALUE`\"]\n\npub type ADC_CONV_VALUE_R = crate::R<u16, u16>;\n\n#[doc = \"Reader of field `ADC_COCO`\"]\n\npub type ADC_COCO_R = crate::R<bool, bool>;\n\n#[doc = \"Reader of field `EXT_HWTS`\"]\n\npub type EXT_HWTS_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `EXT_HWTS`\"]\n\npub 
struct EXT_HWTS_W<'a> {\n", "file_path": "imxrt1062-pac/imxrt1062-tsc/src/debug_mode.rs", "rank": 73, "score": 135954.9646826203 }, { "content": " w: &'a mut W,\n\n}\n\nimpl<'a> EXT_HWTS_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x1f << 16)) | (((value as u32) & 0x1f) << 16);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Trigger\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum TRIGGER_A {\n\n #[doc = \"0: No hardware trigger signal\"]\n\n TRIGGER_0 = 0,\n\n #[doc = \"1: Hardware trigger signal, the signal must last at least 1 ips clock period\"]\n\n TRIGGER_1 = 1,\n\n}\n\nimpl From<TRIGGER_A> for bool {\n\n #[inline(always)]\n", "file_path": "imxrt1062-pac/imxrt1062-tsc/src/debug_mode.rs", "rank": 74, "score": 135944.41408902488 }, { "content": " pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 26)) | (((value as u32) & 0x01) << 26);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Debug Enable\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum DEBUG_EN_A {\n\n #[doc = \"0: Enable debug mode\"]\n\n DEBUG_EN_0 = 0,\n", "file_path": "imxrt1062-pac/imxrt1062-tsc/src/debug_mode.rs", "rank": 75, "score": 135942.07485464425 }, { "content": " #[doc = \"1: Disable debug mode\"]\n\n DEBUG_EN_1 = 1,\n\n}\n\nimpl From<DEBUG_EN_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: DEBUG_EN_A) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Reader of field `DEBUG_EN`\"]\n\npub type DEBUG_EN_R = crate::R<bool, DEBUG_EN_A>;\n\nimpl DEBUG_EN_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n 
#[inline(always)]\n\n pub fn variant(&self) -> DEBUG_EN_A {\n\n match self.bits {\n\n false => DEBUG_EN_A::DEBUG_EN_0,\n\n true => DEBUG_EN_A::DEBUG_EN_1,\n\n }\n\n }\n", "file_path": "imxrt1062-pac/imxrt1062-tsc/src/debug_mode.rs", "rank": 76, "score": 135924.52022907944 }, { "content": " fn from(variant: TRIGGER_A) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Reader of field `TRIGGER`\"]\n\npub type TRIGGER_R = crate::R<bool, TRIGGER_A>;\n\nimpl TRIGGER_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n\n pub fn variant(&self) -> TRIGGER_A {\n\n match self.bits {\n\n false => TRIGGER_A::TRIGGER_0,\n\n true => TRIGGER_A::TRIGGER_1,\n\n }\n\n }\n\n #[doc = \"Checks if the value of the field is `TRIGGER_0`\"]\n\n #[inline(always)]\n\n pub fn is_trigger_0(&self) -> bool {\n\n *self == TRIGGER_A::TRIGGER_0\n\n }\n", "file_path": "imxrt1062-pac/imxrt1062-tsc/src/debug_mode.rs", "rank": 77, "score": 135923.55804917222 }, { "content": " pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24);\n\n self.w\n\n }\n\n}\n\n#[doc = \"ADC Coco Clear\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum ADC_COCO_CLEAR_A {\n\n #[doc = \"0: No ADC COCO clear\"]\n\n ADC_COCO_CLEAR_0 = 0,\n\n #[doc = \"1: Set ADC COCO clear\"]\n\n ADC_COCO_CLEAR_1 = 1,\n\n}\n\nimpl From<ADC_COCO_CLEAR_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: ADC_COCO_CLEAR_A) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Reader of field `ADC_COCO_CLEAR`\"]\n", "file_path": "imxrt1062-pac/imxrt1062-tsc/src/debug_mode.rs", "rank": 78, "score": 135922.54877663983 }, { "content": " #[doc = \"Checks if the value of the field is `TRIGGER_1`\"]\n\n #[inline(always)]\n\n pub fn is_trigger_1(&self) -> bool {\n\n *self == TRIGGER_A::TRIGGER_1\n\n }\n\n}\n\n#[doc = \"Write proxy for field `TRIGGER`\"]\n\npub struct TRIGGER_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> 
TRIGGER_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn variant(self, variant: TRIGGER_A) -> &'a mut W {\n\n {\n\n self.bit(variant.into())\n\n }\n\n }\n\n #[doc = \"No hardware trigger signal\"]\n\n #[inline(always)]\n", "file_path": "imxrt1062-pac/imxrt1062-tsc/src/debug_mode.rs", "rank": 79, "score": 135913.3943103638 }, { "content": " pub fn adc_coco_clear_1(self) -> &'a mut W {\n\n self.variant(ADC_COCO_CLEAR_A::ADC_COCO_CLEAR_1)\n\n }\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 25)) | (((value as u32) & 0x01) << 25);\n\n self.w\n\n }\n\n}\n", "file_path": "imxrt1062-pac/imxrt1062-tsc/src/debug_mode.rs", "rank": 80, "score": 135911.8651066354 }, { "content": " self.bit(variant.into())\n\n }\n\n }\n\n #[doc = \"Enable debug mode\"]\n\n #[inline(always)]\n\n pub fn debug_en_0(self) -> &'a mut W {\n\n self.variant(DEBUG_EN_A::DEBUG_EN_0)\n\n }\n\n #[doc = \"Disable debug mode\"]\n\n #[inline(always)]\n\n pub fn debug_en_1(self) -> &'a mut W {\n\n self.variant(DEBUG_EN_A::DEBUG_EN_1)\n\n }\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n", "file_path": "imxrt1062-pac/imxrt1062-tsc/src/debug_mode.rs", "rank": 81, "score": 135911.74492473315 }, { "content": " pub fn trigger_0(self) -> &'a mut W {\n\n self.variant(TRIGGER_A::TRIGGER_0)\n\n }\n\n #[doc = \"Hardware trigger signal, the signal must last at least 1 ips clock period\"]\n\n #[inline(always)]\n\n pub fn trigger_1(self) -> &'a mut W {\n\n 
self.variant(TRIGGER_A::TRIGGER_1)\n\n }\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n", "file_path": "imxrt1062-pac/imxrt1062-tsc/src/debug_mode.rs", "rank": 82, "score": 135909.386126213 }, { "content": " #[doc = \"Checks if the value of the field is `DEBUG_EN_0`\"]\n\n #[inline(always)]\n\n pub fn is_debug_en_0(&self) -> bool {\n\n *self == DEBUG_EN_A::DEBUG_EN_0\n\n }\n\n #[doc = \"Checks if the value of the field is `DEBUG_EN_1`\"]\n\n #[inline(always)]\n\n pub fn is_debug_en_1(&self) -> bool {\n\n *self == DEBUG_EN_A::DEBUG_EN_1\n\n }\n\n}\n\n#[doc = \"Write proxy for field `DEBUG_EN`\"]\n\npub struct DEBUG_EN_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> DEBUG_EN_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn variant(self, variant: DEBUG_EN_A) -> &'a mut W {\n\n {\n", "file_path": "imxrt1062-pac/imxrt1062-tsc/src/debug_mode.rs", "rank": 83, "score": 135909.05414242536 }, { "content": " pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 28)) | (((value as u32) & 0x01) << 28);\n\n self.w\n\n }\n\n}\n\nimpl R {\n\n #[doc = \"Bits 0:11 - ADC Conversion Value\"]\n\n #[inline(always)]\n\n pub fn adc_conv_value(&self) -> ADC_CONV_VALUE_R {\n\n ADC_CONV_VALUE_R::new((self.bits & 0x0fff) as u16)\n\n }\n\n #[doc = \"Bit 12 - ADC COCO Signal\"]\n\n #[inline(always)]\n\n pub fn adc_coco(&self) -> ADC_COCO_R {\n\n ADC_COCO_R::new(((self.bits >> 12) & 0x01) != 0)\n", "file_path": "imxrt1062-pac/imxrt1062-tsc/src/debug_mode.rs", "rank": 84, "score": 135908.87255223357 }, { "content": " match 
self.bits {\n\n false => ADC_COCO_CLEAR_DISABLE_A::ADC_COCO_CLEAR_DISABLE_0,\n\n true => ADC_COCO_CLEAR_DISABLE_A::ADC_COCO_CLEAR_DISABLE_1,\n\n }\n\n }\n\n #[doc = \"Checks if the value of the field is `ADC_COCO_CLEAR_DISABLE_0`\"]\n\n #[inline(always)]\n\n pub fn is_adc_coco_clear_disable_0(&self) -> bool {\n\n *self == ADC_COCO_CLEAR_DISABLE_A::ADC_COCO_CLEAR_DISABLE_0\n\n }\n\n #[doc = \"Checks if the value of the field is `ADC_COCO_CLEAR_DISABLE_1`\"]\n\n #[inline(always)]\n\n pub fn is_adc_coco_clear_disable_1(&self) -> bool {\n\n *self == ADC_COCO_CLEAR_DISABLE_A::ADC_COCO_CLEAR_DISABLE_1\n\n }\n\n}\n\n#[doc = \"Write proxy for field `ADC_COCO_CLEAR_DISABLE`\"]\n\npub struct ADC_COCO_CLEAR_DISABLE_W<'a> {\n\n w: &'a mut W,\n\n}\n", "file_path": "imxrt1062-pac/imxrt1062-tsc/src/debug_mode.rs", "rank": 85, "score": 135907.22315498244 }, { "content": "#[doc = \"ADC COCO Clear Disable\\n\\nValue on reset: 0\"]\n\n#[derive(Clone, Copy, Debug, PartialEq)]\n\npub enum ADC_COCO_CLEAR_DISABLE_A {\n\n #[doc = \"0: Allow TSC hardware generates ADC COCO clear\"]\n\n ADC_COCO_CLEAR_DISABLE_0 = 0,\n\n #[doc = \"1: Prevent TSC from generate ADC COCO clear signal\"]\n\n ADC_COCO_CLEAR_DISABLE_1 = 1,\n\n}\n\nimpl From<ADC_COCO_CLEAR_DISABLE_A> for bool {\n\n #[inline(always)]\n\n fn from(variant: ADC_COCO_CLEAR_DISABLE_A) -> Self {\n\n variant as u8 != 0\n\n }\n\n}\n\n#[doc = \"Reader of field `ADC_COCO_CLEAR_DISABLE`\"]\n\npub type ADC_COCO_CLEAR_DISABLE_R = crate::R<bool, ADC_COCO_CLEAR_DISABLE_A>;\n\nimpl ADC_COCO_CLEAR_DISABLE_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n\n pub fn variant(&self) -> ADC_COCO_CLEAR_DISABLE_A {\n", "file_path": "imxrt1062-pac/imxrt1062-tsc/src/debug_mode.rs", "rank": 86, "score": 135905.8886428058 }, { "content": " }\n\n #[doc = \"Bit 28 - Debug Enable\"]\n\n #[inline(always)]\n\n pub fn debug_en(&self) -> DEBUG_EN_R {\n\n DEBUG_EN_R::new(((self.bits >> 28) & 0x01) != 0)\n\n }\n\n}\n\nimpl W {\n\n #[doc = 
\"Bits 16:20 - Hardware Trigger Select Signal\"]\n\n #[inline(always)]\n\n pub fn ext_hwts(&mut self) -> EXT_HWTS_W {\n\n EXT_HWTS_W { w: self }\n\n }\n\n #[doc = \"Bit 24 - Trigger\"]\n\n #[inline(always)]\n\n pub fn trigger(&mut self) -> TRIGGER_W {\n\n TRIGGER_W { w: self }\n\n }\n\n #[doc = \"Bit 25 - ADC Coco Clear\"]\n\n #[inline(always)]\n", "file_path": "imxrt1062-pac/imxrt1062-tsc/src/debug_mode.rs", "rank": 87, "score": 135905.13900438353 }, { "content": "pub type ADC_COCO_CLEAR_R = crate::R<bool, ADC_COCO_CLEAR_A>;\n\nimpl ADC_COCO_CLEAR_R {\n\n #[doc = r\"Get enumerated values variant\"]\n\n #[inline(always)]\n\n pub fn variant(&self) -> ADC_COCO_CLEAR_A {\n\n match self.bits {\n\n false => ADC_COCO_CLEAR_A::ADC_COCO_CLEAR_0,\n\n true => ADC_COCO_CLEAR_A::ADC_COCO_CLEAR_1,\n\n }\n\n }\n\n #[doc = \"Checks if the value of the field is `ADC_COCO_CLEAR_0`\"]\n\n #[inline(always)]\n\n pub fn is_adc_coco_clear_0(&self) -> bool {\n\n *self == ADC_COCO_CLEAR_A::ADC_COCO_CLEAR_0\n\n }\n\n #[doc = \"Checks if the value of the field is `ADC_COCO_CLEAR_1`\"]\n\n #[inline(always)]\n\n pub fn is_adc_coco_clear_1(&self) -> bool {\n\n *self == ADC_COCO_CLEAR_A::ADC_COCO_CLEAR_1\n\n }\n", "file_path": "imxrt1062-pac/imxrt1062-tsc/src/debug_mode.rs", "rank": 88, "score": 135892.35444477026 }, { "content": " pub fn adc_coco_clear(&mut self) -> ADC_COCO_CLEAR_W {\n\n ADC_COCO_CLEAR_W { w: self }\n\n }\n\n #[doc = \"Bit 26 - ADC COCO Clear Disable\"]\n\n #[inline(always)]\n\n pub fn adc_coco_clear_disable(&mut self) -> ADC_COCO_CLEAR_DISABLE_W {\n\n ADC_COCO_CLEAR_DISABLE_W { w: self }\n\n }\n\n #[doc = \"Bit 28 - Debug Enable\"]\n\n #[inline(always)]\n\n pub fn debug_en(&mut self) -> DEBUG_EN_W {\n\n DEBUG_EN_W { w: self }\n\n }\n\n}\n", "file_path": "imxrt1062-pac/imxrt1062-tsc/src/debug_mode.rs", "rank": 89, "score": 135891.4824246475 }, { "content": "impl<'a> ADC_COCO_CLEAR_DISABLE_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn 
variant(self, variant: ADC_COCO_CLEAR_DISABLE_A) -> &'a mut W {\n\n {\n\n self.bit(variant.into())\n\n }\n\n }\n\n #[doc = \"Allow TSC hardware generates ADC COCO clear\"]\n\n #[inline(always)]\n\n pub fn adc_coco_clear_disable_0(self) -> &'a mut W {\n\n self.variant(ADC_COCO_CLEAR_DISABLE_A::ADC_COCO_CLEAR_DISABLE_0)\n\n }\n\n #[doc = \"Prevent TSC from generate ADC COCO clear signal\"]\n\n #[inline(always)]\n\n pub fn adc_coco_clear_disable_1(self) -> &'a mut W {\n\n self.variant(ADC_COCO_CLEAR_DISABLE_A::ADC_COCO_CLEAR_DISABLE_1)\n\n }\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n", "file_path": "imxrt1062-pac/imxrt1062-tsc/src/debug_mode.rs", "rank": 90, "score": 135889.8449939859 }, { "content": "}\n\n#[doc = \"Write proxy for field `ADC_COCO_CLEAR`\"]\n\npub struct ADC_COCO_CLEAR_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ADC_COCO_CLEAR_W<'a> {\n\n #[doc = r\"Writes `variant` to the field\"]\n\n #[inline(always)]\n\n pub fn variant(self, variant: ADC_COCO_CLEAR_A) -> &'a mut W {\n\n {\n\n self.bit(variant.into())\n\n }\n\n }\n\n #[doc = \"No ADC COCO clear\"]\n\n #[inline(always)]\n\n pub fn adc_coco_clear_0(self) -> &'a mut W {\n\n self.variant(ADC_COCO_CLEAR_A::ADC_COCO_CLEAR_0)\n\n }\n\n #[doc = \"Set ADC COCO clear\"]\n\n #[inline(always)]\n", "file_path": "imxrt1062-pac/imxrt1062-tsc/src/debug_mode.rs", "rank": 91, "score": 135889.33558664302 }, { "content": " }\n\n #[doc = \"Bits 16:20 - Hardware Trigger Select Signal\"]\n\n #[inline(always)]\n\n pub fn ext_hwts(&self) -> EXT_HWTS_R {\n\n EXT_HWTS_R::new(((self.bits >> 16) & 0x1f) as u8)\n\n }\n\n #[doc = \"Bit 24 - Trigger\"]\n\n #[inline(always)]\n\n pub fn trigger(&self) -> TRIGGER_R {\n\n TRIGGER_R::new(((self.bits >> 24) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 25 - ADC Coco Clear\"]\n\n #[inline(always)]\n\n pub fn adc_coco_clear(&self) -> ADC_COCO_CLEAR_R {\n\n ADC_COCO_CLEAR_R::new(((self.bits >> 25) & 0x01) != 0)\n\n }\n\n #[doc = \"Bit 26 - ADC COCO Clear Disable\"]\n\n 
#[inline(always)]\n\n pub fn adc_coco_clear_disable(&self) -> ADC_COCO_CLEAR_DISABLE_R {\n\n ADC_COCO_CLEAR_DISABLE_R::new(((self.bits >> 26) & 0x01) != 0)\n", "file_path": "imxrt1062-pac/imxrt1062-tsc/src/debug_mode.rs", "rank": 92, "score": 135885.07103135745 }, { "content": "#[doc = \"Reader of register DEBUG_SET\"]\n\npub type R = crate::R<u32, super::DEBUG_SET>;\n\n#[doc = \"Writer for register DEBUG_SET\"]\n\npub type W = crate::W<u32, super::DEBUG_SET>;\n\n#[doc = \"Register DEBUG_SET `reset()`'s with value 0x7f18_0000\"]\n\nimpl crate::ResetValue for super::DEBUG_SET {\n\n type Type = u32;\n\n #[inline(always)]\n\n fn reset_value() -> Self::Type {\n\n 0x7f18_0000\n\n }\n\n}\n\n#[doc = \"Reader of field `OTGIDPIOLOCK`\"]\n\npub type OTGIDPIOLOCK_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `OTGIDPIOLOCK`\"]\n\npub struct OTGIDPIOLOCK_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> OTGIDPIOLOCK_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n", "file_path": "imxrt1062-pac/imxrt1062-usbphy1/src/debug_set.rs", "rank": 93, "score": 135804.84440256841 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RSVD1`\"]\n\npub type RSVD1_R = crate::R<u8, u8>;\n\n#[doc = \"Reader of field `SQUELCHRESETCOUNT`\"]\n\npub type SQUELCHRESETCOUNT_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `SQUELCHRESETCOUNT`\"]\n\npub struct SQUELCHRESETCOUNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SQUELCHRESETCOUNT_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n", "file_path": "imxrt1062-pac/imxrt1062-usbphy1/src/debug_set.rs", "rank": 94, "score": 135781.37571088705 }, { "content": " }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn 
bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 29)) | (((value as u32) & 0x01) << 29);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `CLKGATE`\"]\n\npub type CLKGATE_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `CLKGATE`\"]\n\npub struct CLKGATE_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> CLKGATE_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n", "file_path": "imxrt1062-pac/imxrt1062-usbphy1/src/debug_set.rs", "rank": 95, "score": 135780.1655170982 }, { "content": " pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `SQUELCHRESETLENGTH`\"]\n\npub type SQUELCHRESETLENGTH_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `SQUELCHRESETLENGTH`\"]\n\npub struct SQUELCHRESETLENGTH_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> SQUELCHRESETLENGTH_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n", "file_path": "imxrt1062-pac/imxrt1062-usbphy1/src/debug_set.rs", "rank": 96, "score": 135779.56406438185 }, { "content": "impl<'a> ENHSTPULLDOWN_W<'a> {\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !(0x03 << 4)) | (((value as u32) & 0x03) << 4);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RSVD0`\"]\n\npub type RSVD0_R = crate::R<u8, u8>;\n\n#[doc = \"Reader of field `TX2RXCOUNT`\"]\n\npub type TX2RXCOUNT_R = crate::R<u8, u8>;\n\n#[doc = \"Write proxy for field `TX2RXCOUNT`\"]\n\npub struct TX2RXCOUNT_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> TX2RXCOUNT_W<'a> {\n\n #[doc = 
r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub unsafe fn bits(self, value: u8) -> &'a mut W {\n", "file_path": "imxrt1062-pac/imxrt1062-usbphy1/src/debug_set.rs", "rank": 97, "score": 135778.8955246047 }, { "content": " #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n\n pub fn clear_bit(self) -> &'a mut W {\n\n self.bit(false)\n\n }\n\n #[doc = r\"Writes raw bits to the field\"]\n\n #[inline(always)]\n\n pub fn bit(self, value: bool) -> &'a mut W {\n\n self.w.bits = (self.w.bits & !0x01) | ((value as u32) & 0x01);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `DEBUG_INTERFACE_HOLD`\"]\n\npub type DEBUG_INTERFACE_HOLD_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `DEBUG_INTERFACE_HOLD`\"]\n\npub struct DEBUG_INTERFACE_HOLD_W<'a> {\n", "file_path": "imxrt1062-pac/imxrt1062-usbphy1/src/debug_set.rs", "rank": 98, "score": 135778.22932420383 }, { "content": " self.w.bits = (self.w.bits & !(0x1f << 16)) | (((value as u32) & 0x1f) << 16);\n\n self.w\n\n }\n\n}\n\n#[doc = \"Reader of field `RSVD2`\"]\n\npub type RSVD2_R = crate::R<u8, u8>;\n\n#[doc = \"Reader of field `ENSQUELCHRESET`\"]\n\npub type ENSQUELCHRESET_R = crate::R<bool, bool>;\n\n#[doc = \"Write proxy for field `ENSQUELCHRESET`\"]\n\npub struct ENSQUELCHRESET_W<'a> {\n\n w: &'a mut W,\n\n}\n\nimpl<'a> ENSQUELCHRESET_W<'a> {\n\n #[doc = r\"Sets the field bit\"]\n\n #[inline(always)]\n\n pub fn set_bit(self) -> &'a mut W {\n\n self.bit(true)\n\n }\n\n #[doc = r\"Clears the field bit\"]\n\n #[inline(always)]\n", "file_path": "imxrt1062-pac/imxrt1062-usbphy1/src/debug_set.rs", "rank": 99, "score": 135778.23385322056 } ]
Rust
all-crate-storage/blob_storage.rs
est31/cargo-local-serve
eabb70eb45ce390d927a781b2a24bbd9101df52f
use std::io::{Read, Write, Seek, SeekFrom, Result as IoResult, ErrorKind}; use std::collections::HashMap; use std::collections::hash_map::Entry; use byteorder::{ReadBytesExt, WriteBytesExt, BigEndian}; use super::hash_ctx::Digest; pub struct BlobStorage<S> { blob_offsets :HashMap<Digest, u64>, pub name_index :HashMap<String, Digest>, pub digest_to_multi_blob :HashMap<Digest, Digest>, storage :S, index_offset :u64, } pub(crate) fn write_delim_byte_slice<W :Write>(mut wtr :W, sl :&[u8]) -> IoResult<()> { try!(wtr.write_u64::<BigEndian>(sl.len() as u64)); try!(wtr.write(sl)); Ok(()) } pub(crate) fn read_delim_byte_slice<R :Read>(mut rdr :R) -> IoResult<Vec<u8>> { let len = try!(rdr.read_u64::<BigEndian>()); let mut res = vec![0; len as usize]; try!(rdr.read_exact(&mut res)); Ok(res) } impl<S :Read + Seek> BlobStorage<S> { pub fn empty(storage :S) -> Self { BlobStorage { name_index : HashMap::new(), digest_to_multi_blob : HashMap::new(), blob_offsets : HashMap::new(), storage, index_offset : 64, } } pub fn new(mut storage :S) -> IoResult<Self> { try!(storage.seek(SeekFrom::Start(0))); match storage.read_u64::<BigEndian>() { Ok(v) if v == BLOB_MAGIC => BlobStorage::load(storage), Ok(_) => panic!("Invalid header"), Err(ref e) if e.kind() == ErrorKind::UnexpectedEof => Ok(BlobStorage::empty(storage)), Err(e) => Err(e), } } pub fn load(mut storage :S) -> IoResult<Self> { try!(storage.seek(SeekFrom::Start(0))); let index_offset = try!(read_hdr(&mut storage)); try!(storage.seek(SeekFrom::Start(index_offset))); let blob_offsets = try!(read_offset_table(&mut storage)); let name_index = try!(read_name_idx(&mut storage)); let digest_to_multi_blob = try!(read_digest_to_multi_blob(&mut storage)); Ok(BlobStorage { blob_offsets, name_index, digest_to_multi_blob, storage, index_offset, }) } pub fn has(&self, digest :&Digest) -> bool { self.blob_offsets.get(digest).is_some() } pub fn get(&mut self, digest :&Digest) -> IoResult<Option<Vec<u8>>> { let blob_offs = match 
self.blob_offsets.get(digest) { Some(d) => *d, None => return Ok(None), }; try!(self.storage.seek(SeekFrom::Start(blob_offs))); let content = try!(read_delim_byte_slice(&mut self.storage)); Ok(Some(content)) } } impl<S :Seek + Write> BlobStorage<S> { pub fn insert_named_blob(&mut self, name :Option<String>, digest :Digest, content :&[u8]) -> IoResult<()> { if let Some(n) = name { self.name_index.insert(n, digest); } try!(self.insert(digest, &content)); Ok(()) } pub fn insert(&mut self, digest :Digest, content :&[u8]) -> IoResult<bool> { let e = self.blob_offsets.entry(digest); match e { Entry::Occupied(_) => return Ok(false), Entry::Vacant(v) => v.insert(self.index_offset), }; try!(self.storage.seek(SeekFrom::Start(self.index_offset))); try!(write_delim_byte_slice(&mut self.storage, content)); self.index_offset = try!(self.storage.seek(SeekFrom::Current(0))); Ok(true) } pub fn write_header_and_index(&mut self) -> IoResult<()> { try!(self.storage.seek(SeekFrom::Start(0))); try!(write_hdr(&mut self.storage, self.index_offset)); try!(self.storage.seek(SeekFrom::Start(self.index_offset))); try!(write_offset_table(&mut self.storage, &self.blob_offsets)); try!(write_name_idx(&mut self.storage, &self.name_index)); try!(write_digest_to_multi_blob(&mut self.storage, &self.digest_to_multi_blob)); Ok(()) } } const BLOB_MAGIC :u64 = 0x42_4C_4F_42_53_54_52_45; fn read_hdr<R :Read>(mut rdr :R) -> IoResult<u64> { let magic = try!(rdr.read_u64::<BigEndian>()); assert_eq!(magic, BLOB_MAGIC); let index_offset = try!(rdr.read_u64::<BigEndian>()); Ok(index_offset) } fn write_hdr<W :Write>(mut wtr :W, index_offset :u64) -> IoResult<()> { try!(wtr.write_u64::<BigEndian>(BLOB_MAGIC)); try!(wtr.write_u64::<BigEndian>(index_offset)); Ok(()) } fn read_offset_table<R :Read>(mut rdr :R) -> IoResult<HashMap<Digest, u64>> { let len = try!(rdr.read_u64::<BigEndian>()); let mut tbl = HashMap::new(); for _ in 0 .. 
len { let mut d :Digest = [0; 32]; try!(rdr.read_exact(&mut d)); let offset = try!(rdr.read_u64::<BigEndian>()); tbl.insert(d, offset); } Ok(tbl) } fn write_offset_table<W :Write>(mut wtr :W, tbl :&HashMap<Digest, u64>) -> IoResult<()> { try!(wtr.write_u64::<BigEndian>(tbl.len() as u64)); for (d, o) in tbl.iter() { try!(wtr.write(d)); try!(wtr.write_u64::<BigEndian>(*o)); } Ok(()) } fn read_name_idx<R :Read>(mut rdr :R) -> IoResult<HashMap<String, Digest>> { let nidx_len = try!(rdr.read_u64::<BigEndian>()); let mut nidx = HashMap::new(); for _ in 0 .. nidx_len { let s_bytes = try!(read_delim_byte_slice(&mut rdr)); let mut d :Digest = [0; 32]; try!(rdr.read_exact(&mut d)); let s = String::from_utf8(s_bytes).unwrap(); nidx.insert(s, d); } Ok(nidx) } fn write_name_idx<W :Write>(mut wtr :W, nidx :&HashMap<String, Digest>) -> IoResult<()> { try!(wtr.write_u64::<BigEndian>(nidx.len() as u64)); for (s,d) in nidx.iter() { try!(write_delim_byte_slice(&mut wtr, s.as_bytes())); try!(wtr.write(d)); } Ok(()) } fn read_digest_to_multi_blob<R :Read>(mut rdr :R) -> IoResult<HashMap<Digest, Digest>> { let res_len = try!(rdr.read_u64::<BigEndian>()); let mut res = HashMap::new(); for _ in 0 .. res_len { let mut d :Digest = [0; 32]; let mut d_multi :Digest = [0; 32]; try!(rdr.read_exact(&mut d)); try!(rdr.read_exact(&mut d_multi)); res.insert(d, d_multi); } Ok(res) } fn write_digest_to_multi_blob<W :Write>(mut wtr :W, nidx :&HashMap<Digest, Digest>) -> IoResult<()> { try!(wtr.write_u64::<BigEndian>(nidx.len() as u64)); for (d, d_multi) in nidx.iter() { try!(wtr.write(d)); try!(wtr.write(d_multi)); } Ok(()) }
use std::io::{Read, Write, Seek, SeekFrom, Result as IoResult, ErrorKind}; use std::collections::HashMap; use std::collections::hash_map::Entry; use byteorder::{ReadBytesExt, WriteBytesExt, BigEndian}; use super::hash_ctx::Digest; pub struct BlobStorage<S> { blob_offsets :HashMap<Digest, u64>, pub name_index :HashMap<String, Digest>, pub digest_to_multi_blob :HashMap<Digest, Digest>, storage :S, index_offset :u64, } pub(crate) fn write_delim_byte_slice<W :Write>(mut wtr :W, sl :&[u8]) -> IoResult<()> { try!(wtr.write_u64::<BigEndian>(sl.len() as u64)); try!(wtr.write(sl)); Ok(()) } pub(crate) fn read_delim_byte_slice<R :Read>(mut rdr :R) -> IoResult<Vec<u8>> { let len = try!(rdr.read_u64::<BigEndian>()); let mut res = vec![0; len as usize]; try!(rdr.read_exact(&mut res)); Ok(res) } impl<S :Read + Seek> BlobStorage<S> { pub fn empty(storage :S) -> Self { BlobStorage { name_index : HashMap::new(), digest_to_multi_blob : HashMap::new(), blob_offsets : HashMap::new(), storage, index_offset : 64, } } pub fn new(mut storage :S) -> IoResult<Self> { try!(storage.seek(SeekFrom::Start(0))); match storage.read_u64::<BigEndian>() { Ok(v) if v == BLOB_MAGIC => BlobStorage::load(storage), Ok(_) => panic!("Invalid header"), Err(ref e) if e.kind() == ErrorKind::UnexpectedEof => Ok(BlobStorage::empty(storage)), Err(e) => Err(e), } } pub fn load(mut storage :S) -> IoResult<Self> { try!(storage.seek(SeekFrom::Start(0))); let index_offset = try!(read_hdr(&mut storage)); try!(storage.seek(SeekFrom::Start(index_offset))); let blob_offsets = try!(read_offset_table(&mut storage)); let name_index = try!(read_name_idx(&mut storage)); let digest_to_multi_blob = try!(read_digest_to_multi_blob(&mut storage)); Ok(BlobStorage { blob_offsets, name_index, dig
&mut d)); let s = String::from_utf8(s_bytes).unwrap(); nidx.insert(s, d); } Ok(nidx) } fn write_name_idx<W :Write>(mut wtr :W, nidx :&HashMap<String, Digest>) -> IoResult<()> { try!(wtr.write_u64::<BigEndian>(nidx.len() as u64)); for (s,d) in nidx.iter() { try!(write_delim_byte_slice(&mut wtr, s.as_bytes())); try!(wtr.write(d)); } Ok(()) } fn read_digest_to_multi_blob<R :Read>(mut rdr :R) -> IoResult<HashMap<Digest, Digest>> { let res_len = try!(rdr.read_u64::<BigEndian>()); let mut res = HashMap::new(); for _ in 0 .. res_len { let mut d :Digest = [0; 32]; let mut d_multi :Digest = [0; 32]; try!(rdr.read_exact(&mut d)); try!(rdr.read_exact(&mut d_multi)); res.insert(d, d_multi); } Ok(res) } fn write_digest_to_multi_blob<W :Write>(mut wtr :W, nidx :&HashMap<Digest, Digest>) -> IoResult<()> { try!(wtr.write_u64::<BigEndian>(nidx.len() as u64)); for (d, d_multi) in nidx.iter() { try!(wtr.write(d)); try!(wtr.write(d_multi)); } Ok(()) }
est_to_multi_blob, storage, index_offset, }) } pub fn has(&self, digest :&Digest) -> bool { self.blob_offsets.get(digest).is_some() } pub fn get(&mut self, digest :&Digest) -> IoResult<Option<Vec<u8>>> { let blob_offs = match self.blob_offsets.get(digest) { Some(d) => *d, None => return Ok(None), }; try!(self.storage.seek(SeekFrom::Start(blob_offs))); let content = try!(read_delim_byte_slice(&mut self.storage)); Ok(Some(content)) } } impl<S :Seek + Write> BlobStorage<S> { pub fn insert_named_blob(&mut self, name :Option<String>, digest :Digest, content :&[u8]) -> IoResult<()> { if let Some(n) = name { self.name_index.insert(n, digest); } try!(self.insert(digest, &content)); Ok(()) } pub fn insert(&mut self, digest :Digest, content :&[u8]) -> IoResult<bool> { let e = self.blob_offsets.entry(digest); match e { Entry::Occupied(_) => return Ok(false), Entry::Vacant(v) => v.insert(self.index_offset), }; try!(self.storage.seek(SeekFrom::Start(self.index_offset))); try!(write_delim_byte_slice(&mut self.storage, content)); self.index_offset = try!(self.storage.seek(SeekFrom::Current(0))); Ok(true) } pub fn write_header_and_index(&mut self) -> IoResult<()> { try!(self.storage.seek(SeekFrom::Start(0))); try!(write_hdr(&mut self.storage, self.index_offset)); try!(self.storage.seek(SeekFrom::Start(self.index_offset))); try!(write_offset_table(&mut self.storage, &self.blob_offsets)); try!(write_name_idx(&mut self.storage, &self.name_index)); try!(write_digest_to_multi_blob(&mut self.storage, &self.digest_to_multi_blob)); Ok(()) } } const BLOB_MAGIC :u64 = 0x42_4C_4F_42_53_54_52_45; fn read_hdr<R :Read>(mut rdr :R) -> IoResult<u64> { let magic = try!(rdr.read_u64::<BigEndian>()); assert_eq!(magic, BLOB_MAGIC); let index_offset = try!(rdr.read_u64::<BigEndian>()); Ok(index_offset) } fn write_hdr<W :Write>(mut wtr :W, index_offset :u64) -> IoResult<()> { try!(wtr.write_u64::<BigEndian>(BLOB_MAGIC)); try!(wtr.write_u64::<BigEndian>(index_offset)); Ok(()) } fn read_offset_table<R 
:Read>(mut rdr :R) -> IoResult<HashMap<Digest, u64>> { let len = try!(rdr.read_u64::<BigEndian>()); let mut tbl = HashMap::new(); for _ in 0 .. len { let mut d :Digest = [0; 32]; try!(rdr.read_exact(&mut d)); let offset = try!(rdr.read_u64::<BigEndian>()); tbl.insert(d, offset); } Ok(tbl) } fn write_offset_table<W :Write>(mut wtr :W, tbl :&HashMap<Digest, u64>) -> IoResult<()> { try!(wtr.write_u64::<BigEndian>(tbl.len() as u64)); for (d, o) in tbl.iter() { try!(wtr.write(d)); try!(wtr.write_u64::<BigEndian>(*o)); } Ok(()) } fn read_name_idx<R :Read>(mut rdr :R) -> IoResult<HashMap<String, Digest>> { let nidx_len = try!(rdr.read_u64::<BigEndian>()); let mut nidx = HashMap::new(); for _ in 0 .. nidx_len { let s_bytes = try!(read_delim_byte_slice(&mut rdr)); let mut d :Digest = [0; 32]; try!(rdr.read_exact(
random
[ { "content": "fn handle_blocking_task<ET :FnMut(ParallelTask), S :Read + Seek + Write>(task :BlockingTask,\n\n\t\tblob_store :&mut BlobStorage<S>, blobs_to_store :&mut HashSet<Digest>,\n\n\t\tmut emit_task :ET) {\n\n\tmatch task {\n\n\t\tBlockingTask::StoreCrateUndeduplicated(crate_file_name, crate_blob) => {\n\n\t\t\t// TODO\n\n\t\t},\n\n\t\tBlockingTask::StoreCrateContentBlobs(crate_file_name, ccb) => {\n\n\t\t\tlet CrateRecMetaWithBlobs { meta, blobs } = ccb.into_meta_with_blobs();\n\n\t\t\tfor entry in blobs {\n\n\t\t\t\tlet entry_digest = entry.0;\n\n\t\t\t\tif blobs_to_store.insert(entry_digest) {\n\n\t\t\t\t\temit_task(ParallelTask::CompressBlob(entry_digest, entry.1));\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\t// emit a blob for meta as well\n\n\t\t\tlet mut meta_blob = Vec::new();\n\n\t\t\tmeta.serialize(&mut meta_blob).unwrap();\n\n\t\t\tlet mut meta_blob_hctx = HashCtx::new();\n\n\t\t\tio::copy(&mut meta_blob.as_slice(), &mut meta_blob_hctx).unwrap();\n", "file_path": "all-crate-storage/blob_crate_storage.rs", "rank": 8, "score": 163820.5505365472 }, { "content": "pub fn get_digest_hex(digest :Digest) -> String {\n\n\thex::encode(&digest)\n\n}\n\n\n", "file_path": "all-crate-storage/hash_ctx.rs", "rank": 9, "score": 152592.50753908613 }, { "content": "pub fn digest_from_hex(digest :&str) -> Option<Digest> {\n\n\tmatch hex::decode(&digest) {\n\n\t\tOk(v) => {\n\n\t\t\tlet mut res = [0; 32];\n\n\t\t\tres.copy_from_slice(&v[..32]);\n\n\t\t\tSome(res)\n\n\t\t},\n\n\t\tErr(_) => None,\n\n\t}\n\n}\n\n\n\n/// SHA-256 hash context that impls Write\n\npub struct HashCtx(Context);\n\n\n\nimpl io::Write for HashCtx {\n\n\tfn write(&mut self, data: &[u8]) -> Result<usize, io::Error> {\n\n\t\tself.0.update(data);\n\n\t\tOk(data.len())\n\n\t}\n\n\tfn flush(&mut self) -> Result<(), io::Error> {\n", "file_path": "all-crate-storage/hash_ctx.rs", "rank": 10, "score": 149659.6898543175 }, { "content": "fn extract_path_from_gz<T :Read>(r :T,\n\n\t\tpath_ex :&str) -> 
Option<Vec<u8>> {\n\n\tlet decoded = GzDecoder::new(r);\n\n\tlet mut archive = Archive::new(decoded);\n\n\tfor entry in otry!(archive.entries()) {\n\n\t\tlet mut entry = otry!(entry);\n\n\t\tlet is_path_ex = if let Some(path) = otry!(entry.path()).to_str() {\n\n\t\t\tpath_ex == path\n\n\t\t} else {\n\n\t\t\tfalse\n\n\t\t};\n\n\t\tif is_path_ex {\n\n\t\t\t// Extract the file\n\n\t\t\tlet mut v = Vec::new();\n\n\t\t\totry!(entry.read_to_end(&mut v));\n\n\t\t\treturn Some(v);\n\n\t\t}\n\n\t}\n\n\treturn None;\n\n}\n", "file_path": "all-crate-storage/crate_storage.rs", "rank": 11, "score": 121429.64942359837 }, { "content": "fn buf_to_index_json(buf :&[u8]) -> io::Result<Vec<CrateIndexJson>> {\n\n\tlet mut r = Vec::new();\n\n\tfor l in buf.lines() {\n\n\t\tlet l = try!(l);\n\n\t\tr.push(try!(from_str(&l)));\n\n\t}\n\n\tOk(r)\n\n}\n\n\n\npub type AllCratesJson = Vec<(String, Vec<CrateIndexJson>)>;\n\n\n\n#[derive(Copy, Clone, Eq, PartialEq, Debug, Fail)]\n\npub enum RegistryErrorKind {\n\n\t#[fail(display = \"Opening Registry failed\")]\n\n\tRegOpen,\n\n\t#[fail(display = \"Index reading failed\")]\n\n\tIndexRepoReading,\n\n\t#[fail(display = \"Index JSON reading failed\")]\n\n\tIndexJsonReading,\n\n\t#[fail(display = \"Index JSON file not found\")]\n\n\tIndexJsonMissing,\n\n}\n\n\n\npub type RegistryError = Context<RegistryErrorKind>;\n\n\n", "file_path": "all-crate-storage/registry/registry.rs", "rank": 12, "score": 117828.38303321111 }, { "content": "fn write_css_color(fmt :&mut fmt::Formatter, c :Color) -> fmt::Result {\n\n\tif c.a != 0xFF {\n\n\t\ttry!(write!(fmt,\"#{:02x}{:02x}{:02x}{:02x}\", c.r, c.g, c.b, c.a));\n\n\t} else {\n\n\t\ttry!(write!(fmt,\"#{:02x}{:02x}{:02x}\", c.r, c.g, c.b));\n\n\t}\n\n\tOk(())\n\n}\n", "file_path": "cargo-local-serve/code_format.rs", "rank": 13, "score": 114344.5968173967 }, { "content": "fn versions(r: &mut Request) -> IronResult<Response> {\n\n\tlet path = r.url.path();\n\n\tlet name = path[0];\n\n\tlet mut resp = 
Response::new();\n\n\n\n\tlet refferer = r.headers.get::<Referer>()\n\n\t\t.map(|s| s.as_str().to_string());\n\n\n\n\tlet crate_data = registry_data::get_versions_data(name, &REGISTRY, refferer);\n\n\tresp.set_mut(Template::new(\"versions\", crate_data))\n\n\t\t.set_mut(status::Ok);\n\n\tOk(resp)\n\n}\n\n\n", "file_path": "cargo-local-serve/main.rs", "rank": 14, "score": 112430.49125897618 }, { "content": "fn krate(r: &mut Request) -> IronResult<Response> {\n\n\tlet path = r.url.path();\n\n\tlet name = path[0];\n\n\tlet opt_version = path.get(1).map(|v| *v);\n\n\tlet mut resp = Response::new();\n\n\tCRATE_SOURCE.with(|s| {\n\n\t\tlet data = registry_data::get_crate_data(name.to_string(),\n\n\t\t\t&REGISTRY, &mut *s.borrow_mut(), opt_version);\n\n\t\tmatch data {\n\n\t\t\tOk(d) => {\n\n\t\t\t\tresp.set_mut(Template::new(\"crate\", d))\n\n\t\t\t\t\t.set_mut(status::Ok);\n\n\t\t\t},\n\n\t\t\tErr(e) => {\n\n\t\t\t\tresp.set_mut(Template::new(\"error\", e.as_map()))\n\n\t\t\t\t\t.set_mut(status::Ok);\n\n\t\t\t},\n\n\t\t}\n\n\t});\n\n\tOk(resp)\n\n}\n\n\n", "file_path": "cargo-local-serve/main.rs", "rank": 15, "score": 112430.49125897618 }, { "content": "fn reverse_dependencies(r: &mut Request) -> IronResult<Response> {\n\n\tlet path = r.url.path();\n\n\tlet name = path[0];\n\n\tlet mut resp = Response::new();\n\n\n\n\tlet refferer = r.headers.get::<Referer>()\n\n\t\t.map(|s| s.as_str().to_string());\n\n\n\n\tlet only_latest_versions = false;\n\n\n\n\tlet crate_data = registry_data::get_reverse_dependencies(name,\n\n\t\tonly_latest_versions, &CRATE_STATS, refferer);\n\n\tresp.set_mut(Template::new(\"reverse_dependencies\", crate_data))\n\n\t\t.set_mut(status::Ok);\n\n\tOk(resp)\n\n}\n\n\n", "file_path": "cargo-local-serve/main.rs", "rank": 16, "score": 110068.45548219618 }, { "content": "fn get_digest_lists(blob_graph :&GraphOfBlobs) -> Vec<Vec<Digest>> {\n\n\tlet mut res = Vec::with_capacity(blob_graph.roots.len());\n\n\tlet graph = &blob_graph.graph;\n\n\tlet mut 
visited = HashSet::new();\n\n\tfor root in blob_graph.roots.iter() {\n\n\t\tlet mut mblob_digests = Vec::new();\n\n\t\tlet mut to_walk = Vec::new();\n\n\t\tto_walk.push(*root);\n\n\t\t// TODO perform in-order traversal\n\n\t\t// TODO also actually return a tree not a traversal.\n\n\t\t// traversals perform pretty crappily if we have a tree present :).\n\n\t\twhile let Some(n) = to_walk.pop() {\n\n\t\t\tif !visited.insert(n) {\n\n\t\t\t\tcontinue;\n\n\t\t\t}\n\n\t\t\tlet digest = graph.node_weight(n).unwrap();\n\n\t\t\tmblob_digests.push(*digest);\n\n\t\t\tlet n = graph.neighbors(n);\n\n\t\t\tfor neigh in n {\n\n\t\t\t\tto_walk.push(neigh);\n", "file_path": "all-crate-storage/blob_crate_storage.rs", "rank": 17, "score": 99322.62863449962 }, { "content": "pub fn get_crate_file_data<C :CrateSource>(st :&mut C,\n\n\tname :&str, version_str :&str, path :&[&str])\n\n\t\t-> CrateFileData {\n\n\tuse std::str;\n\n\tuse syntect_format::SyntectFormatter;\n\n\n\n\tlet mut data = Map::new();\n\n\n\n\t// First step: find the path to the crate.\n\n\tlet version = SvVersion::parse(version_str).unwrap();\n\n\tlet mut fh = match st.get_crate_handle_nv(name.to_owned(), version.clone()) {\n\n\t\tSome(f) => f,\n\n\t\tNone => panic!(\"Version {} of crate {} not mirrored\", version, name),\n\n\t};\n\n\tlet file_path_str = path.iter().fold(String::new(), |s, u| s + \"/\" + u);\n\n\n\n\tif file_path_str.len() <= 1 {\n\n\n\n\t\t#[derive(Serialize, Debug)]\n\n\t\tstruct FileEntry {\n", "file_path": "cargo-local-serve/registry_data.rs", "rank": 18, "score": 97138.47553718014 }, { "content": "fn csp_hdr(req :&mut Request, mut res :Response) -> IronResult<Response> {\n\n\tlet mut csp_header =\n\n\t\t\"default-src 'none'; \\\n\n\t\timg-src 'self'; \\\n\n\t\tform-action 'self'; \".to_owned();\n\n\n\n\tlet path = req.url.path();\n\n\tlet allow_inline_style = if let Some(z) = path.get(0) {\n\n\t\t// TODO find a way to avoid inline css in the syntect formatter\n\n\t\t// and then remove || z == 
&\"crate\".\n\n\t\t// https://github.com/trishume/syntect/issues/121\n\n\t\tif z == &\"static\" || z == &\"crate\" || z == &\"files\" {\n\n\t\t\t// Needed for inline CSS inside SVG\n\n\t\t\ttrue\n\n\t\t} else {\n\n\t\t\tfalse\n\n\t\t}\n\n\t} else {\n\n\t\tfalse\n\n\t};\n\n\tif allow_inline_style {\n\n\t\tcsp_header += \"style-src 'self' 'unsafe-inline';\";\n\n\t} else {\n\n\t\tcsp_header += \"style-src 'self';\";\n\n\t}\n\n\tres.headers.set(ContentSecurityPolicy(csp_header));\n\n\tOk(res)\n\n}\n\n\n", "file_path": "cargo-local-serve/main.rs", "rank": 19, "score": 95134.59720684438 }, { "content": "fn index(_: &mut Request) -> IronResult<Response> {\n\n\tlet mut resp = Response::new();\n\n\n\n\tlet crate_data = registry_data::get_index_data(&CRATE_STATS);\n\n\tresp.set_mut(Template::new(\"index\", crate_data))\n\n\t\t.set_mut(status::Ok);\n\n\tOk(resp)\n\n}\n\n\n", "file_path": "cargo-local-serve/main.rs", "rank": 20, "score": 94740.50829235523 }, { "content": "fn handle_parallel_task<ET :FnMut(BlockingTask)>(task :ParallelTask, mut emit_task :ET) {\n\n\tmatch task {\n\n\t\tParallelTask::ObtainCrateContentBlobs(crate_file_name, crate_archive_file, digest) => {\n\n\t\t\tmatch CrateContentBlobs::from_archive_file(&crate_archive_file[..]) {\n\n\t\t\t\tOk(ccb) => {\n\n\t\t\t\t\tif digest == ccb.digest_of_reconstructed() {\n\n\t\t\t\t\t\temit_task(BlockingTask::StoreCrateContentBlobs(crate_file_name, ccb));\n\n\t\t\t\t\t} else {\n\n\t\t\t\t\t\t// Digest mismatch\n\n\t\t\t\t\t\temit_task(BlockingTask::StoreCrateUndeduplicated(crate_file_name, crate_archive_file));\n\n\t\t\t\t\t}\n\n\t\t\t\t},\n\n\t\t\t\tErr(_) => {\n\n\t\t\t\t\t// Error during CrateContentBlobs creation... 
most likely invalid gz file or sth\n\n\t\t\t\t\temit_task(BlockingTask::StoreCrateUndeduplicated(crate_file_name, crate_archive_file));\n\n\t\t\t\t},\n\n\t\t\t};\n\n\t\t},\n\n\t\tParallelTask::CompressBlob(d, blob) => {\n\n\t\t\tlet mut gz_enc = GzBuilder::new().read(blob.as_slice(), Compression::best());\n", "file_path": "all-crate-storage/blob_crate_storage.rs", "rank": 21, "score": 94038.90188455634 }, { "content": "fn search(req :&mut Request) -> IronResult<Response> {\n\n\tlet mut resp = Response::new();\n\n\n\n\tlet hmap = req.get_ref::<UrlEncodedQuery>().unwrap();\n\n\tlet (crate_data, maybe_only_one) = registry_data::get_search_result_data(&CRATE_STATS, hmap);\n\n\tif let Some(only_crate_name) = maybe_only_one {\n\n\t\tresp.headers.set(Location(format!(\"/crate/{}\", only_crate_name)));\n\n\t\tresp.set_mut(status::Found);\n\n\t} else {\n\n\t\tresp.set_mut(Template::new(\"search\", crate_data))\n\n\t\t\t.set_mut(status::Ok);\n\n\t}\n\n\tOk(resp)\n\n}\n\n\n", "file_path": "cargo-local-serve/main.rs", "rank": 22, "score": 92378.47251557522 }, { "content": "fn crate_files(req :&mut Request) -> IronResult<Response> {\n\n\tuse self::registry_data::CrateFileData::*;\n\n\n\n\tlet path = req.url.path();\n\n\tlet name = path[0];\n\n\tlet version = path[1];\n\n\tlet mut resp = Response::new();\n\n\n\n\tCRATE_SOURCE.with(|s| {\n\n\t\tlet crate_file_data = registry_data::get_crate_file_data(\n\n\t\t\t&mut *s.borrow_mut(), name, version, &path[2..]);\n\n\t\tlet template = match crate_file_data {\n\n\t\t\tFileListing(data) => Template::new(\"file-listing\", data),\n\n\t\t\tFileContent(data) => Template::new(\"file-content\", data),\n\n\t\t};\n\n\t\tresp.set_mut(template)\n\n\t\t\t.set_mut(status::Ok);\n\n\t});\n\n\tOk(resp)\n\n}\n\n\n", "file_path": "cargo-local-serve/main.rs", "rank": 23, "score": 90180.03742491954 }, { "content": "fn api_crate(req :&mut Request) -> IronResult<Response> {\n\n\n\n\tprintln!(\"{:?}\", req.url.path());\n\n\tlet path = 
req.url.path();\n\n\tlet name = path[0];\n\n\tlet version = path[1];\n\n\tlet mut resp = Response::new();\n\n\n\n\tlet sv_version = SvVersion::parse(version).unwrap();\n\n\tlet crate_spec = CrateSpec {\n\n\t\tname : name.to_string(),\n\n\t\tversion : sv_version,\n\n\t};\n\n\tCRATE_SOURCE.with(|s| {\n\n\t\tlet s = &mut *s.borrow_mut();\n\n\t\tlet crate_opt = s.get_crate(&crate_spec);\n\n\t\tif let Some(crate_data) = crate_opt {\n\n\t\t\tuse all_crate_storage::reconstruction::CrateContentBlobs;\n\n\t\t\tlet ccb = CrateContentBlobs::from_archive_file(&crate_data as &[u8]).unwrap();\n\n\t\t\tlet crate_data = ccb.to_archive_file();\n\n\t\t\tresp.set_mut(crate_data)\n\n\t\t\t\t.set_mut(status::Ok);\n\n\t\t} else {\n\n\t\t\tresp.set_mut(status::NotFound);\n\n\t\t}\n\n\t});\n\n\tOk(resp)\n\n}\n\n\n", "file_path": "cargo-local-serve/main.rs", "rank": 24, "score": 90180.03742491954 }, { "content": "pub fn obtain_crate_name_path(name :&str) -> String {\n\n\tmatch name.len() {\n\n\t\t1 => format!(\"1/{}\", name),\n\n\t\t2 => format!(\"2/{}\", name),\n\n\t\t3 => format!(\"3/{}/{}\", &name[..1], name),\n\n\t\t_ => format!(\"{}/{}/{}\", &name[..2], &name[2..4], name),\n\n\t}\n\n}\n\n\n", "file_path": "all-crate-storage/registry/registry.rs", "rank": 25, "score": 89492.54107687942 }, { "content": "pub fn compute_crate_statistics(acj :&AllCratesJson) -> CrateStats {\n\n\tlet mut names_interner = StringInterner::new();\n\n\n\n\tlet mut latest_crate_versions = HashMap::new();\n\n\tfor &(ref name, ref cjv) in acj.iter() {\n\n\t\tlet name_i = names_interner.get_or_intern(name.clone());\n\n\t\tif let Some(newest_krate) = cjv.iter().max_by_key(|krate| &krate.version) {\n\n\t\t\tlatest_crate_versions.insert(name_i, newest_krate.version.clone());\n\n\t\t}\n\n\t}\n\n\n\n\tlet mut revd = HashMap::new();\n\n\tlet mut ddon = HashMap::<CrateName, HashSet<CrateName>>::new();\n\n\tfor &(ref name, ref cjv) in acj.iter() {\n\n\t\tlet name_i = names_interner.get_or_intern(name.clone());\n\n\t\tlet 
latest_version = latest_crate_versions.get(&name_i).unwrap();\n\n\t\tfor krate in cjv.iter() {\n\n\t\t\tfor dep in krate.dependencies.iter() {\n\n\t\t\t\tlet dname_i = names_interner.get_or_intern(dep.name.clone());\n\n\t\t\t\tlet e = revd.entry(dname_i).or_insert(HashMap::new());\n", "file_path": "all-crate-storage/registry/statistics.rs", "rank": 26, "score": 87540.95621596169 }, { "content": "pub fn get_crate_data<C :CrateSource>(name :String, reg :&Registry, st :&mut C,\n\n\t\tversion :Option<&str>) -> Result<Map<String, Value>, StrErr> {\n\n\n\n\tlet mut data = Map::new();\n\n\n\n\t// First step: find the path to the crate.\n\n\tlet crate_json = reg.get_crate_json(&name)\n\n\t\t.map_err(|_| format!(\"Couldn't get crate json for crate '{}'\", name))?;\n\n\tlet version = if let Some(v) = version {\n\n\t\tSvVersion::parse(v).unwrap()\n\n\t} else {\n\n\t\t// Finds the latest version\n\n\t\tcrate_json.iter()\n\n\t\t\t.map(|v| &v.version)\n\n\t\t\t.max()\n\n\t\t\t.ok_or_else(|| format!(\"No version present of crate '{}'\", name))?\n\n\t\t\t.clone()\n\n\t};\n\n\n\n\tlet dtls = get_crate_details(&name, version.clone(), st);\n", "file_path": "cargo-local-serve/registry_data.rs", "rank": 27, "score": 84633.36873349233 }, { "content": "pub fn get_search_result_data(stats :&CrateStats, query_map :&QueryMap)\n\n\t\t-> (Map<String, Value>, Option<String>) {\n\n\n\n\tlet search_term = (&query_map[\"q\"][0]).clone(); // TODO add error handling\n\n\n\n\tlet results = stats.crate_names_interner.iter_values()\n\n\t\t.filter(|s| s.contains(&search_term))\n\n\t\t.map(|s| SearchResult { name : s.to_owned() })\n\n\t\t.collect::<Vec<_>>();\n\n\n\n\t#[derive(Serialize, Debug)]\n\n\tstruct SearchResult {\n\n\t\tname :String,\n\n\t}\n\n\n\n\t#[derive(Serialize, Debug)]\n\n\tstruct SearchResults {\n\n\t\tsearch_term :String,\n\n\t\tresults :Vec<SearchResult>,\n\n\t\tresults_length :usize,\n", "file_path": "cargo-local-serve/registry_data.rs", "rank": 28, "score": 81641.94838747923 }, { 
"content": "pub trait CrateStorage {\n\n\tfn store_parallel_iter<I :Iterator<Item = (CrateSpec, Vec<u8>, Digest)>>(\n\n\t\t\t&mut self, thread_count :u16, crate_iter :I);\n\n\n\n\tfn fill_crate_storage_from_source<S :CrateSource>(&mut self,\n\n\t\t\tthread_count :u16, acj :&AllCratesJson, source :&mut S,\n\n\t\t\tprogress_callback :fn(&str, &CrateIndexJson)) {\n\n\t\t// Iterators are cool they told me.\n\n\t\t// Iterators are idiomatic they told me.\n\n\t\t// THEN WHY THE FUCK DO I NEED THIS REFCELL CRAP?!?!?!\n\n\t\t// https://stackoverflow.com/a/28521985\n\n\t\tlet source_cell = RefCell::new(source);\n\n\t\tlet crate_iter = acj.iter()\n\n\t\t\t.flat_map(|&(ref name, ref versions)| {\n\n\t\t\t\tlet name = name.clone();\n\n\t\t\t\tlet source_cell = &source_cell;\n\n\t\t\t\tversions.iter().filter_map(move |v| {\n\n\t\t\t\t\tlet name = name.clone();\n\n\t\t\t\t\tlet mut source = source_cell.borrow_mut();\n\n\t\t\t\t\tprogress_callback(&name, &v);\n", "file_path": "all-crate-storage/crate_storage.rs", "rank": 29, "score": 79904.3589932347 }, { "content": "pub fn get_reverse_dependencies(name :&str,\n\n\t\tonly_latest_versions :bool,\n\n\t\tstats :&CrateStats, refferer :Option<String>) -> Map<String, Value> {\n\n\n\n\t#[derive(Serialize, Debug)]\n\n\tstruct RevDep {\n\n\t\tname :String,\n\n\t\treq :VersionReq,\n\n\t\tversion :SvVersion,\n\n\t}\n\n\n\n\t#[derive(Serialize, Debug)]\n\n\tstruct RevDependencies {\n\n\t\tname :String,\n\n\t\trefferer :Option<String>,\n\n\t\trev_d_len :usize,\n\n\t\trev_d :Vec<RevDep>,\n\n\t}\n\n\n\n\tlet mut data = Map::new();\n", "file_path": "cargo-local-serve/registry_data.rs", "rank": 30, "score": 75527.50628750687 }, { "content": "/// Renders a given markdown string to sanitized HTML\n\n/// with formatted code blocks.\n\npub fn render_markdown(markdown :&str) -> String {\n\n\tlet p = Parser::new(&markdown);\n\n\tlet ev_it = EventIter::new(p);\n\n\tlet mut unsafe_html = String::new();\n\n\thtml::push_html(&mut unsafe_html, 
ev_it);\n\n\tlet safe_html = AMMONIA_BUILDER.clean(&unsafe_html).to_string();\n\n\tsafe_html\n\n}\n", "file_path": "cargo-local-serve/markdown_render.rs", "rank": 31, "score": 73395.6867900659 }, { "content": "pub trait CrateSource :Sized {\n\n\ttype CrateHandle :CrateFileHandle<Self>;\n\n\tfn get_crate_handle_nv(&mut self,\n\n\t\t\tname :String, version :Version) -> Option<CrateHandle<Self, Self::CrateHandle>>;\n\n\t// TODO maybe use CrateSpec here?\n\n\tfn get_crate_nv(&mut self, name :String, version :Version) -> Option<Vec<u8>> {\n\n\t\tself.get_crate(&CrateSpec {\n\n\t\t\tname,\n\n\t\t\tversion,\n\n\t\t})\n\n\t}\n\n\tfn get_crate(&mut self, spec :&CrateSpec) -> Option<Vec<u8>>;\n\n}\n\n\n", "file_path": "all-crate-storage/crate_storage.rs", "rank": 32, "score": 72651.75768029118 }, { "content": "#[allow(dead_code)]\n\npub fn winapi_crate_data() -> Map<String, Value> {\n\n\tlet mut data = Map::new();\n\n\n\n\tlet krate = Crate {\n\n\t\tname : \"winapi\".to_string(),\n\n\t\tversion : \"0.2.8\".to_string(),\n\n\t\tde : Some(CrateDetails {\n\n\t\t\thomepage : None,\n\n\t\t\trepository : Some(\"https://github.com/retep998/winapi-rs\".to_string()),\n\n\t\t\tdescription : \"Types and constants for WinAPI bindings. 
See README for list of crates providing function bindings.\".to_string(),\n\n\n\n\t\t\treadme_html : None,\n\n\t\t\tvcs_commit : None,\n\n\t\t\tauthors : vec![\n\n\t\t\t\tAuthor {\n\n\t\t\t\t\tname : \"Peter Atashian\".to_string(),\n\n\t\t\t\t\temail : Some(\"retep998@gmail.com\".to_string()),\n\n\t\t\t\t},\n\n\t\t\t],\n\n\t\t\tlicense : \"MIT\".to_string(),\n", "file_path": "cargo-local-serve/registry_data.rs", "rank": 33, "score": 71933.97083931442 }, { "content": "#[test]\n\nfn store_and_load() {\n\n\tlet mut c = Cursor::new(Vec::new());\n\n\tlet test_data = [\n\n\t\t([1u8; 32], &vec![1,2,3,4,5,6,7,8]),\n\n\t\t([2; 32], &vec![7,8,9,1,1,1,9,8,7]),\n\n\t\t([3; 32], &vec![8,3,8,3,8,3,8,3,8,3,8,3,8,3,8,3,8,3]),\n\n\t];\n\n\t{\n\n\t\tlet mut st = BlobStorage::empty(&mut c);\n\n\t\tfor &(d, ref s) in test_data.iter() {\n\n\t\t\tst.insert(d, s).unwrap();\n\n\t\t}\n\n\t\tst.write_header_and_index().unwrap();\n\n\t}\n\n\tprintln!(\"c {:?}\", c);\n\n\t{\n\n\t\tlet mut st = BlobStorage::load(&mut c).unwrap();\n\n\t\tfor &(ref d, s) in test_data.iter() {\n\n\t\t\tassert_eq!(st.get(d).unwrap().as_ref(), Some(s));\n\n\t\t}\n\n\t}\n\n}\n", "file_path": "all-crate-storage/blob_storage_test.rs", "rank": 34, "score": 70608.47196327941 }, { "content": "pub trait CrateFileHandle<S :CrateSource> {\n\n\tfn get_file_list(&self, source :&mut S) -> Vec<String>;\n\n\tfn get_file(&self, source :&mut S, path :&str) -> Option<Vec<u8>>;\n\n}\n\n\n\nimpl<S :CrateSource> CrateFileHandle<S> for Box<dyn CrateFileHandle<S>> {\n\n\tfn get_file_list(&self, source :&mut S) -> Vec<String> {\n\n\t\t<Box<_> as Deref>::deref(self).get_file_list(source)\n\n\t}\n\n\tfn get_file(&self, source :&mut S, path :&str) -> Option<Vec<u8>> {\n\n\t\t<Box<_> as Deref>::deref(self).get_file(source, path)\n\n\t}\n\n}\n\n\n", "file_path": "all-crate-storage/crate_storage.rs", "rank": 35, "score": 66519.90544288911 }, { "content": "pub fn get_index_data(stats :&CrateStats) -> Map<String, Value> 
{\n\n\n\n\t#[derive(Serialize, Debug)]\n\n\tstruct CrateWithCount {\n\n\t\tname :String,\n\n\t\tcount :usize,\n\n\t}\n\n\n\n\t#[derive(Serialize, Debug)]\n\n\tstruct Index {\n\n\t\tdirect_rev_deps :Vec<CrateWithCount>,\n\n\t\ttransitive_rev_deps :Vec<CrateWithCount>,\n\n\t\tmost_versions :Vec<CrateWithCount>,\n\n\t}\n\n\n\n\tlet mut data = Map::new();\n\n\n\n\tlet transitive_rev_deps = vec![]; // TODO populate\n\n\n\n\tlet ddon = &stats.most_directly_depended_on;\n", "file_path": "cargo-local-serve/registry_data.rs", "rank": 36, "score": 64561.00152693392 }, { "content": "pub fn get_versions_data(name :&str, reg :&Registry, refferer :Option<String>)\n\n\t\t-> Map<String, Value> {\n\n\n\n\t#[derive(Serialize, Debug)]\n\n\tstruct Versions {\n\n\t\tname :String,\n\n\t\trefferer :Option<String>,\n\n\t\tversions_length :usize,\n\n\t\tversions :Vec<Version>,\n\n\t}\n\n\n\n\tlet mut data = Map::new();\n\n\n\n\tlet crate_json = reg.get_crate_json(&name).unwrap();\n\n\n\n\tlet version_list = crate_json.iter()\n\n\t\t.map(|jl| Version {\n\n\t\t\tv : format!(\"{}\", jl.version),\n\n\t\t\tdate : None,\n\n\t\t})\n", "file_path": "cargo-local-serve/registry_data.rs", "rank": 37, "score": 60621.11158394345 }, { "content": "fn run(tx :SyncSender<(usize, usize, String)>, acj :&AllCratesJson,\n\n\t\ttotal_file_count :usize, t :usize, tc :usize,\n\n\t\tgrepper :&RegexMatcher, storage_base :&Path) {\n\n\tlet mut ctr = 0;\n\n\n\n\tmacro_rules! 
pln {\n\n\t\t($($v:expr),*) => {\n\n\t\t\ttx.send((ctr, total_file_count, format!($($v),*))).unwrap();\n\n\t\t}\n\n\t}\n\n\n\n\tlet mut crate_source = FileTreeStorage::new(storage_base);\n\n\n\n\tfor &(ref name, ref versions) in acj.iter() {\n\n\n\n\t\tfor ref v in versions.iter() {\n\n\t\t\tctr += 1;\n\n\t\t\t/*if ctr != 21899 {\n\n\t\t\t\tcontinue;\n\n\t\t\t}*/\n", "file_path": "all-crate-grep/main.rs", "rank": 38, "score": 59745.37077085844 }, { "content": "fn normal_dep_kind() -> DependencyKind {\n\n\tDependencyKind::Normal\n\n}\n\n\n\n// TODO tests for dependency kind set to null or non existent.\n\n\n\n#[derive(Deserialize, Clone)]\n\npub struct CrateDepJson {\n\n\tpub name :String,\n\n\tpub features :Vec<String>,\n\n\tpub default_features :bool,\n\n\tpub target :Option<String>,\n\n\tpub req :VersionReq,\n\n\tpub optional :bool,\n\n\t// We need to set a default as kind may not always be != null,\n\n\t// or it may not be existent.\n\n\t// https://github.com/rust-lang/crates.io/issues/1168\n\n\t#[serde(default = \"normal_dep_kind\", deserialize_with = \"nullable_dep_kind\")]\n\n\tpub kind :DependencyKind,\n\n}\n", "file_path": "all-crate-storage/registry/registry.rs", "rank": 39, "score": 58220.01553143891 }, { "content": "pub fn highlight_string_snippet(s :&str, syntax :&SyntaxReference, theme :&Theme, syns :&SyntaxSet)\n\n\t\t-> String {\n\n\tlet mut output = String::new();\n\n\tlet mut highlighter = HighlightLines::new(syntax, theme);\n\n\tlet c = theme.settings.background.unwrap_or(Color::WHITE);\n\n\twrite!(output,\n\n\t\t\"<pre style=\\\"background-color:#{:02x}{:02x}{:02x};\\\">\\n\",\n\n\t\tc.r,\n\n\t\tc.g,\n\n\t\tc.b).unwrap();\n\n\tlet mut spcx = StyledPrintCx::new(IncludeBackground::IfDifferent(c));\n\n\tfor line in s.lines() {\n\n\t\tlet regions = highlighter.highlight(line, syns);\n\n\t\tspcx.styles_to_coloured_html(&mut output, &regions[..]);\n\n\t\toutput.push('\\n');\n\n\t}\n\n\tspcx.finish(&mut 
output);\n\n\toutput.push_str(\"</pre>\\n\");\n\n\toutput\n\n}\n\n\n", "file_path": "cargo-local-serve/code_format.rs", "rank": 40, "score": 57365.02483173301 }, { "content": "fn get_repo_head_tree<'a>(repo :&'a Repository)\n\n\t\t-> Result<git2::Tree<'a>, git2::Error> {\n\n\tlet head_id = try!(repo.refname_to_id(\"refs/remotes/origin/master\"));\n\n\tlet head_commit = try!(repo.find_commit(head_id));\n\n\tlet head_tree = try!(head_commit.tree());\n\n\tOk(head_tree)\n\n}\n\n\n\nimpl Registry {\n\n\tpub fn from_name(name :&str) -> Result<Self, env::VarError> {\n\n\t\t// The name is the name + hash pair.\n\n\t\t// For crates.io it is \"github.com-1ecc6299db9ec823\"\n\n\t\tlet home = try!(env::var(\"HOME\"));\n\n\t\tlet base_path = Path::new(&home).join(\".cargo/registry/\");\n\n\t\tlet cache_path = base_path.join(\"cache\").join(name);\n\n\t\t//let cache_path = env::current_dir().unwrap().join(\"crate-archives\");\n\n\t\tlet index_path = base_path.join(\"index\").join(name);\n\n\t\tOk(Registry {\n\n\t\t\tcache_path,\n\n\t\t\tindex_path,\n", "file_path": "all-crate-storage/registry/registry.rs", "rank": 41, "score": 53536.8956027977 }, { "content": "// This construct with AppConfig and AppConfigOpt\n\n// is needed due to\n\n// https://github.com/serde-rs/serde/issues/368\n\nstruct AppConfig {\n\n\tsite_dir :Option<String>,\n\n\tlisten_host :String,\n\n\tlisten_port :u32,\n\n\tsource :CrateSourceCfg,\n\n}\n\n\n\nimpl AppConfig {\n\n\tpub fn from_opt(o :AppConfigOpt) -> Self {\n\n\t\tAppConfig {\n\n\t\t\tsite_dir : o.site_dir,\n\n\t\t\tlisten_host : o.listen_host.unwrap_or(\"localhost\".to_owned()),\n\n\t\t\tlisten_port : o.listen_port.unwrap_or(3000),\n\n\t\t\tsource : o.source.unwrap_or(CrateSourceCfg::Cache),\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "cargo-local-serve/main.rs", "rank": 42, "score": 51793.576012085374 }, { "content": "#[derive(Serialize, Debug)]\n\nstruct CrateDetails {\n\n\thomepage :Option<String>,\n\n\trepository :Option<String>,\n\n\tdescription 
:String,\n\n\treadme_html :Option<String>,\n\n\tvcs_commit :Option<String>,\n\n\tauthors :Vec<Author>,\n\n\tlicense :String,\n\n}\n\n\n", "file_path": "cargo-local-serve/registry_data.rs", "rank": 43, "score": 50688.75973566138 }, { "content": "#[derive(Deserialize, Debug)]\n\nstruct AppConfigOpt {\n\n\tsite_dir :Option<String>,\n\n\tlisten_host :Option<String>,\n\n\tlisten_port :Option<u32>,\n\n\tsource :Option<CrateSourceCfg>,\n\n}\n\n\n", "file_path": "cargo-local-serve/main.rs", "rank": 44, "score": 50688.75973566138 }, { "content": "struct StyledPrintCx {\n\n\tbackground :IncludeBackground,\n\n\tprev_style :Option<Style>,\n\n}\n\n\n\nimpl StyledPrintCx {\n\n\tfn new(bg :IncludeBackground) -> Self {\n\n\t\tStyledPrintCx {\n\n\t\t\tbackground : bg,\n\n\t\t\tprev_style : None,\n\n\t\t}\n\n\t}\n\n\tfn styles_to_coloured_html(&mut self, s :&mut String,\n\n\t\t\tv :&[(Style, &str)]) {\n\n\t\tfor &(ref style, text) in v.iter() {\n\n\t\t\tlet keep_style = if let Some(ref ps) = self.prev_style {\n\n\t\t\t\tstyle == ps ||\n\n\t\t\t\t\t(style.background == ps.background && text.trim().is_empty())\n\n\t\t\t} else {\n\n\t\t\t\tfalse\n", "file_path": "cargo-local-serve/code_format.rs", "rank": 45, "score": 49672.77068786188 }, { "content": "struct EventIter<'a> {\n\n\tp :Parser<'a>,\n\n}\n\n\n\nimpl<'a> EventIter<'a> {\n\n\tpub fn new(p :Parser<'a>) -> Self {\n\n\t\tEventIter {\n\n\t\t\tp,\n\n\t\t}\n\n\t}\n\n}\n\n\n\nlazy_static! 
{\n\n\tstatic ref AMMONIA_BUILDER :Builder<'static> = construct_ammonia_builder();\n\n}\n\n\n\nimpl<'a> Iterator for EventIter<'a> {\n\n\ttype Item = Event<'a>;\n\n\n\n\tfn next(&mut self) -> Option<Self::Item> {\n", "file_path": "cargo-local-serve/markdown_render.rs", "rank": 46, "score": 48676.39665907217 }, { "content": "// custom function needed due to https://github.com/serde-rs/serde/issues/1098\n\n// as default + rename_all = \"lowercase\" does not cover the kind: null case :/\n\nfn nullable_dep_kind<'de, D :Deserializer<'de>>(deserializer :D)\n\n\t\t-> Result<DependencyKind, D::Error> {\n\n\tlet opt = try!(Option::deserialize(deserializer));\n\n\tOk(opt.unwrap_or(DependencyKind::Normal))\n\n}\n\n\n", "file_path": "all-crate-storage/registry/registry.rs", "rank": 47, "score": 47989.67034470904 }, { "content": "type CrateName = usize;\n\n\n\npub struct CrateStats {\n\n\tpub crate_names_interner :StringInterner<CrateName>,\n\n\t/// Mapping a crate to its latest version\n\n\tpub latest_crate_versions :HashMap<CrateName, Version>,\n\n\t/// Mapping a crate to its reverse dependencies\n\n\tpub reverse_dependencies :HashMap<CrateName, HashMap<VersionReq, HashSet<(CrateName, Version)>>>,\n\n\t/// The list of crates ordered by the number of crates directly depending on them.\n\n\t///\n\n\t/// The algorithm doesn't count any reverse dependency where only\n\n\t/// a past version depended on a crate, but not the latest one.\n\n\tpub most_directly_depended_on :Vec<(CrateName, usize)>,\n\n\t/// The list of crates ordered by the number of versions they have.\n\n\tpub most_versions: Vec<(CrateName, usize)>,\n\n}\n\n\n", "file_path": "all-crate-storage/registry/statistics.rs", "rank": 48, "score": 46633.51314796688 }, { "content": "fn main() {\n\n\tprintln!(\"Loading all crates json...\");\n\n\tlet registry = Registry::from_name(\"github.com-1ecc6299db9ec823\").unwrap();\n\n\tlet acj :AllCratesJson = registry.get_all_crates_json().unwrap();\n\n\tlet total_file_count :usize = 
acj.iter().map(|&(_, ref v)| v.len()).sum();\n\n\n\n\tprintln!(\"The target is {} files.\", total_file_count);\n\n\tlet storage_base = env::current_dir().unwrap().join(\"crate-archives\");\n\n\tprintln!(\"Using directory {} to load the files from.\",\n\n\t\tstorage_base.to_str().unwrap());\n\n\n\n\tlet needle = env::args().nth(1).expect(\"expected search term\");\n\n\tprintln!(\"Search term '{}'\", needle);\n\n\tlet grepper = RegexMatcher::new_line_matcher(&needle).unwrap();\n\n\n\n\tlet (tx, rx) = sync_channel(10);\n\n\n\n\tlet thread_count = 8;\n\n\tfor v in 0..thread_count {\n\n\t\tlet tx = tx.clone();\n", "file_path": "all-crate-grep/main.rs", "rank": 49, "score": 45379.91824025751 }, { "content": "fn main() {\n\n\tenv_logger::init();\n\n\n\n\tlet cfg_opt :AppConfigOpt = match File::open(\"config.toml\") {\n\n\t\tOk(mut f) => {\n\n\t\t\tlet mut s = String::new();\n\n\t\t\tf.read_to_string(&mut s).unwrap();\n\n\t\t\ttoml::from_str(&s).unwrap()\n\n\t\t},\n\n\t\tErr(_) => {\n\n\t\t\ttoml::from_str(\"\").unwrap()\n\n\t\t},\n\n\t};\n\n\t//println!(\"Config: {:?}\", cfg_opt);\n\n\tlet cfg = AppConfig::from_opt(cfg_opt);\n\n\t//println!(\"Config: {:?}\", cfg);\n\n\n\n\tlet mut hbse = HandlebarsEngine::new();\n\n\n\n\n", "file_path": "cargo-local-serve/main.rs", "rank": 50, "score": 44112.94880297116 }, { "content": "struct FallbackHandler(Box<dyn Handler>);\n\n\n\nimpl Handler for FallbackHandler {\n\n\tfn handle(&self, req: &mut Request) -> IronResult<Response> {\n\n\t\tlet resp = self.0.handle(req);\n\n\n\n\t\tmatch resp {\n\n\t\t\tErr(err) => {\n\n\t\t\t\tmatch err.response.status {\n\n\t\t\t\t\tSome(status) => {\n\n\t\t\t\t\t\tlet mut m = Map::new();\n\n\t\t\t\t\t\tm.insert(\"error\".to_string(), Value::from(format!(\"{}\", status)));\n\n\t\t\t\t\t\tOk(Response::with((status,\n\n\t\t\t\t\t\t\tTemplate::new(\"error\", m))))\n\n\t\t\t\t\t}\n\n\t\t\t\t\t_ => Err(err),\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tother => other\n\n\t\t}\n", "file_path": 
"cargo-local-serve/main.rs", "rank": 51, "score": 43523.544668228205 }, { "content": "#[test]\n\nfn test_author_generation() {\n\n\tassert_eq!(Author::from_str(\"Hello World <hello@hello.example>\"),\n\n\t\tAuthor{\n\n\t\t\tname : \"Hello World\".to_string(),\n\n\t\t\temail : Some(\"hello@hello.example\".to_string()),\n\n\t\t});\n\n\tassert_eq!(Author::from_str(\"Hello World\"),\n\n\t\tAuthor{\n\n\t\t\tname : \"Hello World\".to_string(),\n\n\t\t\temail : None,\n\n\t\t});\n\n}\n\n\n\n#[derive(Serialize, Debug)]\n\npub struct Version {\n\n\tv :String,\n\n\tdate :Option<String>,\n\n}\n\n\n", "file_path": "cargo-local-serve/registry_data.rs", "rank": 52, "score": 40923.22654765901 }, { "content": "struct SpanBegin<'a>(&'a Style, &'a IncludeBackground);\n\n\n\nimpl<'a> fmt::Display for SpanBegin<'a> {\n\n\tfn fmt(&self, fmt :&mut fmt::Formatter) -> fmt::Result {\n\n\t\tlet style = self.0;\n\n\t\tlet bg = self.1;\n\n\n\n\t\ttry!(write!(fmt, \"<span style=\\\"\"));\n\n\t\tlet include_bg = match bg {\n\n\t\t\t&IncludeBackground::Yes => true,\n\n\t\t\t&IncludeBackground::No => false,\n\n\t\t\t&IncludeBackground::IfDifferent(c) => (style.background != c),\n\n\t\t};\n\n\t\tif include_bg {\n\n\t\t\ttry!(write!(fmt, \"background-color:\"));\n\n\t\t\ttry!(write_css_color(fmt, style.background));\n\n\t\t\ttry!(write!(fmt, \";\"));\n\n\t\t}\n\n\t\tif style.font_style.contains(FontStyle::UNDERLINE) {\n\n\t\t\ttry!(write!(fmt, \"text-decoration:underline;\"));\n", "file_path": "cargo-local-serve/code_format.rs", "rank": 53, "score": 40226.01136762409 }, { "content": "fn construct_ammonia_builder() -> Builder<'static> {\n\n\tuse std::iter;\n\n\tlet mut r = Builder::default();\n\n\t// TODO: filter out everything that can have scr attributes.\n\n\t// TODO: maybe replace all img's with their alt text?\n\n\tr.rm_tags(iter::once(\"img\"));\n\n\t// TODO: do filtering of inline CSS\n\n\t// (or even better: output classes instead of inline css)\n\n\tr.add_tag_attributes(\"span\", 
iter::once(\"style\"));\n\n\tr\n\n}\n\n\n", "file_path": "cargo-local-serve/markdown_render.rs", "rank": 54, "score": 36600.33483982509 }, { "content": "\t\t\tOk(f) => f,\n\n\t\t\tErr(_) => {\n\n\t\t\t\treturn None;\n\n\t\t\t},\n\n\t\t};\n\n\t\tlet mut file_buf = Vec::new();\n\n\t\tio::copy(&mut f, &mut file_buf).unwrap();\n\n\t\tSome(file_buf)\n\n\t}\n\n}\n\n\n\npub struct CacheStorage {\n\n\tstorage_base :PathBuf,\n\n}\n\n\n\nimpl CacheStorage {\n\n\tpub fn new(storage_base :&Path) -> Self {\n\n\t\tCacheStorage {\n\n\t\t\tstorage_base : storage_base.to_path_buf(),\n\n\t\t}\n", "file_path": "all-crate-storage/crate_storage.rs", "rank": 61, "score": 31394.79583948045 }, { "content": "pub struct BlobCrateHandle {\n\n\tcontent :Vec<u8>,\n\n}\n\n\n\nimpl BlobCrateHandle {\n\n\tpub fn new(content :Vec<u8>) -> Self {\n\n\t\tBlobCrateHandle {\n\n\t\t\tcontent\n\n\t\t}\n\n\t}\n\n\tpub fn map_all_files<F :FnMut(Option<String>, Option<Vec<u8>>)>(&self, mut f :F) {\n\n\t\tlet r = self.content.as_slice();\n\n\t\tlet decoded = GzDecoder::new(r);\n\n\t\tlet mut archive = Archive::new(decoded);\n\n\t\tfor entry in archive.entries().unwrap() {\n\n\t\t\tlet mut entry = if let Ok(entry) = entry {\n\n\t\t\t\tentry\n\n\t\t\t} else {\n\n\t\t\t\tcontinue;\n\n\t\t\t};\n", "file_path": "all-crate-storage/crate_storage.rs", "rank": 62, "score": 31393.08321249546 }, { "content": "use semver::Version;\n\nuse super::hash_ctx::{HashCtx, Digest};\n\nuse super::registry::registry::{CrateIndexJson, AllCratesJson};\n\nuse super::blob_crate_storage::{BlobCrateStorage, StorageFileHandle};\n\nuse flate2::read::GzDecoder;\n\nuse tar::Archive;\n\nuse std::path::{Path, PathBuf};\n\nuse std::cell::RefCell;\n\nuse std::fs::File;\n\nuse std::io::{self, Read, Seek};\n\nuse std::ops::Deref;\n\nuse registry::registry::obtain_crate_name_path;\n\n\n\n#[derive(Clone, PartialEq, Eq)]\n\npub struct CrateSpec {\n\n\tpub name :String,\n\n\tpub version :Version,\n\n}\n\n\n\nimpl CrateSpec {\n", "file_path": 
"all-crate-storage/crate_storage.rs", "rank": 63, "score": 31392.1943585108 }, { "content": "\tfn storage(&self) -> Option<&StorageFileHandle> {\n\n\t\tmatch self {\n\n\t\t\t&DynCrateHandle::StorageFileHandle(ref h) => Some(h),\n\n\t\t\t_ => None,\n\n\t\t}\n\n\t}\n\n\tfn overlay(&self) -> Option<&OverlayCrateHandle<DynCrateSource<S>, DynCrateSource<S>>> {\n\n\t\tmatch self {\n\n\t\t\t&DynCrateHandle::OverlayCrateHandle(ref h) => Some(h),\n\n\t\t\t_ => None,\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl<S :Read + Seek> CrateSource for DynCrateSource<S> {\n\n\ttype CrateHandle = DynCrateHandle<S>;\n\n\tfn get_crate_handle_nv(&mut self,\n\n\t\t\tname :String, version :Version) -> Option<CrateHandle<Self, Self::CrateHandle>> {\n\n\t\tlet ch = match self {\n\n\t\t\t&mut DynCrateSource::FileTreeStorage(ref mut s) => {\n", "file_path": "all-crate-storage/crate_storage.rs", "rank": 64, "score": 31391.79079965115 }, { "content": "pub enum DynCrateSource<S :Read + Seek> {\n\n\tFileTreeStorage(FileTreeStorage),\n\n\tCacheStorage(CacheStorage),\n\n\tBlobCrateStorage(BlobCrateStorage<S>),\n\n\tOverlayCrateSource(Box<OverlayCrateSource<DynCrateSource<S>, DynCrateSource<S>>>),\n\n}\n\n\n\npub enum DynCrateHandle<S :Read + Seek> {\n\n\tBlobCrateHandle(BlobCrateHandle),\n\n\tStorageFileHandle(StorageFileHandle),\n\n\tOverlayCrateHandle(Box<OverlayCrateHandle<DynCrateSource<S>, DynCrateSource<S>>>),\n\n}\n\n\n\nimpl<S :Read + Seek> DynCrateHandle<S> {\n\n\tfn blob(&self) -> Option<&BlobCrateHandle> {\n\n\t\tmatch self {\n\n\t\t\t&DynCrateHandle::BlobCrateHandle(ref h) => Some(h),\n\n\t\t\t_ => None,\n\n\t\t}\n\n\t}\n", "file_path": "all-crate-storage/crate_storage.rs", "rank": 65, "score": 31389.949131964117 }, { "content": "\t\t\t&OverlayCrateHandle::FallbackFound(ref s) => {\n\n\t\t\t\ts.get_file(&mut source.1, path)\n\n\t\t\t},\n\n\t\t}\n\n\t}\n\n}\n\n\n\npub struct FileTreeStorage {\n\n\tstorage_base :PathBuf,\n\n}\n\n\n\nimpl FileTreeStorage {\n\n\tpub fn new(storage_base :&Path) -> Self 
{\n\n\t\tFileTreeStorage {\n\n\t\t\tstorage_base : storage_base.to_path_buf(),\n\n\t\t}\n\n\t}\n\n}\n\n\n\n\n", "file_path": "all-crate-storage/crate_storage.rs", "rank": 66, "score": 31389.594500569125 }, { "content": "\t}\n\n}\n\n\n\nimpl CrateSource for CacheStorage {\n\n\ttype CrateHandle = BlobCrateHandle;\n\n\tfn get_crate_handle_nv(&mut self,\n\n\t\t\tname :String, version :Version) -> Option<CrateHandle<Self, Self::CrateHandle>> {\n\n\t\tif let Some(content) = self.get_crate_nv(name, version) {\n\n\t\t\tSome(CrateHandle {\n\n\t\t\t\tsource : self,\n\n\t\t\t\tcrate_file_handle : BlobCrateHandle::new(content),\n\n\t\t\t})\n\n\t\t} else {\n\n\t\t\tNone\n\n\t\t}\n\n\t}\n\n\tfn get_crate(&mut self, spec :&CrateSpec) -> Option<Vec<u8>> {\n\n\t\tlet crate_file_path = self.storage_base\n\n\t\t\t.join(spec.file_name());\n\n\t\tlet mut f = match File::open(&crate_file_path) {\n", "file_path": "all-crate-storage/crate_storage.rs", "rank": 67, "score": 31388.92102026141 }, { "content": "\t}\n\n}\n\n\n\nimpl<S :Read + Seek> CrateFileHandle<DynCrateSource<S>> for DynCrateHandle<S> {\n\n\tfn get_file_list(&self, source :&mut DynCrateSource<S>) -> Vec<String> {\n\n\t\tmatch source {\n\n\t\t\t&mut DynCrateSource::FileTreeStorage(ref mut s) => {\n\n\t\t\t\tself.blob().unwrap().get_file_list(s)\n\n\t\t\t},\n\n\t\t\t&mut DynCrateSource::CacheStorage(ref mut s) => {\n\n\t\t\t\tself.blob().unwrap().get_file_list(s)\n\n\t\t\t},\n\n\t\t\t&mut DynCrateSource::BlobCrateStorage(ref mut s) => {\n\n\t\t\t\tself.storage().unwrap().get_file_list(s)\n\n\t\t\t},\n\n\t\t\t&mut DynCrateSource::OverlayCrateSource(ref mut s) => {\n\n\t\t\t\tself.overlay().unwrap().get_file_list(s)\n\n\t\t\t},\n\n\t\t}\n\n\t}\n", "file_path": "all-crate-storage/crate_storage.rs", "rank": 68, "score": 31388.847270527855 }, { "content": "\n\n\n\nimpl CrateSource for FileTreeStorage {\n\n\ttype CrateHandle = BlobCrateHandle;\n\n\tfn get_crate_handle_nv(&mut self,\n\n\t\t\tname :String, version :Version) -> 
Option<CrateHandle<Self, Self::CrateHandle>> {\n\n\t\tif let Some(content) = self.get_crate_nv(name, version) {\n\n\t\t\tSome(CrateHandle {\n\n\t\t\t\tsource : self,\n\n\t\t\t\tcrate_file_handle : BlobCrateHandle::new(content),\n\n\t\t\t})\n\n\t\t} else {\n\n\t\t\tNone\n\n\t\t}\n\n\t}\n\n\tfn get_crate(&mut self, spec :&CrateSpec) -> Option<Vec<u8>> {\n\n\t\tlet crate_file_path = self.storage_base\n\n\t\t\t.join(obtain_crate_name_path(&spec.name))\n\n\t\t\t.join(spec.file_name());\n\n\t\tlet mut f = match File::open(&crate_file_path) {\n", "file_path": "all-crate-storage/crate_storage.rs", "rank": 69, "score": 31388.30253480522 }, { "content": "\tfn get_file(&self, source :&mut DynCrateSource<S>,\n\n\t\t\tpath :&str) -> Option<Vec<u8>> {\n\n\t\tmatch source {\n\n\t\t\t&mut DynCrateSource::FileTreeStorage(ref mut s) => {\n\n\t\t\t\tself.blob().unwrap().get_file(s, path)\n\n\t\t\t},\n\n\t\t\t&mut DynCrateSource::CacheStorage(ref mut s) => {\n\n\t\t\t\tself.blob().unwrap().get_file(s, path)\n\n\t\t\t},\n\n\t\t\t&mut DynCrateSource::BlobCrateStorage(ref mut s) => {\n\n\t\t\t\tself.storage().unwrap().get_file(s, path)\n\n\t\t\t},\n\n\t\t\t&mut DynCrateSource::OverlayCrateSource(ref mut s) => {\n\n\t\t\t\tself.overlay().unwrap().get_file(s, path)\n\n\t\t\t},\n\n\t\t}\n\n\t}\n\n}\n\n\n\npub struct OverlayCrateSource<S :CrateSource, T :CrateSource>(S, T);\n", "file_path": "all-crate-storage/crate_storage.rs", "rank": 70, "score": 31387.288291192006 }, { "content": "\t\t\t})\n\n\t\t} else {\n\n\t\t\tNone\n\n\t\t}\n\n\t}\n\n\tfn get_crate(&mut self, spec :&CrateSpec) -> Option<Vec<u8>> {\n\n\t\tmatch self {\n\n\t\t\t&mut DynCrateSource::FileTreeStorage(ref mut s) => {\n\n\t\t\t\ts.get_crate(spec)\n\n\t\t\t},\n\n\t\t\t&mut DynCrateSource::CacheStorage(ref mut s) => {\n\n\t\t\t\ts.get_crate(spec)\n\n\t\t\t},\n\n\t\t\t&mut DynCrateSource::BlobCrateStorage(ref mut s) => {\n\n\t\t\t\ts.get_crate(spec)\n\n\t\t\t},\n\n\t\t\t&mut DynCrateSource::OverlayCrateSource(ref mut s) => 
{\n\n\t\t\t\ts.get_crate(spec)\n\n\t\t\t},\n\n\t\t}\n", "file_path": "all-crate-storage/crate_storage.rs", "rank": 71, "score": 31386.671814789308 }, { "content": "\t\t\tlet path :Option<String> = entry.path().ok()\n\n\t\t\t\t.and_then(|s| if let Some(s) = s.to_str() {\n\n\t\t\t\t\tSome(s.to_owned())\n\n\t\t\t\t} else {\n\n\t\t\t\t\tNone\n\n\t\t\t\t});\n\n\t\t\tlet mut v = Vec::new();\n\n\t\t\tlet v = if entry.read_to_end(&mut v).is_ok() {\n\n\t\t\t\tSome(v)\n\n\t\t\t} else {\n\n\t\t\t\tNone\n\n\t\t\t};\n\n\t\t\tf(path, v);\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl<S :CrateSource> CrateFileHandle<S> for BlobCrateHandle {\n\n\tfn get_file_list(&self, _source :&mut S) -> Vec<String> {\n\n\t\tlet f = self.content.as_slice();\n", "file_path": "all-crate-storage/crate_storage.rs", "rank": 72, "score": 31386.644894424808 }, { "content": "\tpub fn file_name(&self) -> String {\n\n\t\tformat!(\"{}-{}.crate\", self.name, self.version)\n\n\t}\n\n}\n\n\n\npub struct CrateHandle<'a, S :CrateSource + 'a, C :CrateFileHandle<S>> {\n\n\tpub source :&'a mut S,\n\n\tpub crate_file_handle :C,\n\n}\n\n\n\nimpl<'a, S :CrateSource + 'a, C :CrateFileHandle<S>> CrateHandle<'a, S, C> {\n\n\tpub fn get_file_list(&mut self) -> Vec<String> {\n\n\t\tself.crate_file_handle.get_file_list(&mut self.source)\n\n\t}\n\n\tpub fn get_file(&mut self, path :&str) -> Option<Vec<u8>> {\n\n\t\tself.crate_file_handle.get_file(&mut self.source, path)\n\n\t}\n\n}\n\n\n", "file_path": "all-crate-storage/crate_storage.rs", "rank": 73, "score": 31386.04710393771 }, { "content": "\t\t\tOk(f) => f,\n\n\t\t\tErr(_) => {\n\n\t\t\t\treturn None;\n\n\t\t\t},\n\n\t\t};\n\n\t\tlet mut file_buf = Vec::new();\n\n\t\tio::copy(&mut f, &mut file_buf).unwrap();\n\n\t\tSome(file_buf)\n\n\t}\n\n}\n", "file_path": "all-crate-storage/crate_storage.rs", "rank": 74, "score": 31385.53203239155 }, { "content": "\tFallbackFound(F::CrateHandle),\n\n}\n\n\n\nimpl<S :CrateSource, T: CrateSource> CrateFileHandle<OverlayCrateSource<S, T>> for 
OverlayCrateHandle<S, T> {\n\n\tfn get_file_list(&self, source :&mut OverlayCrateSource<S, T>) -> Vec<String> {\n\n\t\tmatch self {\n\n\t\t\t&OverlayCrateHandle::DefaultFound(ref s) => {\n\n\t\t\t\ts.get_file_list(&mut source.0)\n\n\t\t\t},\n\n\t\t\t&OverlayCrateHandle::FallbackFound(ref s) => {\n\n\t\t\t\ts.get_file_list(&mut source.1)\n\n\t\t\t},\n\n\t\t}\n\n\t}\n\n\tfn get_file(&self, source :&mut OverlayCrateSource<S, T>,\n\n\t\t\tpath :&str) -> Option<Vec<u8>> {\n\n\t\tmatch self {\n\n\t\t\t&OverlayCrateHandle::DefaultFound(ref s) => {\n\n\t\t\t\ts.get_file(&mut source.0, path)\n\n\t\t\t},\n", "file_path": "all-crate-storage/crate_storage.rs", "rank": 75, "score": 31384.479494999214 }, { "content": "\t\t\tname :String, version :Version) -> Option<CrateHandle<Self, Self::CrateHandle>> {\n\n\t\tif let Some(ch) = self.get_overlay_crate_handle_nv(name, version) {\n\n\t\t\tSome(CrateHandle {\n\n\t\t\t\tsource : self,\n\n\t\t\t\tcrate_file_handle : ch,\n\n\t\t\t})\n\n\t\t} else {\n\n\t\t\tNone\n\n\t\t}\n\n\t}\n\n\tfn get_crate(&mut self, spec :&CrateSpec) -> Option<Vec<u8>> {\n\n\t\tif let Some(v) = self.0.get_crate(spec) {\n\n\t\t\treturn Some(v);\n\n\t\t}\n\n\t\treturn self.1.get_crate(spec);\n\n\t}\n\n}\n\n\n\npub enum OverlayCrateHandle<D :CrateSource, F :CrateSource> {\n\n\tDefaultFound(D::CrateHandle),\n", "file_path": "all-crate-storage/crate_storage.rs", "rank": 76, "score": 31384.241574484462 }, { "content": "\t\tlet mut l = Vec::new();\n\n\t\tlet decoded = GzDecoder::new(f);\n\n\t\tlet mut archive = Archive::new(decoded);\n\n\t\tfor entry in archive.entries().unwrap() {\n\n\t\t\tlet entry = entry.unwrap();\n\n\t\t\tlet path = entry.path().unwrap();\n\n\t\t\tlet s :String = path.to_str().unwrap().to_owned();\n\n\t\t\tl.push(s);\n\n\t\t}\n\n\t\tl\n\n\t}\n\n\tfn get_file(&self, _ :&mut S, path :&str) -> Option<Vec<u8>> {\n\n\t\textract_path_from_gz(self.content.as_slice(), path)\n\n\t}\n\n}\n\n\n\nmacro_rules! 
otry {\n\n\t($v:expr) => {{\n\n\t\tif let Some(v) = $v.ok() {\n\n\t\t\tv\n\n\t\t} else {\n\n\t\t\treturn None;\n\n\t\t}\n\n\t}};\n\n}\n\n\n", "file_path": "all-crate-storage/crate_storage.rs", "rank": 77, "score": 31383.128404491206 }, { "content": "\n\n\t\t\t\t\tlet spec = CrateSpec {\n\n\t\t\t\t\t\tname : name.to_owned(),\n\n\t\t\t\t\t\tversion : v.version.clone(),\n\n\t\t\t\t\t};\n\n\t\t\t\t\tlet crate_file_buf = match source.get_crate(&spec) {\n\n\t\t\t\t\t\tSome(cfb) => cfb,\n\n\t\t\t\t\t\tNone => return None,\n\n\t\t\t\t\t};\n\n\n\n\t\t\t\t\tlet mut hctx = HashCtx::new();\n\n\t\t\t\t\tio::copy(&mut crate_file_buf.as_slice(), &mut hctx).unwrap();\n\n\t\t\t\t\tlet d = hctx.finish_and_get_digest();\n\n\t\t\t\t\tSome((spec, crate_file_buf, d))\n\n\t\t\t\t})\n\n\t\t\t});\n\n\t\tself.store_parallel_iter(thread_count, crate_iter);\n\n\t}\n\n}\n\n\n", "file_path": "all-crate-storage/crate_storage.rs", "rank": 78, "score": 31383.0742245338 }, { "content": "\n\nimpl<S :CrateSource, T :CrateSource> OverlayCrateSource<S, T> {\n\n\tpub fn new(default :S, fallback :T) -> Self {\n\n\t\tOverlayCrateSource(default, fallback)\n\n\t}\n\n\tfn get_overlay_crate_handle_nv(&mut self,\n\n\t\t\tname :String, version :Version) -> Option<OverlayCrateHandle<S, T>> {\n\n\t\tif let Some(v) = self.0.get_crate_handle_nv(name.clone(), version.clone()) {\n\n\t\t\treturn Some(OverlayCrateHandle::DefaultFound(v.crate_file_handle));\n\n\t\t}\n\n\t\tif let Some(v) = self.1.get_crate_handle_nv(name, version) {\n\n\t\t\treturn Some(OverlayCrateHandle::FallbackFound(v.crate_file_handle));\n\n\t\t}\n\n\t\treturn None;\n\n\t}\n\n}\n\n\n\nimpl<S :CrateSource, T :CrateSource> CrateSource for OverlayCrateSource<S, T> {\n\n\ttype CrateHandle = OverlayCrateHandle<S, T>;\n\n\tfn get_crate_handle_nv(&mut self,\n", "file_path": "all-crate-storage/crate_storage.rs", "rank": 79, "score": 31382.99088270081 }, { "content": "\t\t\t\ts.get_crate_handle_nv(name, version)\n\n\t\t\t\t\t.map(|h| 
DynCrateHandle::BlobCrateHandle(h.crate_file_handle))\n\n\t\t\t},\n\n\t\t\t&mut DynCrateSource::CacheStorage(ref mut s) => {\n\n\t\t\t\ts.get_crate_handle_nv(name, version)\n\n\t\t\t\t\t.map(|h| DynCrateHandle::BlobCrateHandle(h.crate_file_handle))\n\n\t\t\t},\n\n\t\t\t&mut DynCrateSource::BlobCrateStorage(ref mut s) => {\n\n\t\t\t\ts.get_crate_handle_nv(name, version)\n\n\t\t\t\t\t.map(|h| DynCrateHandle::StorageFileHandle(h.crate_file_handle))\n\n\t\t\t},\n\n\t\t\t&mut DynCrateSource::OverlayCrateSource(ref mut s) => {\n\n\t\t\t\ts.get_crate_handle_nv(name, version)\n\n\t\t\t\t\t.map(|h| DynCrateHandle::OverlayCrateHandle(Box::new(h.crate_file_handle)))\n\n\t\t\t},\n\n\t\t};\n\n\t\tif let Some(ch) = ch {\n\n\t\t\tSome(CrateHandle {\n\n\t\t\t\tsource : self,\n\n\t\t\t\tcrate_file_handle : ch,\n", "file_path": "all-crate-storage/crate_storage.rs", "rank": 80, "score": 31381.79124907122 }, { "content": "\t\tSome(meta)\n\n\t}\n\n}\n\n\n\nimpl<S :Read + Seek + Write> BlobCrateStorage<S> {\n\n\tpub fn store(&mut self) -> io::Result<()> {\n\n\t\ttry!(self.b.write_header_and_index());\n\n\t\tOk(())\n\n\t}\n\n}\n\n\n", "file_path": "all-crate-storage/blob_crate_storage.rs", "rank": 81, "score": 30462.71277816726 }, { "content": "\t\t\t}\n\n\t\t}\n\n\t\tres.push(mblob_digests);\n\n\t}\n\n\tres\n\n}\n\n\n\nimpl<S :Read + Seek + Write> BlobCrateStorage<S> {\n\n\tpub fn store_parallel_mb<T :Read + Seek + Write>(\n\n\t\t\t&mut self, src :&mut BlobCrateStorage<T>,\n\n\t\t\tblob_graph :&GraphOfBlobs, thread_count :u16) {\n\n\t\tuse std::sync::mpsc::{sync_channel, TrySendError};\n\n\t\tuse multiqueue::mpmc_queue;\n\n\t\tuse std::time::Duration;\n\n\t\tuse std::thread;\n\n\n\n\t\t// TODO somehow also store the metadata -- without it we can't provide reading functionality to anyone\n\n\n\n\t\tlet digest_lists = get_digest_lists(blob_graph);\n\n\t\tlet mut digests_iter = digest_lists.iter();\n", "file_path": "all-crate-storage/blob_crate_storage.rs", "rank": 82, "score": 
30456.375346237095 }, { "content": "}\n\n\n\nimpl<S :Read + Seek + Write> CrateStorage for BlobCrateStorage<S> {\n\n\tfn store_parallel_iter<I :Iterator<Item = (CrateSpec, Vec<u8>, Digest)>>(\n\n\t\t\t&mut self, thread_count :u16, mut crate_iter :I) {\n\n\t\tuse std::sync::mpsc::{sync_channel, TrySendError};\n\n\t\tuse multiqueue::mpmc_queue;\n\n\t\tuse std::time::Duration;\n\n\t\tuse std::thread;\n\n\n\n\t\tlet (bt_tx, bt_rx) = sync_channel(3 * thread_count as usize);\n\n\t\tlet (pt_tx, pt_rx) = mpmc_queue(3 * thread_count as u64);\n\n\t\tfor _ in 0 .. thread_count {\n\n\t\t\tlet bt_tx = bt_tx.clone();\n\n\t\t\tlet pt_rx = pt_rx.clone();\n\n\t\t\tthread::spawn(move || {\n\n\t\t\t\twhile let Ok(task) = pt_rx.recv() {\n\n\t\t\t\t\thandle_parallel_task(task, |bt| bt_tx.send(bt).unwrap());\n\n\t\t\t\t}\n\n\t\t\t});\n", "file_path": "all-crate-storage/blob_crate_storage.rs", "rank": 83, "score": 30454.063944167014 }, { "content": "macro_rules! optry {\n\n\t($e:expr) => {\n\n\t\tmatch $e {\n\n\t\t\tSome(d) => d,\n\n\t\t\tNone => return None,\n\n\t\t}\n\n\t};\n\n}\n\n\n\nmacro_rules! 
decompress {\n\n\t($e:expr) => {{\n\n\t\tlet mut gz_dec = GzDecoder::new($e.as_slice());\n\n\t\tlet mut r = Vec::new();\n\n\t\tio::copy(&mut gz_dec, &mut r).unwrap();\n\n\t\tr\n\n\t}}\n\n}\n\n\n\nimpl<S :Read + Seek> BlobCrateStorage<S> {\n\n\tpub fn empty(storage :S) -> Self {\n", "file_path": "all-crate-storage/blob_crate_storage.rs", "rank": 84, "score": 30453.641139106338 }, { "content": "use super::blob_storage::BlobStorage;\n\nuse super::hash_ctx::{HashCtx, Digest};\n\nuse super::reconstruction::{CrateContentBlobs, CrateRecMetadata,\n\n\tCrateRecMetaWithBlobs, hdr_from_ptr};\n\nuse super::crate_storage::{CrateStorage, CrateSpec, CrateSource,\n\n\tCrateHandle, CrateFileHandle};\n\nuse super::multi_blob::MultiBlob;\n\nuse super::diff::Diff;\n\nuse super::multi_blob_crate_storage::GraphOfBlobs;\n\n\n\nuse semver::Version;\n\nuse flate2::{Compression, GzBuilder};\n\nuse flate2::read::GzDecoder;\n\nuse std::io::{self, Read, Seek, Write, Result as IoResult};\n\nuse std::collections::HashSet;\n\n\n\npub struct BlobCrateStorage<S :Read + Seek> {\n\n\tpub(crate) b :BlobStorage<S>,\n\n}\n\n\n", "file_path": "all-crate-storage/blob_crate_storage.rs", "rank": 85, "score": 30453.399434400304 }, { "content": "\t\tBlobCrateStorage {\n\n\t\t\tb : BlobStorage::empty(storage),\n\n\t\t}\n\n\t}\n\n\tpub fn new(storage :S) -> IoResult<Self> {\n\n\t\tOk(BlobCrateStorage {\n\n\t\t\tb : try!(BlobStorage::new(storage)),\n\n\t\t})\n\n\t}\n\n\tpub fn load(storage :S) -> IoResult<Self> {\n\n\t\tOk(BlobCrateStorage {\n\n\t\t\tb : try!(BlobStorage::load(storage)),\n\n\t\t})\n\n\t}\n\n\tpub(crate) fn get_crate_rec_meta(&mut self, s :&CrateSpec) -> Option<CrateRecMetadata> {\n\n\t\tlet meta_d = optry!(self.b.name_index.get(&s.file_name())).clone();\n\n\n\n\t\tlet cmeta = optry!(optry!(self.b.get(&meta_d).ok()));\n\n\t\tlet dmeta = decompress!(cmeta);\n\n\t\tlet meta = optry!(CrateRecMetadata::deserialize(dmeta.as_slice()).ok());\n", "file_path": "all-crate-storage/blob_crate_storage.rs", 
"rank": 86, "score": 30450.61910499067 }, { "content": "\t\t\t}\n\n\t\t\tlet blob = optry!(optry!(source.b.get(d).ok()));\n\n\t\t\tlet decompressed = decompress!(blob);\n\n\t\t\treturn Some(decompressed);\n\n\t\t}\n\n\t\tNone\n\n\t}\n\n}\n\n\n\nimpl<S :Read + Seek> CrateSource for BlobCrateStorage<S> {\n\n\n\n\ttype CrateHandle = StorageFileHandle;\n\n\tfn get_crate_handle_nv(&mut self,\n\n\t\t\tname :String, version :Version) -> Option<CrateHandle<Self, Self::CrateHandle>> {\n\n\t\tlet s = CrateSpec {\n\n\t\t\tname,\n\n\t\t\tversion,\n\n\t\t};\n\n\t\tlet meta = optry!(self.get_crate_rec_meta(&s));\n\n\n", "file_path": "all-crate-storage/blob_crate_storage.rs", "rank": 87, "score": 30449.537692497986 }, { "content": "\t\t}\n\n\t}\n\n}\n\n\n\npub struct StorageFileHandle {\n\n\tmeta :CrateRecMetadata,\n\n}\n\n\n\nimpl<S :Read + Seek> CrateFileHandle<BlobCrateStorage<S>> for StorageFileHandle {\n\n\tfn get_file_list(&self, _source :&mut BlobCrateStorage<S>) -> Vec<String> {\n\n\t\tself.meta.get_file_list()\n\n\t}\n\n\tfn get_file(&self, source :&mut BlobCrateStorage<S>,\n\n\t\t\tpath :&str) -> Option<Vec<u8>> {\n\n\t\tfor &(ref hdr, ref d) in self.meta.entry_metadata.iter() {\n\n\t\t\tlet hdr = hdr_from_ptr(hdr);\n\n\t\t\tlet p = hdr.path().unwrap();\n\n\t\t\tlet s = p.to_str().unwrap();\n\n\t\t\tif s != path {\n\n\t\t\t\tcontinue;\n", "file_path": "all-crate-storage/blob_crate_storage.rs", "rank": 88, "score": 30448.615084920075 }, { "content": "\t\tlet mut to_go = digest_lists.len();\n\n\n\n\t\tlet (bt_tx, bt_rx) = sync_channel(3 * thread_count as usize);\n\n\t\tlet (pt_tx, pt_rx) = mpmc_queue(3 * thread_count as u64);\n\n\t\tfor _ in 0 .. 
thread_count {\n\n\t\t\tlet bt_tx = bt_tx.clone();\n\n\t\t\tlet pt_rx = pt_rx.clone();\n\n\t\t\tthread::spawn(move || {\n\n\t\t\t\twhile let Ok(task) = pt_rx.recv() {\n\n\t\t\t\t\thandle_parallel_task(task, |bt| bt_tx.send(bt).unwrap());\n\n\t\t\t\t}\n\n\t\t\t});\n\n\t\t}\n\n\t\tdrop(bt_tx);\n\n\t\tpt_rx.unsubscribe();\n\n\t\tlet mut par_task_backlog = Vec::new();\n\n\t\tlet mut blobs_to_store = HashSet::new();\n\n\t\tloop {\n\n\t\t\tlet mut done_something = false;\n\n\t\t\tif let Ok(task) = bt_rx.recv_timeout(Duration::new(0, 50_000)) {\n", "file_path": "all-crate-storage/blob_crate_storage.rs", "rank": 89, "score": 30445.70120383319 }, { "content": "\t\t\t\thandle_blocking_task(task, &mut self.b,\n\n\t\t\t\t\t&mut blobs_to_store, |tsk| par_task_backlog.push(tsk));\n\n\t\t\t\tdone_something = true;\n\n\t\t\t}\n\n\t\t\tif par_task_backlog.is_empty() {\n\n\t\t\t\tfor digests in (&mut digests_iter).take(10) {\n\n\t\t\t\t\tto_go -= 1;\n\n\t\t\t\t\tprintln!(\"{} {}\", to_go, digests.len());\n\n\t\t\t\t\tlet blobs = digests.iter()\n\n\t\t\t\t\t\t// TODO don't use unwrap here\n\n\t\t\t\t\t\t// TODO instead of filter_map and skipping report an error or something something\n\n\t\t\t\t\t\t.filter_map(|digest| src.b.get(digest).unwrap().map(|b|(*digest, b)))\n\n\t\t\t\t\t\t.collect::<Vec<_>>();\n\n\t\t\t\t\tpar_task_backlog.push(ParallelTask::CreateMultiBlob(blobs));\n\n\t\t\t\t\tdone_something = true;\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tloop {\n\n\t\t\t\tlet mut removed_something = false;\n\n\t\t\t\tif let Some(t) = par_task_backlog.pop() {\n", "file_path": "all-crate-storage/blob_crate_storage.rs", "rank": 90, "score": 30444.17399167931 }, { "content": "\t\tSome(CrateHandle {\n\n\t\t\tsource : self,\n\n\t\t\tcrate_file_handle : StorageFileHandle {\n\n\t\t\t\tmeta\n\n\t\t\t},\n\n\t\t})\n\n\t}\n\n\tfn get_crate(&mut self, s :&CrateSpec) -> Option<Vec<u8>> {\n\n\t\tlet meta = optry!(self.get_crate_rec_meta(s));\n\n\t\tlet mut blobs = 
Vec::with_capacity(meta.entry_metadata.len());\n\n\t\tfor &(ref _hdr, ref d) in meta.entry_metadata.iter() {\n\n\t\t\tlet blob = optry!(optry!(self.b.get(d).ok()));\n\n\t\t\tlet decompressed = decompress!(blob);\n\n\t\t\tblobs.push((*d, decompressed));\n\n\t\t}\n\n\t\tlet crmb = CrateRecMetaWithBlobs {\n\n\t\t\tmeta,\n\n\t\t\tblobs\n\n\t\t};\n\n\t\tlet ccb = CrateContentBlobs::from_meta_with_blobs(crmb);\n\n\t\tSome(ccb.to_archive_file())\n\n\t}\n\n}\n\n\n", "file_path": "all-crate-storage/blob_crate_storage.rs", "rank": 91, "score": 30443.600174087245 }, { "content": "\t\t\t\t} else {\n\n\t\t\t\t\t// UTF-8 decode error. Skip this one.\n\n\t\t\t\t\t// TODO emit a compression parallel task here. one day.\n\n\t\t\t\t\tlet mut gz_enc = GzBuilder::new().read(blob.as_slice(), Compression::best());\n\n\t\t\t\t\tlet mut buffer_compressed = Vec::new();\n\n\t\t\t\t\tio::copy(&mut gz_enc, &mut buffer_compressed).unwrap();\n\n\n\n\t\t\t\t\temit_task(BlockingTask::StoreBlob(*digest, buffer_compressed));\n\n\t\t\t\t}\n\n\t\t\t}\n\n\n\n\t\t\tif digests.len() > 0 {\n\n\t\t\t\tlet mb = MultiBlob {\n\n\t\t\t\t\troot_blob : root_blob.unwrap(),\n\n\t\t\t\t\tdiff_list,\n\n\t\t\t\t};\n\n\t\t\t\tlet mut mb_blob = Vec::new();\n\n\t\t\t\tmb.serialize(&mut mb_blob).unwrap();\n\n\n\n\t\t\t\tlet mut hctx = HashCtx::new();\n", "file_path": "all-crate-storage/blob_crate_storage.rs", "rank": 92, "score": 30443.56515437845 }, { "content": "\t\t\t\tio::copy(&mut mb_blob.as_slice(), &mut hctx).unwrap();\n\n\t\t\t\tlet multi_blob_digest = hctx.finish_and_get_digest();\n\n\n\n\t\t\t\tlet mut gz_enc = GzBuilder::new().read(mb_blob.as_slice(), Compression::best());\n\n\t\t\t\tlet mut buffer_compressed = Vec::new();\n\n\t\t\t\tio::copy(&mut gz_enc, &mut buffer_compressed).unwrap();\n\n\n\n\t\t\t\tlet digests = blobs.iter()\n\n\t\t\t\t\t\t.map(|(digest, _b)| *digest)\n\n\t\t\t\t\t\t.collect::<Vec<_>>();\n\n\n\n\t\t\t\tlet task = BlockingTask::StoreMultiBlob(multi_blob_digest, digests, 
buffer_compressed);\n\n\t\t\t\temit_task(task);\n\n\t\t\t}\n\n\t\t},\n\n\t}\n\n}\n\n\n", "file_path": "all-crate-storage/blob_crate_storage.rs", "rank": 93, "score": 30443.417930773856 }, { "content": "\t\t\tlet mut buffer_compressed = Vec::new();\n\n\t\t\tio::copy(&mut gz_enc, &mut buffer_compressed).unwrap();\n\n\n\n\t\t\temit_task(BlockingTask::StoreBlob(d, buffer_compressed));\n\n\t\t},\n\n\t\tParallelTask::CreateMultiBlob(blobs) => {\n\n\t\t\tlet mut root_blob = None;\n\n\t\t\tlet mut diff_list = Vec::new();\n\n\t\t\tlet mut digests = Vec::new();\n\n\t\t\tlet mut last :Option<(Digest, &str)> = None;\n\n\t\t\tfor (digest, blob) in blobs.iter() {\n\n\t\t\t\tif let Ok(s) = ::std::str::from_utf8(&blob) {\n\n\t\t\t\t\tdigests.push(digest);\n\n\t\t\t\t\tif let Some(l) = last.take() {\n\n\t\t\t\t\t\tlet diff = Diff::from_texts_nl(&l.1, &s);\n\n\t\t\t\t\t\tdiff_list.push((l.0, *digest, diff));\n\n\t\t\t\t\t} else {\n\n\t\t\t\t\t\troot_blob = Some((*digest, s.to_string()));\n\n\t\t\t\t\t}\n\n\t\t\t\t\tlast = Some((*digest, s));\n", "file_path": "all-crate-storage/blob_crate_storage.rs", "rank": 94, "score": 30443.286373462077 }, { "content": "\t\t\tlet meta_blob_digest = meta_blob_hctx.finish_and_get_digest();\n\n\t\t\t// The blob digest may be already present, e.g. if\n\n\t\t\t// we had been writing this particular crate into the\n\n\t\t\t// BlobStorage previously. 
In order to be on the safe\n\n\t\t\t// side, check for existence before inserting into\n\n\t\t\t// the blob storage.\n\n\t\t\tif blobs_to_store.insert(meta_blob_digest) {\n\n\t\t\t\temit_task(ParallelTask::CompressBlob(meta_blob_digest, meta_blob));\n\n\t\t\t}\n\n\t\t\t// enter the meta blob into the blob storage\n\n\t\t\tblob_store.name_index.insert(crate_file_name, meta_blob_digest);\n\n\n\n\t\t},\n\n\t\tBlockingTask::StoreBlob(d, blob) => {\n\n\t\t\tlet actually_added = blob_store.insert(d, &blob).unwrap();\n\n\t\t\t// If the blob is already present, it indicates a bug because\n\n\t\t\t// we are supposed to check for presence before we ask for the\n\n\t\t\t// blob to be compressed. If we would just shrug this off, we'd\n\n\t\t\t// waste cycles spent on compressing the blobs.\n\n\t\t\tassert!(actually_added, \"Tried to insert a blob into the storage that was already present\");\n", "file_path": "all-crate-storage/blob_crate_storage.rs", "rank": 95, "score": 30442.058169279146 }, { "content": "use std::io::Cursor;\n\nuse super::blob_storage::{BlobStorage};\n\n\n\n#[test]\n", "file_path": "all-crate-storage/blob_storage_test.rs", "rank": 96, "score": 30441.904185731953 }, { "content": "\t\t\t\tlet mut removed_something = false;\n\n\t\t\t\tif let Some(t) = par_task_backlog.pop() {\n\n\t\t\t\t\tif let Err(e) = pt_tx.try_send(t) {\n\n\t\t\t\t\t\tlet t = match e {\n\n\t\t\t\t\t\t\tTrySendError::Full(t) => t,\n\n\t\t\t\t\t\t\tTrySendError::Disconnected(t) => t,\n\n\t\t\t\t\t\t};\n\n\t\t\t\t\t\tpar_task_backlog.push(t);\n\n\t\t\t\t\t} else {\n\n\t\t\t\t\t\tremoved_something = true;\n\n\t\t\t\t\t\tdone_something = true;\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t\tif !removed_something {\n\n\t\t\t\t\tbreak;\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tif !done_something && par_task_backlog.is_empty() {\n\n\t\t\t\tbreak;\n\n\t\t\t}\n", "file_path": "all-crate-storage/blob_crate_storage.rs", "rank": 97, "score": 30440.855469633836 }, { "content": 
"\t\t},\n\n\t\tBlockingTask::StoreMultiBlob(mblob_digest, digests, buf_compressed) => {\n\n\t\t\tlet actually_added = blob_store.insert(mblob_digest, &buf_compressed).unwrap();\n\n\t\t\tfor d in digests.iter() {\n\n\t\t\t\tblob_store.digest_to_multi_blob.insert(*d, mblob_digest);\n\n\t\t\t}\n\n\t\t\t// If the blob is already present, it indicates a bug because\n\n\t\t\t// we are supposed to check for presence before we ask for the\n\n\t\t\t// blob to be compressed. If we would just shrug this off, we'd\n\n\t\t\t// waste cycles spent on compressing the blobs.\n\n\t\t\tassert!(actually_added, \"Tried to insert a blob into the storage that was already present\");\n\n\t\t},\n\n\t}\n\n}\n", "file_path": "all-crate-storage/blob_crate_storage.rs", "rank": 98, "score": 30440.079060326912 }, { "content": "\t\t}\n\n\t\tdrop(bt_tx);\n\n\t\tpt_rx.unsubscribe();\n\n\t\tlet mut par_task_backlog = Vec::new();\n\n\t\tlet mut blobs_to_store = HashSet::new();\n\n\t\tloop {\n\n\t\t\tlet mut done_something = false;\n\n\t\t\tif let Ok(task) = bt_rx.recv_timeout(Duration::new(0, 50_000)) {\n\n\t\t\t\thandle_blocking_task(task, &mut self.b,\n\n\t\t\t\t\t&mut blobs_to_store, |tsk| par_task_backlog.push(tsk));\n\n\t\t\t\tdone_something = true;\n\n\t\t\t}\n\n\t\t\tif par_task_backlog.is_empty() {\n\n\t\t\t\tfor (sp, b, d) in (&mut crate_iter).take(10) {\n\n\t\t\t\t\tlet name = sp.file_name();\n\n\t\t\t\t\tpar_task_backlog.push(ParallelTask::ObtainCrateContentBlobs(name, b, d));\n\n\t\t\t\t\tdone_something = true;\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t\tloop {\n", "file_path": "all-crate-storage/blob_crate_storage.rs", "rank": 99, "score": 30439.359961900293 } ]
Rust
chain/cosmos/src/adapter.rs
Perpetual-Altruism-Ltd/graph-node
abbb2d04713d9e988419814d2e6ca433ee165bd1
use std::collections::HashSet; use prost::Message; use prost_types::Any; use crate::capabilities::NodeCapabilities; use crate::{data_source::DataSource, Chain}; use graph::blockchain as bc; use graph::firehose::EventTypeFilter; use graph::prelude::*; const EVENT_TYPE_FILTER_TYPE_URL: &str = "type.googleapis.com/sf.cosmos.transform.v1.EventTypeFilter"; #[derive(Clone, Debug, Default)] pub struct TriggerFilter { pub(crate) event_type_filter: CosmosEventTypeFilter, pub(crate) block_filter: CosmosBlockFilter, } impl bc::TriggerFilter<Chain> for TriggerFilter { fn extend<'a>(&mut self, data_sources: impl Iterator<Item = &'a DataSource> + Clone) { self.event_type_filter .extend_from_data_sources(data_sources.clone()); self.block_filter.extend_from_data_sources(data_sources); } fn node_capabilities(&self) -> NodeCapabilities { NodeCapabilities {} } fn extend_with_template( &mut self, _data_source: impl Iterator<Item = <Chain as bc::Blockchain>::DataSourceTemplate>, ) { } fn to_firehose_filter(self) -> Vec<prost_types::Any> { if self.block_filter.trigger_every_block { return vec![]; } if self.event_type_filter.event_types.is_empty() { return vec![]; } let filter = EventTypeFilter { event_types: Vec::from_iter(self.event_type_filter.event_types), }; vec![Any { type_url: EVENT_TYPE_FILTER_TYPE_URL.to_string(), value: filter.encode_to_vec(), }] } } pub type EventType = String; #[derive(Clone, Debug, Default)] pub(crate) struct CosmosEventTypeFilter { pub event_types: HashSet<EventType>, } impl CosmosEventTypeFilter { pub(crate) fn matches(&self, event_type: &EventType) -> bool { self.event_types.contains(event_type) } fn extend_from_data_sources<'a>(&mut self, data_sources: impl Iterator<Item = &'a DataSource>) { self.event_types.extend( data_sources.flat_map(|data_source| data_source.events().map(ToString::to_string)), ); } } #[derive(Clone, Debug, Default)] pub(crate) struct CosmosBlockFilter { pub trigger_every_block: bool, } impl CosmosBlockFilter { fn 
extend_from_data_sources<'a>( &mut self, mut data_sources: impl Iterator<Item = &'a DataSource>, ) { if !self.trigger_every_block { self.trigger_every_block = data_sources.any(DataSource::has_block_handler); } } } #[cfg(test)] mod test { use graph::blockchain::TriggerFilter as _; use super::*; #[test] fn test_trigger_filters() { let cases = [ (TriggerFilter::test_new(false, &[]), None), (TriggerFilter::test_new(true, &[]), None), (TriggerFilter::test_new(true, &["event_1", "event_2"]), None), ( TriggerFilter::test_new(false, &["event_1", "event_2", "event_3"]), Some(event_type_filter_with(&["event_1", "event_2", "event_3"])), ), ]; for (trigger_filter, expected_filter) in cases { let firehose_filter = trigger_filter.to_firehose_filter(); let decoded_filter = decode_filter(firehose_filter); assert_eq!(decoded_filter.is_some(), expected_filter.is_some()); if let (Some(mut expected_filter), Some(mut decoded_filter)) = (expected_filter, decoded_filter) { expected_filter.event_types.sort(); decoded_filter.event_types.sort(); assert_eq!(decoded_filter, expected_filter); } } } impl TriggerFilter { pub(crate) fn test_new(trigger_every_block: bool, event_types: &[&str]) -> TriggerFilter { TriggerFilter { event_type_filter: CosmosEventTypeFilter { event_types: event_types.iter().map(ToString::to_string).collect(), }, block_filter: CosmosBlockFilter { trigger_every_block, }, } } } fn event_type_filter_with(event_types: &[&str]) -> EventTypeFilter { EventTypeFilter { event_types: event_types.iter().map(ToString::to_string).collect(), } } fn decode_filter(proto_filters: Vec<Any>) -> Option<EventTypeFilter> { assert!(proto_filters.len() <= 1); let proto_filter = proto_filters.get(0)?; assert_eq!(proto_filter.type_url, EVENT_TYPE_FILTER_TYPE_URL); let firehose_filter = EventTypeFilter::decode(&*proto_filter.value) .expect("Could not decode EventTypeFilter from protobuf Any"); Some(firehose_filter) } }
use std::collections::HashSet; use prost::Message; use prost_types::Any; use crate::capabilities::NodeCapabilities; use crate::{data_source::DataSource, Chain}; use graph::blockchain as bc; use graph::firehose::EventTypeFilter; use graph::prelude::*; const EVENT_TYPE_FILTER_TYPE_URL: &str = "type.googleapis.com/sf.cosmos.transform.v1.EventTypeFilter"; #[derive(Clone, Debug, Default)] pub struct TriggerFilter { pub(crate) event_type_filter: CosmosEventTypeFilter, pub(crate) block_filter: CosmosBlockFilter, } impl bc::TriggerFilter<Chain> for TriggerFilter { fn extend<'a>(&mut self, data_sources: impl Iterator<Item = &'a DataSource> + Clone) { self.event_type_filter .extend_from_data_sources(data_sources.clone()); self.block_filter.extend_from_data_sources(data_sources); } fn node_capabilities(&self) -> NodeCapabilities { NodeCapabilities {} } fn extend_with_template( &mut self, _data_source: impl Iterator<Item = <Chain as bc::Blockchain>::DataSourceTemplate>, ) { } fn to_firehose_filter(self) -> Vec<prost_types::Any> { if self.block_filter.trigger_every_block { return vec![]; } if self.event_type_filter.event_types.is_empty() { return vec![]; } let filter = EventTypeFilter { event_types: Vec::from_iter(self.event_type_filter.event_types), }; vec![Any { type_url: EVENT_TYPE_FILTER_TYPE_URL.to_string(), value: filter.encode_to_vec(), }] } } pub type EventType = String; #[derive(Clone, Debug, Default)] pub(crate) struct CosmosEventTypeFilter { pub event_types: HashSet<EventType>, } impl CosmosEventTypeFilter { pub(crate) fn matches(&self, event_type: &EventType) -> bool { self.event_types.contains(event_type) } fn extend_from_data_sources<'a>(&mut self, data_sources: impl Iterator<Item = &'a DataSource>) { self.event_types.extend( data_sources.flat_map(|data_source| data_source.events().map(ToString::to_string)), ); } } #[derive(Clone, Debug, Default)] pub(crate) struct CosmosBlockFilter { pub trigger_every_block: bool, } impl CosmosBlockFilter { fn 
extend_from_data_sources<'a>( &mut self, mut data_sources: impl Iterator<Item = &'a DataSource>, ) { if !self.trigger_every_block { self.trigger_every_block = data_sources.any(DataSource::has_block_handler); } } } #[cfg(test)] mod test { use graph::blockchain::TriggerFilter as _; use super::*; #[test] fn test_trigger_filters() { let cases = [ (TriggerFilter::test_new(false, &[]), None), (TriggerFilter::test_new(true, &[]), None),
vent_types: event_types.iter().map(ToString::to_string).collect(), }, block_filter: CosmosBlockFilter { trigger_every_block, }, } } } fn event_type_filter_with(event_types: &[&str]) -> EventTypeFilter { EventTypeFilter { event_types: event_types.iter().map(ToString::to_string).collect(), } } fn decode_filter(proto_filters: Vec<Any>) -> Option<EventTypeFilter> { assert!(proto_filters.len() <= 1); let proto_filter = proto_filters.get(0)?; assert_eq!(proto_filter.type_url, EVENT_TYPE_FILTER_TYPE_URL); let firehose_filter = EventTypeFilter::decode(&*proto_filter.value) .expect("Could not decode EventTypeFilter from protobuf Any"); Some(firehose_filter) } }
(TriggerFilter::test_new(true, &["event_1", "event_2"]), None), ( TriggerFilter::test_new(false, &["event_1", "event_2", "event_3"]), Some(event_type_filter_with(&["event_1", "event_2", "event_3"])), ), ]; for (trigger_filter, expected_filter) in cases { let firehose_filter = trigger_filter.to_firehose_filter(); let decoded_filter = decode_filter(firehose_filter); assert_eq!(decoded_filter.is_some(), expected_filter.is_some()); if let (Some(mut expected_filter), Some(mut decoded_filter)) = (expected_filter, decoded_filter) { expected_filter.event_types.sort(); decoded_filter.event_types.sort(); assert_eq!(decoded_filter, expected_filter); } } } impl TriggerFilter { pub(crate) fn test_new(trigger_every_block: bool, event_types: &[&str]) -> TriggerFilter { TriggerFilter { event_type_filter: CosmosEventTypeFilter { e
random
[ { "content": "pub fn place(name: &str) -> Result<Option<(Vec<Shard>, Vec<NodeId>)>, String> {\n\n CONFIG.deployment.place(name, NETWORK_NAME)\n\n}\n\n\n\npub async fn create_subgraph(\n\n subgraph_id: &DeploymentHash,\n\n schema: &str,\n\n base: Option<(DeploymentHash, BlockPtr)>,\n\n) -> Result<DeploymentLocator, StoreError> {\n\n let schema = Schema::parse(schema, subgraph_id.clone()).unwrap();\n\n\n\n let manifest = SubgraphManifest::<graph_chain_ethereum::Chain> {\n\n id: subgraph_id.clone(),\n\n spec_version: Version::new(1, 0, 0),\n\n features: BTreeSet::new(),\n\n description: Some(format!(\"manifest for {}\", subgraph_id)),\n\n repository: Some(format!(\"repo for {}\", subgraph_id)),\n\n schema: schema.clone(),\n\n data_sources: vec![],\n\n graft: None,\n", "file_path": "store/test-store/src/store.rs", "rank": 0, "score": 335602.2300761171 }, { "content": "/// Strip parent directories from filenames\n\npub fn basename(path: &impl AsRef<Path>) -> String {\n\n path.as_ref()\n\n .file_name()\n\n .map(OsStr::to_string_lossy)\n\n .map(String::from)\n\n .expect(\"failed to infer basename for path.\")\n\n}\n\n\n", "file_path": "tests/tests/common/helpers.rs", "rank": 1, "score": 322197.3459630002 }, { "content": "/// Parses stdio bytes into a prefixed String\n\npub fn pretty_output(stdio: &[u8], prefix: &str) -> String {\n\n let mut cursor = io::Cursor::new(stdio);\n\n let mut buf = vec![];\n\n let mut string = String::new();\n\n loop {\n\n buf.clear();\n\n let bytes_read = cursor\n\n .read_until(b'\\n', &mut buf)\n\n .expect(\"failed to read from stdio.\");\n\n if bytes_read == 0 {\n\n break;\n\n }\n\n let as_string = String::from_utf8_lossy(&buf);\n\n string.push_str(&prefix);\n\n string.push_str(&as_string); // will contain a newline\n\n }\n\n string\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "tests/tests/common/helpers.rs", "rank": 2, "score": 316120.88220142387 }, { "content": "/// Store the given chain as the blocks for the `network` set the\n\n/// 
network's genesis block to `genesis_hash`, and head block to\n\n/// `null`\n\npub fn set_chain(chain: FakeBlockList, network: &str) {\n\n let store = crate::store::STORE\n\n .block_store()\n\n .chain_store(network)\n\n .unwrap();\n\n let chain: Vec<&dyn Block> = chain.iter().map(|block| *block as &dyn Block).collect();\n\n store.set_chain(&GENESIS_BLOCK.hash, chain);\n\n}\n", "file_path": "store/test-store/src/block_store.rs", "rank": 3, "score": 307186.6883943041 }, { "content": "/// Parses a list of GraphQL values into a vector of entity field values.\n\nfn list_values(value: Value, filter_type: &str) -> Result<Vec<Value>, QueryExecutionError> {\n\n match value {\n\n Value::List(ref values) if !values.is_empty() => {\n\n // Check that all values in list are of the same type\n\n let root_discriminant = discriminant(&values[0]);\n\n values\n\n .iter()\n\n .map(|value| {\n\n let current_discriminant = discriminant(value);\n\n if root_discriminant == current_discriminant {\n\n Ok(value.clone())\n\n } else {\n\n Err(QueryExecutionError::ListTypesError(\n\n filter_type.to_string(),\n\n vec![values[0].to_string(), value.to_string()],\n\n ))\n\n }\n\n })\n\n .collect::<Result<Vec<_>, _>>()\n\n }\n\n Value::List(ref values) if values.is_empty() => Ok(vec![]),\n\n _ => Err(QueryExecutionError::ListFilterError(\n\n filter_type.to_string(),\n\n )),\n\n }\n\n}\n\n\n", "file_path": "graphql/src/store/query.rs", "rank": 4, "score": 305263.1184193975 }, { "content": "/// Creates a `graphql_parser::query::Value::Object` from key/value pairs.\n\n/// If you don't need to determine which keys are included dynamically at runtime\n\n/// consider using the `object! 
{}` macro instead.\n\npub fn object_value(data: Vec<(&str, r::Value)>) -> r::Value {\n\n r::Value::Object(Object::from_iter(\n\n data.into_iter().map(|(k, v)| (k.to_string(), v)),\n\n ))\n\n}\n\n\n", "file_path": "graph/src/data/graphql/object_macro.rs", "rank": 5, "score": 301968.28137735685 }, { "content": "pub fn logger(show_debug: bool) -> Logger {\n\n let use_color = isatty::stdout_isatty();\n\n let decorator = slog_term::TermDecorator::new().build();\n\n let drain = CustomFormat::new(decorator, use_color).fuse();\n\n let drain = slog_envlogger::LogBuilder::new(drain)\n\n .filter(\n\n None,\n\n if show_debug {\n\n FilterLevel::Debug\n\n } else {\n\n FilterLevel::Info\n\n },\n\n )\n\n .parse(ENV_VARS.log_levels.as_deref().unwrap_or(\"\"))\n\n .build();\n\n let drain = slog_async::Async::new(drain)\n\n .chan_size(20000)\n\n .build()\n\n .fuse();\n\n Logger::root(drain, o!())\n", "file_path": "graph/src/log/mod.rs", "rank": 6, "score": 301260.08731416846 }, { "content": "pub fn create_ipfs_clients(logger: &Logger, ipfs_addresses: &Vec<String>) -> Vec<IpfsClient> {\n\n // Parse the IPFS URL from the `--ipfs` command line argument\n\n let ipfs_addresses: Vec<_> = ipfs_addresses\n\n .iter()\n\n .map(|uri| {\n\n if uri.starts_with(\"http://\") || uri.starts_with(\"https://\") {\n\n String::from(uri)\n\n } else {\n\n format!(\"http://{}\", uri)\n\n }\n\n })\n\n .collect();\n\n\n\n ipfs_addresses\n\n .into_iter()\n\n .map(|ipfs_address| {\n\n info!(\n\n logger,\n\n \"Trying IPFS node at: {}\",\n\n SafeDisplay(&ipfs_address)\n", "file_path": "node/src/chain.rs", "rank": 7, "score": 295619.2150206282 }, { "content": "// Returns true if the given type is a non-null type.\n\npub fn is_non_null_type(t: &s::Type) -> bool {\n\n match t {\n\n s::Type::NonNullType(_) => true,\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "graphql/src/schema/ast.rs", "rank": 8, "score": 294936.7991247639 }, { "content": "pub fn is_list(field_type: &s::Type) -> bool {\n\n match field_type {\n\n 
s::Type::NamedType(_) => false,\n\n s::Type::NonNullType(inner) => is_list(inner),\n\n s::Type::ListType(_) => true,\n\n }\n\n}\n\n\n", "file_path": "graphql/src/schema/ast.rs", "rank": 9, "score": 294924.1735264615 }, { "content": "fn query(entity_types: Vec<&str>) -> EntityQuery {\n\n EntityQuery::new(\n\n THINGS_SUBGRAPH_ID.clone(),\n\n BLOCK_NUMBER_MAX,\n\n EntityCollection::All(\n\n entity_types\n\n .into_iter()\n\n .map(|entity_type| (EntityType::from(entity_type), AttributeNames::All))\n\n .collect(),\n\n ),\n\n )\n\n}\n\n\n", "file_path": "store/postgres/tests/relational.rs", "rank": 10, "score": 294227.4762391831 }, { "content": "/// Generates arguments for collection queries of a named type (e.g. User).\n\nfn collection_arguments_for_named_type(type_name: &str) -> Vec<InputValue> {\n\n // `first` and `skip` should be non-nullable, but the Apollo graphql client\n\n // exhibts non-conforming behaviour by erroing if no value is provided for a\n\n // non-nullable field, regardless of the presence of a default.\n\n let mut skip = input_value(&\"skip\".to_string(), \"\", Type::NamedType(\"Int\".to_string()));\n\n skip.default_value = Some(Value::Int(0.into()));\n\n\n\n let mut first = input_value(&\"first\".to_string(), \"\", Type::NamedType(\"Int\".to_string()));\n\n first.default_value = Some(Value::Int(100.into()));\n\n\n\n let args = vec![\n\n skip,\n\n first,\n\n input_value(\n\n &\"orderBy\".to_string(),\n\n \"\",\n\n Type::NamedType(format!(\"{}_orderBy\", type_name)),\n\n ),\n\n input_value(\n\n &\"orderDirection\".to_string(),\n", "file_path": "graphql/src/schema/api.rs", "rank": 11, "score": 293136.6977198935 }, { "content": "pub fn is_introspection_field(name: &str) -> bool {\n\n INTROSPECTION_QUERY_TYPE.field(name).is_some()\n\n}\n", "file_path": "graphql/src/introspection/schema.rs", "rank": 12, "score": 289344.62770292634 }, { "content": "fn sort_by_entity_key(mut mods: Vec<EntityModification>) -> Vec<EntityModification> {\n\n mods.sort_by_key(|m| 
m.entity_key().clone());\n\n mods\n\n}\n\n\n\n#[tokio::test]\n\nasync fn empty_cache_modifications() {\n\n let store = Arc::new(MockStore::new(BTreeMap::new()));\n\n let cache = EntityCache::new(store.clone());\n\n let result = cache.as_modifications();\n\n assert_eq!(result.unwrap().modifications, vec![]);\n\n}\n\n\n", "file_path": "graph/tests/entity_cache.rs", "rank": 13, "score": 287617.60664577363 }, { "content": "pub fn calls_host_fn(runtime: &[u8], host_fn: &str) -> anyhow::Result<bool> {\n\n use wasmparser::Payload;\n\n\n\n for payload in wasmparser::Parser::new(0).parse_all(runtime) {\n\n if let Payload::ImportSection(s) = payload? {\n\n for import in s {\n\n let import = import?;\n\n if import.field == Some(host_fn) {\n\n return Ok(true);\n\n }\n\n }\n\n }\n\n }\n\n\n\n Ok(false)\n\n}\n\n\n\n#[derive(Clone, Debug, Deserialize)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct Graft {\n", "file_path": "graph/src/data/subgraph/mod.rs", "rank": 14, "score": 287437.6666690109 }, { "content": "/// Returns the value type for a GraphQL field type.\n\npub fn get_field_name(field_type: &s::Type) -> String {\n\n match field_type {\n\n s::Type::NamedType(name) => name.to_string(),\n\n s::Type::NonNullType(inner) => get_field_name(inner),\n\n s::Type::ListType(inner) => get_field_name(inner),\n\n }\n\n}\n\n\n", "file_path": "graphql/src/schema/ast.rs", "rank": 15, "score": 285028.095568404 }, { "content": "/// Looks up the value of an argument in a vector of (name, value) tuples.\n\npub fn get_argument_value<'a>(arguments: &'a [(String, Value)], name: &str) -> Option<&'a Value> {\n\n arguments.iter().find(|(n, _)| n == name).map(|(_, v)| v)\n\n}\n\n\n", "file_path": "graphql/src/query/ast.rs", "rank": 16, "score": 282844.86708939343 }, { "content": "pub fn store_is_sharded() -> bool {\n\n CONFIG.stores.len() > 1\n\n}\n\n\n", "file_path": "store/test-store/src/store.rs", "rank": 17, "score": 280764.8379702188 }, { "content": "pub fn pools(config: &Config, nodes: 
Vec<String>, shard: bool) -> Result<(), Error> {\n\n // Quietly replace `-` with `_` in node names to make passing in pod names\n\n // from k8s less annoying\n\n let nodes: Vec<_> = nodes\n\n .into_iter()\n\n .map(|name| {\n\n NodeId::new(name.replace(\"-\", \"_\"))\n\n .map_err(|()| anyhow!(\"illegal node name `{}`\", name))\n\n })\n\n .collect::<Result<_, _>>()?;\n\n // node -> shard_name -> size\n\n let mut sizes = BTreeMap::new();\n\n for node in &nodes {\n\n let mut shard_sizes = BTreeMap::new();\n\n for (name, shard) in &config.stores {\n\n let size = shard.pool_size.size_for(node, name)?;\n\n shard_sizes.insert(name.to_string(), size);\n\n for (replica_name, replica) in &shard.replicas {\n\n let qname = format!(\"{}.{}\", name, replica_name);\n\n let size = replica.pool_size.size_for(node, &qname)?;\n", "file_path": "node/src/manager/commands/config.rs", "rank": 18, "score": 279302.01249669876 }, { "content": "/// Generates a `*_filter` input value for the given field name, suffix and value type.\n\nfn input_value(name: &str, suffix: &'static str, value_type: Type) -> InputValue {\n\n InputValue {\n\n position: Pos::default(),\n\n description: None,\n\n name: if suffix.is_empty() {\n\n name.to_owned()\n\n } else {\n\n format!(\"{}_{}\", name, suffix)\n\n },\n\n value_type,\n\n default_value: None,\n\n directives: vec![],\n\n }\n\n}\n\n\n", "file_path": "graphql/src/schema/api.rs", "rank": 19, "score": 276424.39372322324 }, { "content": "/// What we expect the query to return: either a list of musician ids when\n\n/// the query should succeed (`Ok`) or a string that should appear in the\n\n/// error message when the query should return an `Err`. 
The error string\n\n/// can contain `@DEPLOYMENT@` which will be replaced with the deployment id\n\ntype Expected = Result<Vec<&'static str>, &'static str>;\n\n\n", "file_path": "graphql/tests/query.rs", "rank": 20, "score": 274641.32346746535 }, { "content": "pub fn all_shards() -> Vec<Shard> {\n\n CONFIG\n\n .stores\n\n .keys()\n\n .map(|shard| Shard::new(shard.clone()))\n\n .collect::<Result<Vec<_>, _>>()\n\n .expect(\"all configured shard names are valid\")\n\n}\n\n\n", "file_path": "store/test-store/src/store.rs", "rank": 21, "score": 271287.50635485165 }, { "content": "/// Returns true if the given type is an input type.\n\n///\n\n/// Uses the algorithm outlined on\n\n/// https://facebook.github.io/graphql/draft/#IsInputType().\n\npub fn is_input_type(schema: &s::Document, t: &s::Type) -> bool {\n\n match t {\n\n s::Type::NamedType(name) => {\n\n let named_type = schema.get_named_type(name);\n\n named_type.map_or(false, |type_def| match type_def {\n\n s::TypeDefinition::Scalar(_)\n\n | s::TypeDefinition::Enum(_)\n\n | s::TypeDefinition::InputObject(_) => true,\n\n _ => false,\n\n })\n\n }\n\n s::Type::ListType(inner) => is_input_type(schema, inner),\n\n s::Type::NonNullType(inner) => is_input_type(schema, inner),\n\n }\n\n}\n\n\n", "file_path": "graphql/src/schema/ast.rs", "rank": 22, "score": 269484.0824551583 }, { "content": "pub fn is_entity_type(schema: &s::Document, t: &s::Type) -> bool {\n\n match t {\n\n s::Type::NamedType(name) => schema\n\n .get_named_type(name)\n\n .map_or(false, is_entity_type_definition),\n\n s::Type::ListType(inner_type) => is_entity_type(schema, inner_type),\n\n s::Type::NonNullType(inner_type) => is_entity_type(schema, inner_type),\n\n }\n\n}\n\n\n", "file_path": "graphql/src/schema/ast.rs", "rank": 23, "score": 269467.4194189061 }, { "content": "fn make_band(id: &'static str, data: Vec<(&str, Value)>) -> (EntityKey, Entity) {\n\n (\n\n EntityKey::data(SUBGRAPH_ID.clone(), \"Band\".to_string(), id.into()),\n\n 
Entity::from(data),\n\n )\n\n}\n\n\n", "file_path": "graph/tests/entity_cache.rs", "rank": 24, "score": 268619.97430250305 }, { "content": "#[track_caller]\n\nfn filter_contains(filter: EntityFilter, sql: &str) {\n\n const SCHEMA: &str = \"\n\n type Thing @entity { \n\n id: Bytes!,\n\n address: Bytes!,\n\n name: String\n\n }\";\n\n let layout = test_layout(SCHEMA);\n\n let table = layout\n\n .table_for_entity(&EntityType::new(\"Thing\".to_string()))\n\n .unwrap();\n\n let filter = QueryFilter::new(&filter, table.as_ref()).unwrap();\n\n let query = debug_query::<Pg, _>(&filter);\n\n assert!(\n\n query.to_string().contains(sql),\n\n \"Expected query /{}/ to contain /{}/\",\n\n query.to_string(),\n\n sql\n\n );\n\n}\n\n\n", "file_path": "store/postgres/src/relational/query_tests.rs", "rank": 25, "score": 267198.25737703266 }, { "content": "pub trait TriggerFilter<C: Blockchain>: Default + Clone + Send + Sync {\n\n fn from_data_sources<'a>(\n\n data_sources: impl Iterator<Item = &'a C::DataSource> + Clone,\n\n ) -> Self {\n\n let mut this = Self::default();\n\n this.extend(data_sources);\n\n this\n\n }\n\n\n\n fn extend_with_template(&mut self, data_source: impl Iterator<Item = C::DataSourceTemplate>);\n\n\n\n fn extend<'a>(&mut self, data_sources: impl Iterator<Item = &'a C::DataSource> + Clone);\n\n\n\n fn node_capabilities(&self) -> C::NodeCapabilities;\n\n\n\n fn to_firehose_filter(self) -> Vec<prost_types::Any>;\n\n}\n\n\n", "file_path": "graph/src/blockchain/mod.rs", "rank": 26, "score": 266767.4293055069 }, { "content": "fn deployment_labels(subgraph: &str) -> HashMap<String, String> {\n\n labels! 
{ String::from(\"deployment\") => String::from(subgraph), }\n\n}\n\n\n", "file_path": "graph/src/components/metrics/mod.rs", "rank": 27, "score": 266062.83900051354 }, { "content": "pub fn postgres_test_database_name(unique_id: &u16) -> String {\n\n format!(\"test_database_{}\", unique_id)\n\n}\n", "file_path": "tests/tests/common/helpers.rs", "rank": 28, "score": 265979.92835674156 }, { "content": "/// An Asc primitive or an `AscPtr` into the Asc heap. A type marked as\n\n/// `AscValue` must have the same byte representation in Rust and Asc, including\n\n/// same size, and size must be equal to alignment.\n\npub trait AscValue: AscType + Copy + Default {}\n\n\n\nimpl AscType for bool {\n\n fn to_asc_bytes(&self) -> Result<Vec<u8>, DeterministicHostError> {\n\n Ok(vec![*self as u8])\n\n }\n\n\n\n fn from_asc_bytes(\n\n asc_obj: &[u8],\n\n _api_version: &Version,\n\n ) -> Result<Self, DeterministicHostError> {\n\n if asc_obj.len() != 1 {\n\n Err(DeterministicHostError::from(anyhow::anyhow!(\n\n \"Incorrect size for bool. 
Expected 1, got {},\",\n\n asc_obj.len()\n\n )))\n\n } else {\n\n Ok(asc_obj[0] != 0)\n\n }\n\n }\n", "file_path": "graph/src/runtime/mod.rs", "rank": 29, "score": 265601.1555058439 }, { "content": "pub fn insert_ens_name(hash: &str, name: &str) {\n\n use diesel::insert_into;\n\n use diesel::prelude::*;\n\n use graph_store_postgres::command_support::catalog::ens_names;\n\n\n\n let conn = PRIMARY_POOL.get().unwrap();\n\n\n\n insert_into(ens_names::table)\n\n .values((ens_names::hash.eq(hash), ens_names::name.eq(name)))\n\n .on_conflict_do_nothing()\n\n .execute(&conn)\n\n .unwrap();\n\n}\n\n\n\n/// Insert the given entities and wait until all writes have been processed.\n\n/// The inserts all happen at `GENESIS_PTR`, i.e., block 0\n\npub async fn insert_entities(\n\n deployment: &DeploymentLocator,\n\n entities: Vec<(EntityType, Entity)>,\n\n) -> Result<(), StoreError> {\n", "file_path": "store/test-store/src/store.rs", "rank": 30, "score": 264830.48142478673 }, { "content": "/// Generates `Query` fields for the given type name (e.g. 
`users` and `user`).\n\nfn query_fields_for_type(type_name: &str) -> Vec<Field> {\n\n let mut collection_arguments = collection_arguments_for_named_type(type_name);\n\n collection_arguments.push(block_argument());\n\n\n\n let mut by_id_arguments = vec![\n\n InputValue {\n\n position: Pos::default(),\n\n description: None,\n\n name: \"id\".to_string(),\n\n value_type: Type::NonNullType(Box::new(Type::NamedType(\"ID\".to_string()))),\n\n default_value: None,\n\n directives: vec![],\n\n },\n\n block_argument(),\n\n ];\n\n\n\n collection_arguments.push(subgraph_error_argument());\n\n by_id_arguments.push(subgraph_error_argument());\n\n\n\n vec![\n", "file_path": "graphql/src/schema/api.rs", "rank": 31, "score": 264338.9272346363 }, { "content": "/// Returns the name of a type.\n\npub fn get_type_name(t: &s::TypeDefinition) -> &str {\n\n match t {\n\n s::TypeDefinition::Enum(t) => &t.name,\n\n s::TypeDefinition::InputObject(t) => &t.name,\n\n s::TypeDefinition::Interface(t) => &t.name,\n\n s::TypeDefinition::Object(t) => &t.name,\n\n s::TypeDefinition::Scalar(t) => &t.name,\n\n s::TypeDefinition::Union(t) => &t.name,\n\n }\n\n}\n\n\n", "file_path": "graphql/src/schema/ast.rs", "rank": 32, "score": 261838.29799551045 }, { "content": "pub fn is_entity_type_definition(type_def: &s::TypeDefinition) -> bool {\n\n match type_def {\n\n // Entity types are obvious\n\n s::TypeDefinition::Object(object_type) => {\n\n get_object_type_directive(object_type, String::from(\"entity\")).is_some()\n\n }\n\n\n\n // For now, we'll assume that only entities can implement interfaces;\n\n // thus, any interface type definition is automatically an entity type\n\n s::TypeDefinition::Interface(_) => true,\n\n\n\n // Everything else (unions, scalars, enums) are not considered entity\n\n // types for now\n\n _ => false,\n\n }\n\n}\n\n\n", "file_path": "graphql/src/schema/ast.rs", "rank": 33, "score": 260646.5361006953 }, { "content": "/// Returns the value type for a GraphQL field type.\n\npub fn 
get_field_value_type(field_type: &s::Type) -> Result<ValueType, Error> {\n\n match field_type {\n\n s::Type::NamedType(ref name) => ValueType::from_str(&name),\n\n s::Type::NonNullType(inner) => get_field_value_type(&inner),\n\n s::Type::ListType(_) => Err(anyhow!(\"Only scalar values are supported in this context\")),\n\n }\n\n}\n\n\n", "file_path": "graphql/src/schema/ast.rs", "rank": 34, "score": 258162.38534345722 }, { "content": "pub fn make_ipfs_uri(ipfs_ports: &MappedPorts) -> String {\n\n let port = ipfs_ports\n\n .0\n\n .get(&IPFS_DEFAULT_PORT)\n\n .expect(\"failed to fetch IPFS port from mapped ports\");\n\n format!(\"http://{host}:{port}\", host = \"localhost\", port = port)\n\n}\n\n\n", "file_path": "tests/tests/common/helpers.rs", "rank": 35, "score": 258101.4364648316 }, { "content": "fn primary_store() -> Vec<String> {\n\n vec![PRIMARY_SHARD.to_string()]\n\n}\n\n\n", "file_path": "node/src/config.rs", "rank": 36, "score": 247730.68132810947 }, { "content": "fn wasm_file_path(wasm_file: &str, api_version: Version) -> String {\n\n format!(\n\n \"wasm_test/api_version_{}_{}_{}/{}\",\n\n api_version.major, api_version.minor, api_version.patch, wasm_file\n\n )\n\n}\n\n\n", "file_path": "runtime/test/src/test.rs", "rank": 37, "score": 247548.2253625014 }, { "content": "// Build a Ganache connection string. 
Returns the port number and the URI.\n\npub fn make_ganache_uri(ganache_ports: &MappedPorts) -> (u16, String) {\n\n let port = ganache_ports\n\n .0\n\n .get(&GANACHE_DEFAULT_PORT)\n\n .expect(\"failed to fetch Ganache port from mapped ports\");\n\n let uri = format!(\"test:http://{host}:{port}\", host = \"localhost\", port = port);\n\n (*port, uri)\n\n}\n\n\n", "file_path": "tests/tests/common/helpers.rs", "rank": 38, "score": 247110.72870613157 }, { "content": "/// Returns the interface type with the given name.\n\npub fn get_interface_type_mut<'a>(\n\n schema: &'a mut s::Document,\n\n name: &str,\n\n) -> Option<&'a mut s::InterfaceType> {\n\n use graphql_parser::schema::TypeDefinition::*;\n\n\n\n get_named_type_definition_mut(schema, name).and_then(|type_def| match type_def {\n\n Interface(interface_type) => Some(interface_type),\n\n _ => None,\n\n })\n\n}\n\n\n", "file_path": "graphql/src/schema/ast.rs", "rank": 39, "score": 245438.10317455826 }, { "content": "/// Returns the object type with the given name.\n\npub fn get_object_type_mut<'a>(\n\n schema: &'a mut s::Document,\n\n name: &str,\n\n) -> Option<&'a mut s::ObjectType> {\n\n use graphql_parser::schema::TypeDefinition::*;\n\n\n\n get_named_type_definition_mut(schema, name).and_then(|type_def| match type_def {\n\n Object(object_type) => Some(object_type),\n\n _ => None,\n\n })\n\n}\n\n\n", "file_path": "graphql/src/schema/ast.rs", "rank": 40, "score": 245438.10317455826 }, { "content": "fn subgraph_id_with_api_version(subgraph_id: &str, api_version: Version) -> String {\n\n format!(\n\n \"{}_{}_{}_{}\",\n\n subgraph_id, api_version.major, api_version.minor, api_version.patch\n\n )\n\n}\n\n\n\nasync fn test_valid_module_and_store(\n\n subgraph_id: &str,\n\n data_source: DataSource,\n\n api_version: Version,\n\n) -> (\n\n WasmInstance<Chain>,\n\n Arc<impl SubgraphStore>,\n\n DeploymentLocator,\n\n) {\n\n test_valid_module_and_store_with_timeout(subgraph_id, data_source, api_version, 
None).await\n\n}\n\n\n\nasync fn test_valid_module_and_store_with_timeout(\n", "file_path": "runtime/test/src/test.rs", "rank": 41, "score": 243493.29571841017 }, { "content": "pub fn current_servers(conn: &PgConnection) -> Result<Vec<String>, StoreError> {\n\n #[derive(QueryableByName)]\n\n struct Srv {\n\n #[sql_type = \"Text\"]\n\n srvname: String,\n\n }\n\n Ok(sql_query(\"select srvname from pg_foreign_server\")\n\n .get_results::<Srv>(conn)?\n\n .into_iter()\n\n .map(|srv| srv.srvname)\n\n .collect())\n\n}\n\n\n", "file_path": "store/postgres/src/catalog.rs", "rank": 42, "score": 242811.5954150237 }, { "content": "/// Returns all type definitions in the schema.\n\npub fn get_type_definitions(schema: &s::Document) -> Vec<&s::TypeDefinition> {\n\n schema\n\n .definitions\n\n .iter()\n\n .filter_map(|d| match d {\n\n s::Definition::TypeDefinition(typedef) => Some(typedef),\n\n _ => None,\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "graphql/src/schema/ast.rs", "rank": 43, "score": 241768.37867276632 }, { "content": "// We call our test strings aN so that\n\n// aN = \"a\" * (STRING_PREFIX_SIZE - 2 + N)\n\n// chosen so that they straddle the boundary between strings that fit into\n\n// the index, and strings that have only a prefix in the index\n\n// Return (a1, a2, a2b, a3)\n\n// Note that that is the order for these ids, though the\n\n// underlying strings are in the order a1 < a2 < a3 < a2b\n\nfn ferrets() -> (String, String, String, String) {\n\n (\n\n \"a\".repeat(STRING_PREFIX_SIZE - 1),\n\n \"a\".repeat(STRING_PREFIX_SIZE),\n\n format!(\"{}b\", \"a\".repeat(STRING_PREFIX_SIZE)),\n\n \"a\".repeat(STRING_PREFIX_SIZE + 1),\n\n )\n\n}\n\n\n", "file_path": "store/postgres/tests/relational.rs", "rank": 44, "score": 241649.40910615132 }, { "content": "/// We pass the root node of the result around as a vec of nodes, not as\n\n/// a single node so that we can use the same functions on interior node\n\n/// lists which are the result of querying the database. 
The root list\n\n/// consists of exactly one entry, and that entry has an empty\n\n/// (not even a `__typename`) entity.\n\n///\n\n/// That distinguishes it from both the result of a query that matches\n\n/// nothing (an empty `Vec`), and a result that finds just one entity\n\n/// (the entity is not completely empty)\n\nfn is_root_node<'a>(mut nodes: impl Iterator<Item = &'a Node>) -> bool {\n\n if let Some(node) = nodes.next() {\n\n node.entity.is_empty()\n\n } else {\n\n false\n\n }\n\n}\n\n\n", "file_path": "graphql/src/store/prefetch.rs", "rank": 45, "score": 240646.7351338274 }, { "content": "/// Convert Postgres string representation of bytes \"\\xdeadbeef\"\n\n/// to ours of just \"deadbeef\".\n\nfn bytes_as_str(id: &str) -> String {\n\n id.trim_start_matches(\"\\\\x\").to_owned()\n\n}\n\n\n\nimpl IdType {\n\n /// Add `ids` as a bind variable to `out`, using the right SQL type\n\n fn bind_ids<S>(&self, ids: &[S], out: &mut AstPass<Pg>) -> QueryResult<()>\n\n where\n\n S: AsRef<str> + diesel::serialize::ToSql<Text, Pg>,\n\n {\n\n match self {\n\n IdType::String => out.push_bind_param::<Array<Text>, _>(&ids)?,\n\n IdType::Bytes => {\n\n let ids = ids\n\n .iter()\n\n .map(|id| str_as_bytes(id.as_ref()))\n\n .collect::<Result<Vec<scalar::Bytes>, _>>()?;\n\n let id_slices = ids.iter().map(|id| id.as_slice()).collect::<Vec<_>>();\n\n out.push_bind_param::<Array<Binary>, _>(&id_slices)?;\n\n }\n\n }\n\n // Generate '::text[]' or '::bytea[]'\n\n out.push_sql(\"::\");\n\n out.push_sql(self.sql_type());\n\n out.push_sql(\"[]\");\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "store/postgres/src/relational_queries.rs", "rank": 46, "score": 239880.41172640224 }, { "content": "/// Return the enclosed named type for a field type, i.e., the type after\n\n/// stripping List and NonNull.\n\nfn named_type(field_type: &q::Type) -> &str {\n\n match field_type {\n\n q::Type::NamedType(name) => name.as_str(),\n\n q::Type::ListType(child) => named_type(child),\n\n 
q::Type::NonNullType(child) => named_type(child),\n\n }\n\n}\n\n\n", "file_path": "store/postgres/src/relational.rs", "rank": 47, "score": 239165.54209380882 }, { "content": "/// Hashes a string to a H256 hash.\n\nfn string_to_h256(s: &str) -> H256 {\n\n let mut result = [0u8; 32];\n\n let data = s.replace(\" \", \"\").into_bytes();\n\n let mut sponge = Keccak::new_keccak256();\n\n sponge.update(&data);\n\n sponge.finalize(&mut result);\n\n\n\n // This was deprecated but the replacement seems to not be available in the\n\n // version web3 uses.\n\n #[allow(deprecated)]\n\n H256::from_slice(&result)\n\n}\n\n\n\n#[derive(Clone, Debug, Default, Hash, Eq, PartialEq, Deserialize)]\n\npub struct TemplateSource {\n\n pub abi: String,\n\n}\n", "file_path": "chain/ethereum/src/data_source.rs", "rank": 48, "score": 238924.76622607282 }, { "content": "/// Replace the host portion of `url` and return a new URL with `host`\n\n/// as the host portion\n\n///\n\n/// Panics if `url` is not a valid URL (which won't happen in our case since\n\n/// we would have paniced before getting here as `url` is the connection for\n\n/// the primary Postgres instance)\n\nfn replace_host(url: &str, host: &str) -> String {\n\n let mut url = match Url::parse(url) {\n\n Ok(url) => url,\n\n Err(_) => panic!(\"Invalid Postgres URL {}\", url),\n\n };\n\n if let Err(e) = url.set_host(Some(host)) {\n\n panic!(\"Invalid Postgres url {}: {}\", url, e.to_string());\n\n }\n\n String::from(url)\n\n}\n\n\n", "file_path": "node/src/config.rs", "rank": 49, "score": 238227.66616982702 }, { "content": "pub fn usage(msg: &str) -> ! 
{\n\n println!(\"{}\", msg);\n\n println!(\"usage: schema schema.graphql\");\n\n println!(\"\\nPrint the API schema we derive from the given input schema\");\n\n std::process::exit(1);\n\n}\n\n\n", "file_path": "graphql/examples/schema.rs", "rank": 50, "score": 237609.38501206588 }, { "content": "fn insert_entity(conn: &PgConnection, layout: &Layout, entity_type: &str, entities: Vec<Entity>) {\n\n insert_entity_at(conn, layout, entity_type, entities, 0);\n\n}\n\n\n", "file_path": "store/postgres/tests/relational.rs", "rank": 51, "score": 236651.60410096764 }, { "content": "pub fn generate_entity_id() -> String {\n\n // Fast crypto RNG from operating system\n\n let mut rng = OsRng::default();\n\n\n\n // 128 random bits\n\n let id_bytes: [u8; 16] = rng.gen();\n\n\n\n // 32 hex chars\n\n // Comparable to uuidv4, but without the hyphens,\n\n // and without spending bits on a version identifier.\n\n hex::encode(id_bytes)\n\n}\n", "file_path": "graph/src/data/subgraph/schema.rs", "rank": 52, "score": 233861.0034837126 }, { "content": "// Build a postgres connection string\n\npub fn make_postgres_uri(unique_id: &u16, postgres_ports: &MappedPorts) -> String {\n\n let port = postgres_ports\n\n .0\n\n .get(&POSTGRESQL_DEFAULT_PORT)\n\n .expect(\"failed to fetch Postgres port from mapped ports\");\n\n format!(\n\n \"postgresql://{user}:{password}@{host}:{port}/{database_name}\",\n\n user = \"postgres\",\n\n password = \"password\",\n\n host = \"localhost\",\n\n port = port,\n\n database_name = postgres_test_database_name(unique_id),\n\n )\n\n}\n\n\n", "file_path": "tests/tests/common/helpers.rs", "rank": 53, "score": 233408.09566875634 }, { "content": "/// Evaluates if a given transaction was successful.\n\n///\n\n/// Returns `true` on success and `false` on failure.\n\n/// If a receipt does not have a status value (EIP-658), assume the transaction was successful.\n\npub fn evaluate_transaction_status(receipt_status: Option<U64>) -> bool {\n\n receipt_status\n\n .map(|status| 
!status.is_zero())\n\n .unwrap_or(true)\n\n}\n\n\n\n#[derive(Clone, Debug, Default, Deserialize, Serialize, PartialEq)]\n\npub struct EthereumBlock {\n\n pub block: Arc<LightEthereumBlock>,\n\n pub transaction_receipts: Vec<Arc<TransactionReceipt>>,\n\n}\n\n\n\n#[derive(Debug, Default, Clone, PartialEq)]\n\npub struct EthereumCall {\n\n pub from: Address,\n\n pub to: Address,\n\n pub value: U256,\n\n pub gas_used: U256,\n\n pub input: Bytes,\n\n pub output: Bytes,\n", "file_path": "graph/src/components/ethereum/types.rs", "rank": 54, "score": 233187.26302970407 }, { "content": "/// Returns `true` if the deployment `id` exists and is synced\n\npub fn exists_and_synced(conn: &PgConnection, id: &str) -> Result<bool, StoreError> {\n\n use subgraph_deployment as d;\n\n\n\n let synced = d::table\n\n .filter(d::deployment.eq(id))\n\n .select(d::synced)\n\n .first(conn)\n\n .optional()?\n\n .unwrap_or(false);\n\n Ok(synced)\n\n}\n\n\n", "file_path": "store/postgres/src/deployment.rs", "rank": 55, "score": 233040.11001468913 }, { "content": "pub fn usage(msg: &str) -> ! 
{\n\n println!(\"layout: {}\", msg);\n\n println!(\"Try 'layout --help' for more information.\");\n\n std::process::exit(1);\n\n}\n\n\n", "file_path": "store/postgres/examples/layout.rs", "rank": 56, "score": 232909.3010368385 }, { "content": "/// Test harness for running database integration tests.\n\nfn run_test<F>(chain: FakeBlockList, test: F)\n\nwhere\n\n F: Fn(Arc<DieselChainStore>, Arc<DieselStore>) -> Result<(), Error> + Send + 'static,\n\n{\n\n run_test_sequentially(|store| async move {\n\n for name in vec![NETWORK_NAME, FAKE_NETWORK_SHARED] {\n\n block_store::set_chain(chain.clone(), name);\n\n\n\n let chain_store = store.block_store().chain_store(name).expect(\"chain store\");\n\n\n\n // Run test\n\n test(chain_store.cheap_clone(), store.cheap_clone())\n\n .expect(&format!(\"test finishes successfully on network {}\", name));\n\n }\n\n });\n\n}\n\n\n", "file_path": "store/postgres/tests/chain_head.rs", "rank": 57, "score": 232075.8025277976 }, { "content": "fn type_object(schema: &Schema, type_objects: &mut TypeObjectsMap, t: &s::Type) -> r::Value {\n\n match t {\n\n // We store the name of the named type here to be able to resolve it dynamically later\n\n s::Type::NamedType(s) => r::Value::String(s.to_owned()),\n\n s::Type::ListType(ref inner) => list_type_object(schema, type_objects, inner),\n\n s::Type::NonNullType(ref inner) => non_null_type_object(schema, type_objects, inner),\n\n }\n\n}\n\n\n", "file_path": "graphql/src/introspection/resolver.rs", "rank": 58, "score": 231623.80015558374 }, { "content": "/// Run a test with a connection into the primary database, not a full store\n\npub fn run_test_with_conn<F>(test: F)\n\nwhere\n\n F: FnOnce(&PgConnection) -> (),\n\n{\n\n // Lock regardless of poisoning. 
This also forces sequential test execution.\n\n let _lock = match SEQ_LOCK.lock() {\n\n Ok(guard) => guard,\n\n Err(err) => err.into_inner(),\n\n };\n\n\n\n let conn = PRIMARY_POOL\n\n .get()\n\n .expect(\"failed to get connection for primary database\");\n\n\n\n test(&conn);\n\n}\n\n\n", "file_path": "store/test-store/src/store.rs", "rank": 59, "score": 230485.31043273475 }, { "content": "#[test]\n\nfn can_use_nested_filter() {\n\n const QUERY: &str = \"\n\n query {\n\n musicians(orderBy: id) {\n\n name\n\n bands(where: { originalSongs: [@S1@, @S3@, @S4@] }) { id }\n\n }\n\n }\n\n \";\n\n\n\n run_query(QUERY, |result, _| {\n\n let exp = object! {\n\n musicians: vec![\n\n object! {\n\n name: \"John\",\n\n bands: vec![ object! { id: \"b2\" }]\n\n },\n\n object! {\n\n name: \"Lisa\",\n\n bands: Vec::<r::Value>::new(),\n", "file_path": "graphql/tests/query.rs", "rank": 60, "score": 229364.29924378893 }, { "content": "/// Helper function to redact passwords from URLs\n\nfn display_url(url: &str) -> String {\n\n let mut url = match Url::parse(url) {\n\n Ok(url) => url,\n\n Err(_) => return String::from(url),\n\n };\n\n\n\n if url.password().is_some() {\n\n url.set_password(Some(\"HIDDEN_PASSWORD\"))\n\n .expect(\"failed to redact password\");\n\n }\n\n\n\n String::from(url)\n\n}\n\n\n\npub struct SafeDisplay<T>(pub T);\n\n\n\nimpl<T: fmt::Display> fmt::Display for SafeDisplay<T> {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n // First, format the inner value\n\n let inner = format!(\"{}\", self.0);\n", "file_path": "graph/src/util/security.rs", "rank": 61, "score": 229039.9944175976 }, { "content": "pub fn contains_subslice<T: PartialEq>(data: &[T], needle: &[T]) -> bool {\n\n data.windows(needle.len()).any(|w| w == needle)\n\n}\n\n\n", "file_path": "tests/tests/common/helpers.rs", "rank": 62, "score": 227867.9502661193 }, { "content": "fn combined_origins_err(event_type: &str) -> Error {\n\n anyhow!(\n\n \"data source has combined origin and 
no-origin {} event handlers\",\n\n event_type\n\n )\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n use graph::blockchain::DataSource as _;\n\n\n\n #[test]\n\n fn test_event_handlers_origin_validation() {\n\n let cases = [\n\n (\n\n DataSource::with_event_handlers(vec![\n\n MappingEventHandler::with_origin(\"event_1\", None),\n\n MappingEventHandler::with_origin(\"event_2\", None),\n", "file_path": "chain/cosmos/src/data_source.rs", "rank": 63, "score": 223307.4206963797 }, { "content": "fn make_entity_change(entity_type: &str) -> EntityChange {\n\n EntityChange::Data {\n\n subgraph_id: TEST_SUBGRAPH_ID.clone(),\n\n entity_type: EntityType::new(entity_type.to_owned()),\n\n }\n\n}\n\n\n\n// Get as events until we've seen all the expected events or we time out waiting\n\nasync fn check_events(\n\n stream: StoreEventStream<impl Stream<Item = Arc<StoreEvent>, Error = ()> + Send>,\n\n expected: Vec<StoreEvent>,\n\n) {\n\n fn as_set(events: Vec<Arc<StoreEvent>>) -> HashSet<EntityChange> {\n\n events.into_iter().fold(HashSet::new(), |mut set, event| {\n\n set.extend(event.changes.iter().map(|change| change.clone()));\n\n set\n\n })\n\n }\n\n\n\n let expected = Mutex::new(as_set(\n", "file_path": "store/postgres/tests/store.rs", "rank": 64, "score": 223106.99074750362 }, { "content": "fn formatted_timestamp_local(io: &mut impl io::Write) -> io::Result<()> {\n\n write!(\n\n io,\n\n \"{}\",\n\n chrono::Local::now().format(ENV_VARS.log_time_format.as_str())\n\n )\n\n}\n", "file_path": "graph/src/log/mod.rs", "rank": 65, "score": 222829.74549519946 }, { "content": "pub fn mock_data_source(path: &str, api_version: Version) -> DataSource {\n\n let runtime = std::fs::read(path).unwrap();\n\n\n\n DataSource {\n\n kind: String::from(\"ethereum/contract\"),\n\n name: String::from(\"example data source\"),\n\n network: Some(String::from(\"mainnet\")),\n\n address: Some(Address::from_str(\"0123123123012312312301231231230123123123\").unwrap()),\n\n start_block: 0,\n\n 
mapping: Mapping {\n\n kind: String::from(\"ethereum/events\"),\n\n api_version,\n\n language: String::from(\"wasm/assemblyscript\"),\n\n entities: vec![],\n\n abis: vec![],\n\n event_handlers: vec![],\n\n call_handlers: vec![],\n\n block_handlers: vec![],\n\n link: Link {\n\n link: \"link\".to_owned(),\n\n },\n\n runtime: Arc::new(runtime.clone()),\n\n },\n\n context: Default::default(),\n\n creation_block: None,\n\n contract_abi: Arc::new(mock_abi()),\n\n }\n\n}\n", "file_path": "runtime/test/src/common.rs", "rank": 66, "score": 222446.52405184245 }, { "content": "/// Create an error for a missing field in a type.\n\nfn missing_field_error(type_name: &str, field_name: &str) -> DeterministicHostError {\n\n DeterministicHostError::from(anyhow!(\"{} missing {}\", type_name, field_name))\n\n}\n\n\n", "file_path": "chain/cosmos/src/runtime/abi.rs", "rank": 67, "score": 221393.20063137758 }, { "content": "fn run_test_async<R, F>(chain: FakeBlockList, test: F)\n\nwhere\n\n F: Fn(Arc<DieselChainStore>, Arc<DieselStore>) -> R + Send + Sync + 'static,\n\n R: Future<Output = ()> + Send + 'static,\n\n{\n\n run_test_sequentially(|store| async move {\n\n for name in vec![NETWORK_NAME, FAKE_NETWORK_SHARED] {\n\n block_store::set_chain(chain.clone(), name);\n\n\n\n let chain_store = store.block_store().chain_store(name).expect(\"chain store\");\n\n\n\n // Run test\n\n test(chain_store.cheap_clone(), store.clone()).await;\n\n }\n\n });\n\n}\n\n\n", "file_path": "store/postgres/tests/chain_head.rs", "rank": 68, "score": 220567.84908103888 }, { "content": "/// Run the `test` after performing `setup`. The result of `setup` is passed\n\n/// into `test`. All tests using `run_test_sequentially` are run in sequence,\n\n/// never in parallel. 
The `test` is passed a `Store`, but it is permissible\n\n/// for tests to access the global `STORE` from this module, too.\n\npub fn run_test_sequentially<R, F>(test: F)\n\nwhere\n\n F: FnOnce(Arc<Store>) -> R + Send + 'static,\n\n R: std::future::Future<Output = ()> + Send + 'static,\n\n{\n\n // Lock regardless of poisoning. This also forces sequential test execution.\n\n let _lock = match SEQ_LOCK.lock() {\n\n Ok(guard) => guard,\n\n Err(err) => err.into_inner(),\n\n };\n\n\n\n STORE_RUNTIME.handle().block_on(async {\n\n let store = STORE.clone();\n\n test(store).await\n\n })\n\n}\n\n\n", "file_path": "store/test-store/src/store.rs", "rank": 69, "score": 220529.6624401696 }, { "content": "/// Look up the debug fork for the given subgraph in the database and\n\n/// return it. Returns `None` if the deployment does not have\n\n/// a debug fork.\n\npub fn debug_fork(\n\n conn: &PgConnection,\n\n id: &DeploymentHash,\n\n) -> Result<Option<DeploymentHash>, StoreError> {\n\n use subgraph_deployment as sd;\n\n\n\n let debug_fork: Option<String> = sd::table\n\n .select(sd::debug_fork)\n\n .filter(sd::deployment.eq(id.as_str()))\n\n .first(conn)?;\n\n\n\n match debug_fork {\n\n Some(fork) => Ok(Some(DeploymentHash::new(fork.clone()).map_err(|_| {\n\n StoreError::Unknown(anyhow!(\n\n \"the debug fork for a subgraph must be a valid subgraph id but is `{}`\",\n\n fork\n\n ))\n\n })?)),\n\n None => Ok(None),\n\n }\n\n}\n\n\n", "file_path": "store/postgres/src/deployment.rs", "rank": 70, "score": 218518.69391432387 }, { "content": "pub fn load(\n\n conn: &PgConnection,\n\n site: &Site,\n\n block: BlockNumber,\n\n) -> Result<Vec<StoredDynamicDataSource>, StoreError> {\n\n match site.schema_version.private_data_sources() {\n\n true => DataSourcesTable::new(site.namespace.clone()).load(conn, block),\n\n false => shared::load(conn, site.deployment.as_str(), block),\n\n }\n\n}\n\n\n\npub(crate) fn insert(\n\n conn: &PgConnection,\n\n site: &Site,\n\n data_sources: 
&[StoredDynamicDataSource],\n\n block_ptr: &BlockPtr,\n\n) -> Result<usize, StoreError> {\n\n match site.schema_version.private_data_sources() {\n\n true => DataSourcesTable::new(site.namespace.clone()).insert(\n\n conn,\n", "file_path": "store/postgres/src/dynds/mod.rs", "rank": 71, "score": 218176.46398243052 }, { "content": "pub fn execute_subscription(\n\n subscription: Subscription,\n\n schema: Arc<ApiSchema>,\n\n options: SubscriptionExecutionOptions,\n\n) -> Result<SubscriptionResult, SubscriptionError> {\n\n let query = crate::execution::Query::new(\n\n &options.logger,\n\n schema,\n\n None,\n\n subscription.query,\n\n options.max_complexity,\n\n options.max_depth,\n\n )?;\n\n execute_prepared_subscription(query, options)\n\n}\n\n\n\npub(crate) fn execute_prepared_subscription(\n\n query: Arc<crate::execution::Query>,\n\n options: SubscriptionExecutionOptions,\n\n) -> Result<SubscriptionResult, SubscriptionError> {\n", "file_path": "graphql/src/subscription/mod.rs", "rank": 72, "score": 218176.46398243052 }, { "content": "pub fn mock_context(\n\n deployment: DeploymentLocator,\n\n data_source: DataSource,\n\n store: Arc<impl SubgraphStore>,\n\n api_version: Version,\n\n) -> MappingContext<Chain> {\n\n MappingContext {\n\n logger: Logger::root(slog::Discard, o!()),\n\n block_ptr: BlockPtr {\n\n hash: Default::default(),\n\n number: 0,\n\n },\n\n host_exports: Arc::new(mock_host_exports(\n\n deployment.hash.clone(),\n\n data_source,\n\n store.clone(),\n\n api_version,\n\n )),\n\n state: BlockState::new(\n\n futures03::executor::block_on(store.writable(LOGGER.clone(), deployment.id)).unwrap(),\n\n Default::default(),\n\n ),\n\n proof_of_indexing: None,\n\n host_fns: Arc::new(Vec::new()),\n\n debug_fork: None,\n\n }\n\n}\n\n\n", "file_path": "runtime/test/src/common.rs", "rank": 73, "score": 217233.01625198376 }, { "content": "pub fn truncate(chain_store: Arc<ChainStore>, skip_confirmation: bool) -> anyhow::Result<()> {\n\n if !skip_confirmation && 
!helpers::prompt_for_confirmation()? {\n\n println!(\"Aborting.\");\n\n return Ok(());\n\n }\n\n\n\n chain_store\n\n .truncate_block_cache()\n\n .with_context(|| format!(\"Failed to truncate block cache for {}\", chain_store.chain))\n\n}\n\n\n\nasync fn run(\n\n block_hash: &H256,\n\n chain_store: &ChainStore,\n\n ethereum_adapter: &EthereumAdapter,\n\n logger: &Logger,\n\n) -> anyhow::Result<()> {\n\n let cached_block = steps::fetch_single_cached_block(*block_hash, &chain_store)?;\n\n let provider_block =\n\n steps::fetch_single_provider_block(&block_hash, ethereum_adapter, logger).await?;\n", "file_path": "node/src/manager/commands/check_blocks.rs", "rank": 74, "score": 217094.39293762067 }, { "content": "fn is_object_type(field_type: &q::Type, enums: &EnumMap) -> bool {\n\n let name = named_type(field_type);\n\n\n\n !enums.contains_key(&*name) && !ValueType::is_scalar(name)\n\n}\n\n\n", "file_path": "store/postgres/src/relational.rs", "rank": 75, "score": 216397.68408009005 }, { "content": "/// Returns the type definition for a type.\n\npub fn get_type_definition_from_type<'a>(\n\n schema: &'a s::Document,\n\n t: &s::Type,\n\n) -> Option<&'a s::TypeDefinition> {\n\n match t {\n\n s::Type::NamedType(name) => schema.get_named_type(name),\n\n s::Type::ListType(inner) => get_type_definition_from_type(schema, inner),\n\n s::Type::NonNullType(inner) => get_type_definition_from_type(schema, inner),\n\n }\n\n}\n\n\n", "file_path": "graphql/src/schema/ast.rs", "rank": 76, "score": 216316.5476900388 }, { "content": "/// Create an unregistered counter with labels\n\npub fn counter_with_labels(\n\n name: &str,\n\n help: &str,\n\n const_labels: HashMap<String, String>,\n\n) -> Result<Counter, PrometheusError> {\n\n let opts = Opts::new(name, help).const_labels(const_labels);\n\n Counter::with_opts(opts)\n\n}\n\n\n", "file_path": "graph/src/components/metrics/mod.rs", "rank": 77, "score": 213485.98835899582 }, { "content": "/// Create an unregistered gauge with labels\n\npub 
fn gauge_with_labels(\n\n name: &str,\n\n help: &str,\n\n const_labels: HashMap<String, String>,\n\n) -> Result<Gauge, PrometheusError> {\n\n let opts = Opts::new(name, help).const_labels(const_labels);\n\n Gauge::with_opts(opts)\n\n}\n\n\n", "file_path": "graph/src/components/metrics/mod.rs", "rank": 78, "score": 213485.98835899582 }, { "content": "pub fn remove_subgraphs() {\n\n SUBGRAPH_STORE\n\n .delete_all_entities_for_test_use_only()\n\n .expect(\"deleting test entities succeeds\");\n\n}\n\n\n", "file_path": "store/test-store/src/store.rs", "rank": 79, "score": 212574.18627212342 }, { "content": "fn check_musicians_at(query0: &str, block_var: r::Value, expected: Expected, qid: &'static str) {\n\n run_query((query0, block_var), move |result, id_type| {\n\n match &expected {\n\n Ok(ids) => {\n\n let ids: Vec<_> = ids.into_iter().map(|id| object! { id: *id }).collect();\n\n let expected = Some(object_value(vec![(\"musicians\", r::Value::List(ids))]));\n\n let data = match result.to_result() {\n\n Err(errors) => panic!(\"unexpected error: {:?} ({})\\n\", errors, qid),\n\n Ok(data) => data,\n\n };\n\n assert_eq!(data, expected, \"failed query: ({})\", qid);\n\n }\n\n Err(msg) => {\n\n let errors = match result.to_result() {\n\n Err(errors) => errors,\n\n Ok(_) => panic!(\n\n \"expected error `{}` but got successful result ({})\",\n\n msg, qid\n\n ),\n\n };\n", "file_path": "graphql/tests/query.rs", "rank": 80, "score": 212535.97814097628 }, { "content": "#[test]\n\nfn test_reserved_filter_and_group_by_types_validation() {\n\n const SCHEMA: &str = r#\"\n\n type Gravatar @entity {\n\n _: Boolean\n\n }\n\n type Gravatar_filter @entity {\n\n _: Boolean\n\n }\n\n type Gravatar_orderBy @entity {\n\n _: Boolean\n\n }\n\n \"#;\n\n\n\n let dummy_hash = DeploymentHash::new(\"dummy\").unwrap();\n\n\n\n let schema = Schema::parse(SCHEMA, dummy_hash).unwrap();\n\n\n\n let errors = schema.validate(&HashMap::new()).unwrap_err();\n\n\n\n // The only problem in the schema is the 
usage of reserved types\n", "file_path": "graph/src/data/schema.rs", "rank": 81, "score": 211583.63156216434 }, { "content": "fn string_to_h160(string: &str) -> Result<H160, DeterministicHostError> {\n\n // `H160::from_str` takes a hex string with no leading `0x`.\n\n let s = string.trim_start_matches(\"0x\");\n\n H160::from_str(s)\n\n .with_context(|| format!(\"Failed to convert string to Address/H160: '{}'\", s))\n\n .map_err(DeterministicHostError::from)\n\n}\n\n\n", "file_path": "runtime/wasm/src/host_exports.rs", "rank": 82, "score": 211202.74800321122 }, { "content": "/// Fetches a unique number for naming Ganache containers\n\npub fn get_unique_ganache_counter() -> u16 {\n\n increase_atomic_counter(&GANACHE_CONTAINER_COUNT)\n\n}\n", "file_path": "tests/tests/common/helpers.rs", "rank": 83, "score": 211095.5531593025 }, { "content": "/// Fetches a unique port number\n\npub fn get_unique_port_number() -> u16 {\n\n increase_atomic_counter(&PORT_NUMBER_COUNTER)\n\n}\n\n\n", "file_path": "tests/tests/common/helpers.rs", "rank": 84, "score": 211095.5531593025 }, { "content": "/// Fetches a unique number for naming Postgres databases\n\npub fn get_unique_postgres_counter() -> u16 {\n\n increase_atomic_counter(&POSTGRES_DATABASE_COUNT)\n\n}\n", "file_path": "tests/tests/common/helpers.rs", "rank": 85, "score": 211095.5531593025 }, { "content": "/// Tap into store events sent when running `f` and return those events. This\n\n/// intercepts `StoreEvent` when they are sent and therefore does not require\n\n/// the delicate timing that actually listening to events in the database\n\n/// requires. 
Of course, this does not test that events that are sent are\n\n/// actually received by anything, but makes ensuring that the right events\n\n/// get sent much more convenient than trying to receive them\n\npub fn tap_store_events<F, R>(f: F) -> (R, Vec<StoreEvent>)\n\nwhere\n\n F: FnOnce() -> R,\n\n{\n\n use graph_store_postgres::layout_for_tests::{EVENT_TAP, EVENT_TAP_ENABLED};\n\n\n\n EVENT_TAP.lock().unwrap().clear();\n\n *EVENT_TAP_ENABLED.lock().unwrap() = true;\n\n let res = f();\n\n *EVENT_TAP_ENABLED.lock().unwrap() = false;\n\n (res, EVENT_TAP.lock().unwrap().clone())\n\n}\n\n\n\n/// Run a GraphQL query against the `STORE`\n\npub async fn execute_subgraph_query(query: Query, target: QueryTarget) -> QueryResults {\n\n execute_subgraph_query_internal(query, target, None, None).await\n\n}\n\n\n\npub async fn execute_subgraph_query_with_deadline(\n\n query: Query,\n", "file_path": "store/test-store/src/store.rs", "rank": 86, "score": 209214.13182300222 }, { "content": "/// Looks up a directive in a object type, if it is provided.\n\npub fn get_object_type_directive(\n\n object_type: &s::ObjectType,\n\n name: String,\n\n) -> Option<&s::Directive> {\n\n object_type\n\n .directives\n\n .iter()\n\n .find(|directive| directive.name == name)\n\n}\n\n\n", "file_path": "graphql/src/schema/ast.rs", "rank": 87, "score": 209160.1741944569 }, { "content": "fn bytes_to_string(logger: &Logger, bytes: Vec<u8>) -> String {\n\n let s = String::from_utf8_lossy(&bytes);\n\n\n\n // If the string was re-allocated, that means it was not UTF8.\n\n if matches!(s, std::borrow::Cow::Owned(_)) {\n\n warn!(\n\n logger,\n\n \"Bytes contain invalid UTF8. This may be caused by attempting \\\n\n to convert a value such as an address that cannot be parsed to a unicode string. \\\n\n You may want to use 'toHexString()' instead. 
String (truncated to 1024 chars): '{}'\",\n\n &s.chars().take(1024).collect::<String>(),\n\n )\n\n }\n\n\n\n // The string may have been encoded in a fixed length buffer and padded with null\n\n // characters, so trim trailing nulls.\n\n s.trim_end_matches('\\u{0000}').to_string()\n\n}\n\n\n", "file_path": "runtime/wasm/src/host_exports.rs", "rank": 88, "score": 208664.73741065853 }, { "content": "/// Asserts that the response is a failed GraphQL response; returns its `\"errors\"` field.\n\npub fn assert_error_response(\n\n response: Response<Body>,\n\n expected_status: StatusCode,\n\n graphql_response: bool,\n\n) -> Vec<serde_json::Value> {\n\n assert_eq!(response.status(), expected_status);\n\n assert_expected_headers(&response);\n\n let body = String::from_utf8(\n\n futures03::executor::block_on(hyper::body::to_bytes(response.into_body()))\n\n .unwrap()\n\n .to_vec(),\n\n )\n\n .unwrap();\n\n\n\n // In case of a non-graphql response, return the body.\n\n if !graphql_response {\n\n return vec![serde_json::Value::String(body)];\n\n }\n\n\n\n let json: serde_json::Value =\n", "file_path": "server/http/src/test_utils.rs", "rank": 89, "score": 208223.39007975298 }, { "content": "/// Asserts that the response is a successful GraphQL response; returns its `\"data\"` field.\n\npub fn assert_successful_response(\n\n response: Response<Body>,\n\n) -> serde_json::Map<String, serde_json::Value> {\n\n assert_eq!(response.status(), StatusCode::OK);\n\n assert_expected_headers(&response);\n\n futures03::executor::block_on(\n\n hyper::body::to_bytes(response.into_body())\n\n .map_ok(|chunk| {\n\n let json: serde_json::Value =\n\n serde_json::from_slice(&chunk).expect(\"GraphQL response is not valid JSON\");\n\n\n\n json.as_object()\n\n .expect(\"GraphQL response must be an object\")\n\n .get(\"data\")\n\n .expect(\"GraphQL response must contain a \\\"data\\\" field\")\n\n .as_object()\n\n .expect(\"GraphQL \\\"data\\\" field must be an object\")\n\n .clone()\n\n })\n\n 
.map_err(|e| panic!(\"Truncated response body {:?}\", e)),\n\n )\n\n .unwrap()\n\n}\n\n\n", "file_path": "server/http/src/test_utils.rs", "rank": 90, "score": 208223.39007975298 }, { "content": "// From https://github.com/serde-rs/serde/issues/889#issuecomment-295988865\n\nfn string_or_vec<'de, D>(deserializer: D) -> Result<Vec<String>, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n struct StringOrVec;\n\n\n\n impl<'de> Visitor<'de> for StringOrVec {\n\n type Value = Vec<String>;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n formatter.write_str(\"string or list of strings\")\n\n }\n\n\n\n fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>\n\n where\n\n E: de::Error,\n\n {\n\n Ok(vec![s.to_owned()])\n\n }\n\n\n", "file_path": "node/src/config.rs", "rank": 91, "score": 207235.40003059877 }, { "content": "type ValueMap = MapMeasure<String, q::Value>;\n\n\n\nimpl ValueMap {\n\n fn make_map(size: usize, mut rng: Option<&mut SmallRng>) -> Self {\n\n let mut map = BTreeMap::new();\n\n let modulus = if *NESTED_MAP { 9 } else { 8 };\n\n\n\n for i in 0..size {\n\n let kind = rng\n\n .as_deref_mut()\n\n .map(|rng| rng.gen_range(0..modulus))\n\n .unwrap_or(i % modulus);\n\n\n\n let value = match kind {\n\n 0 => q::Value::Boolean(i % 11 > 5),\n\n 1 => q::Value::Int((i as i32).into()),\n\n 2 => q::Value::Null,\n\n 3 => q::Value::Float(i as f64 / 17.0),\n\n 4 => q::Value::Enum(format!(\"enum{}\", i)),\n\n 5 => q::Value::String(format!(\"string{}\", i)),\n", "file_path": "graph/examples/stress.rs", "rank": 92, "score": 206550.5022863325 }, { "content": "/// Run a GraphQL query against the `test_schema` and call the `test`\n\n/// function with the result. The query is actually run twice: once against\n\n/// the test schema where the `id` of `Song` and `SongStats` has type\n\n/// `String`, and once where it has type `Bytes`. 
The second argument to\n\n/// `test` indicates which type is being used for the id.\n\n///\n\n/// The query can contain placeholders `@S1@` .. `@S4@` which will be\n\n/// replaced with the id's of songs 1 through 4 before running the query.\n\nfn run_query<F>(args: impl Into<QueryArgs>, test: F)\n\nwhere\n\n F: Fn(QueryResult, IdType) -> () + Send + 'static,\n\n{\n\n let QueryArgs {\n\n query,\n\n variables,\n\n max_complexity,\n\n } = args.into();\n\n run_test_sequentially(move |store| async move {\n\n for id_type in [IdType::String, IdType::Bytes] {\n\n let name = id_type.deployment_id();\n\n\n\n let deployment = setup(store.as_ref(), name, BTreeSet::new(), id_type).await;\n\n\n\n let mut query = query.clone();\n\n for (i, id) in id_type.songs().iter().enumerate() {\n\n let pat = format!(\"@S{i}@\");\n\n let repl = format!(\"\\\"{id}\\\"\");\n\n query = query.replace(&pat, &repl);\n", "file_path": "graphql/tests/query.rs", "rank": 93, "score": 205692.83658154355 }, { "content": "fn schema_directive_objects(schema: &Schema, type_objects: &mut TypeObjectsMap) -> r::Value {\n\n r::Value::List(\n\n schema\n\n .document\n\n .definitions\n\n .iter()\n\n .filter_map(|d| match d {\n\n s::Definition::DirectiveDefinition(dd) => Some(dd),\n\n _ => None,\n\n })\n\n .map(|dd| directive_object(schema, type_objects, dd))\n\n .collect(),\n\n )\n\n}\n\n\n", "file_path": "graphql/src/introspection/resolver.rs", "rank": 94, "score": 205467.17318000188 }, { "content": "fn create_ipfs_clients(logger: &Logger, ipfs_addresses: &Vec<String>) -> Vec<IpfsClient> {\n\n // Parse the IPFS URL from the `--ipfs` command line argument\n\n let ipfs_addresses: Vec<_> = ipfs_addresses\n\n .iter()\n\n .map(|uri| {\n\n if uri.starts_with(\"http://\") || uri.starts_with(\"https://\") {\n\n String::from(uri)\n\n } else {\n\n format!(\"http://{}\", uri)\n\n }\n\n })\n\n .collect();\n\n\n\n ipfs_addresses\n\n .into_iter()\n\n .map(|ipfs_address| {\n\n info!(\n\n logger,\n\n \"Trying IPFS node at: 
{}\",\n\n SafeDisplay(&ipfs_address)\n", "file_path": "node/src/manager/commands/run.rs", "rank": 95, "score": 204334.84271824156 }, { "content": "/// Returns all operation definitions in the document.\n\npub fn get_operations(document: &Document) -> Vec<&OperationDefinition> {\n\n document\n\n .definitions\n\n .iter()\n\n .filter_map(|d| match d {\n\n Definition::Operation(op) => Some(op),\n\n _ => None,\n\n })\n\n .collect()\n\n}\n\n\n", "file_path": "graphql/src/query/ast.rs", "rank": 96, "score": 202305.39944140558 }, { "content": "fn test_layout(gql: &str) -> Layout {\n\n let subgraph = DeploymentHash::new(\"subgraph\").unwrap();\n\n let schema = Schema::parse(gql, subgraph.clone()).expect(\"Test schema invalid\");\n\n let namespace = Namespace::new(\"sgd0815\".to_owned()).unwrap();\n\n let site = Arc::new(make_dummy_site(subgraph, namespace, \"anet\".to_string()));\n\n let catalog = Catalog::for_tests(site.clone()).expect(\"Can not create catalog\");\n\n Layout::new(site, &schema, catalog).expect(\"Failed to construct Layout\")\n\n}\n\n\n", "file_path": "store/postgres/src/relational/ddl_tests.rs", "rank": 97, "score": 201618.17044243356 }, { "content": "fn test_layout(gql: &str) -> Layout {\n\n let subgraph = DeploymentHash::new(\"subgraph\").unwrap();\n\n let schema = Schema::parse(gql, subgraph.clone()).expect(\"Test schema invalid\");\n\n let namespace = Namespace::new(\"sgd0815\".to_owned()).unwrap();\n\n let site = Arc::new(make_dummy_site(subgraph, namespace, \"anet\".to_string()));\n\n let catalog = Catalog::for_tests(site.clone()).expect(\"Can not create catalog\");\n\n Layout::new(site, &schema, catalog).expect(\"Failed to construct Layout\")\n\n}\n\n\n", "file_path": "store/postgres/src/relational/query_tests.rs", "rank": 98, "score": 201618.17044243356 }, { "content": "pub fn get_referenced_entity_type<'a>(\n\n schema: &'a s::Document,\n\n field: &s::Field,\n\n) -> Option<&'a s::TypeDefinition> {\n\n unpack_type(schema, 
&field.field_type).filter(|ty| is_entity_type_definition(ty))\n\n}\n\n\n", "file_path": "graphql/src/schema/ast.rs", "rank": 99, "score": 201550.04664897505 } ]
Rust
sudachi/src/dic/build/conn.rs
bignumorg/sudachi.rs
df9997ed6b95af8dc9f9cc77c60c359b78a7105b
/* * Copyright (c) 2021 Works Applications Co., Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use std::fs::File; use std::io::{BufReader, Write}; use std::path::Path; use lazy_static::lazy_static; use regex::Regex; use crate::dic::build::error::{BuildFailure, DicBuildError, DicCompilationCtx, DicWriteResult}; use crate::dic::build::parse::{it_next, parse_i16}; use crate::error::SudachiResult; pub struct ConnBuffer { matrix: Vec<u8>, ctx: DicCompilationCtx, line: String, num_left: i16, num_right: i16, } lazy_static! 
{ static ref SPLIT_REGEX: Regex = Regex::new(r"\s+").unwrap(); static ref EMPTY_LINE: Regex = Regex::new(r"^\s*$").unwrap(); } impl ConnBuffer { pub fn new() -> Self { Self { matrix: Vec::new(), ctx: DicCompilationCtx::default(), line: String::new(), num_left: 0, num_right: 0, } } #[allow(unused)] pub fn matrix(&self) -> &[u8] { &self.matrix } #[allow(unused)] pub fn left(&self) -> i16 { self.num_left } #[allow(unused)] pub fn right(&self) -> i16 { self.num_right } pub fn write_to<W: Write>(&self, writer: &mut W) -> SudachiResult<usize> { if self.num_left < 0 { return num_error("left", self.num_left); } if self.num_right < 0 { return num_error("right", self.num_right); } writer.write_all(&i16::to_le_bytes(self.num_left))?; writer.write_all(&i16::to_le_bytes(self.num_right))?; writer.write_all(&self.matrix)?; Ok(4 + self.matrix.len()) } pub fn read_file(&mut self, path: &Path) -> SudachiResult<()> { let file = File::open(path)?; let bufrd = BufReader::with_capacity(32 * 1024, file); let filename = path.to_str().unwrap_or("unknown").to_owned(); let old = self.ctx.set_filename(filename); let status = self.read(bufrd); self.ctx.set_filename(old); status } pub fn read<R: std::io::BufRead>(&mut self, mut reader: R) -> SudachiResult<()> { self.ctx.set_line(0); loop { let nread = reader.read_line(&mut self.line)?; if nread == 0 { todo!() } self.ctx.add_line(1); if !EMPTY_LINE.is_match(&self.line) { break; } } let result = self.parse_header(); let (left, right) = self.ctx.transform(result)?; if left < 0 { return num_error("left", left); } if right < 0 { return num_error("right", right); } let size = left as usize * right as usize * 2; self.matrix.resize(size, 0); self.num_left = left; self.num_right = right; loop { self.line.clear(); let nread = reader.read_line(&mut self.line)?; if nread == 0 { break; } self.ctx.add_line(1); if EMPTY_LINE.is_match(&self.line) { continue; } let status = self.parse_line(); self.ctx.transform(status)?; } Ok(()) } fn parse_header(&mut self) -> 
DicWriteResult<(i16, i16)> { let mut items = SPLIT_REGEX.splitn(&self.line.trim(), 2); let left = it_next(&self.line, &mut items, "left_num", parse_i16)?; let right = it_next(&self.line, &mut items, "right_num", parse_i16)?; Ok((left, right)) } fn parse_line(&mut self) -> DicWriteResult<()> { let mut items = SPLIT_REGEX.splitn(&self.line.trim(), 3); let left = it_next(&self.line, &mut items, "left", parse_i16)?; let right = it_next(&self.line, &mut items, "right", parse_i16)?; let cost = it_next(&self.line, &mut items, "cost", parse_i16)?; self.write_elem(left, right, cost) } fn write_elem(&mut self, left: i16, right: i16, cost: i16) -> DicWriteResult<()> { let index = right as usize * self.num_left as usize + left as usize; let index = index * 2; let bytes = cost.to_le_bytes(); self.matrix[index] = bytes[0]; self.matrix[index + 1] = bytes[1]; Ok(()) } } fn num_error<T>(part: &'static str, value: i16) -> SudachiResult<T> { return Err(DicBuildError { file: "<connection>".to_owned(), line: 0, cause: BuildFailure::InvalidConnSize(part, value), } .into()); } #[cfg(test)] mod test { use crate::dic::build::conn::ConnBuffer; use crate::dic::connect::ConnectionMatrix; #[test] fn parse_simple2x2() { let data = " 2 2 0 0 0 0 1 1 1 0 2 1 1 3"; let mut parser = ConnBuffer::new(); parser.read(data.as_bytes()).unwrap(); let cost = ConnectionMatrix::from_offset_size( parser.matrix(), 0, parser.left() as _, parser.right() as _, ) .unwrap(); assert_eq!(cost.cost(0, 0), 0); assert_eq!(cost.cost(0, 1), 1); assert_eq!(cost.cost(1, 0), 2); assert_eq!(cost.cost(1, 1), 3); } }
/* * Copyright (c) 2021 Works Applications Co., Ltd. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ use std::fs::File; use std::io::{BufReader, Write}; use std::path::Path; use lazy_static::lazy_static; use regex::Regex; use crate::dic::build::error::{BuildFailure, DicBuildError, DicCompilationCtx, DicWriteResult}; use crate::dic::build::parse::{it_next, parse_i16}; use crate::error::SudachiResult; pub struct ConnBuffer { matrix: Vec<u8>, ctx: DicCompilationCtx, line: String, num_left: i16, num_right: i16, } lazy_static! 
{ static ref SPLIT_REGEX: Regex = Regex::new(r"\s+").unwrap(); static ref EMPTY_LINE: Regex = Regex::new(r"^\s*$").unwrap(); } impl ConnBuffer { pub fn new() -> Self { Self { matrix: Vec::new(), ctx: DicCompilationCtx::default(), line: String::new(), num_left: 0, num_right: 0, } } #[allow(unused)] pub fn matrix(&self) -> &[u8] { &self.matrix } #[allow(unused)] pub fn left(&self) -> i16 { self.num_left } #[allow(unused)] pub fn right(&self) -> i16 { self.num_right } pub fn write_to<W: Write>(&self, writer: &mut W) -> SudachiResult<usize> { if self.num_left < 0 { return num_error("left", self.num_left); } if self.num_right < 0 { return num_error("right", self.num_right); } writer.write_all(&i16::to_le_bytes(self.num_left))?; writer.write_all(&i16::to_le_bytes(self.num_right))?; writer.write_all(&self.matrix)?; Ok(4 + self.matrix.len()) } pub fn read_file(&mut self, path: &Path) -> SudachiResult<()> { let file = File::open(path)?; let bufrd = BufReader::with_capacity(32 * 1024, file); let filename = path.to_str().unwrap_or("unknown").to_owned(); let old = self.ctx.set_filename(filename); let status = self.read(bufrd); self.ctx.set_filename(old); status } pub fn read<R: std::io::BufRead>(&mut self, mut reader: R) -> SudachiResult<()> { self.ctx.set_line(0); loop { let nread = reader.read_line(&mut self.line)?; if nread == 0 { todo!() } self.ctx.add_line(1); if !EMPTY_LINE.is_match(&self.line) { break; } } let result = self.parse_header(); let (left, right) = self.ctx.transform(result)?; if left < 0 { return num_error("left", left); } if right < 0 { return num_error("right", right); } let size = left as usize * right as usize * 2; self.matrix.resize(size, 0); self.num_left = left; self.num_right = right; loop { self.line.clear(); let nread = reader.read_line(&mut self.line)?; if nread == 0 { break; } self.ctx.add_line(1); if EMPTY_LINE.is_match(&self.line) { continue; } let status = self.parse_line(); self.ctx.transform(status)?; } Ok(()) } fn parse_header(&mut self) -> 
DicWriteResult<(i16, i16)> { let mut items = SPLIT_REGEX.splitn(&self.line.trim(), 2); let left = it_next(&self.line, &mut items, "left_num", parse_i16)?; let right = it_next(&self.line, &mut items, "right_num", parse_i16)?; Ok((left, right)) } fn parse_line(&mut self) -> DicWriteResult<()> { let mut items = SPLIT_REGEX.splitn(&self.line.trim(), 3); let left = it_next(&self.line, &mut items, "left", parse_i16)?; let right = it_next(&self.line, &mut items, "right", parse_i16)?; let cost = it_next(&self.line, &mut items, "cost", parse_i16)?; self.write_elem(left, right, cost) } fn write_elem(&mut self, left: i16, right: i16, cost: i16) -> DicWriteResult<()> { le
self.matrix[index] = bytes[0]; self.matrix[index + 1] = bytes[1]; Ok(()) } } fn num_error<T>(part: &'static str, value: i16) -> SudachiResult<T> { return Err(DicBuildError { file: "<connection>".to_owned(), line: 0, cause: BuildFailure::InvalidConnSize(part, value), } .into()); } #[cfg(test)] mod test { use crate::dic::build::conn::ConnBuffer; use crate::dic::connect::ConnectionMatrix; #[test] fn parse_simple2x2() { let data = " 2 2 0 0 0 0 1 1 1 0 2 1 1 3"; let mut parser = ConnBuffer::new(); parser.read(data.as_bytes()).unwrap(); let cost = ConnectionMatrix::from_offset_size( parser.matrix(), 0, parser.left() as _, parser.right() as _, ) .unwrap(); assert_eq!(cost.cost(0, 0), 0); assert_eq!(cost.cost(0, 1), 1); assert_eq!(cost.cost(1, 0), 2); assert_eq!(cost.cost(1, 1), 3); } }
t index = right as usize * self.num_left as usize + left as usize; let index = index * 2; let bytes = cost.to_le_bytes();
function_block-random_span
[ { "content": "pub fn dictionary_bytes_from_path<P: AsRef<Path>>(dictionary_path: P) -> SudachiResult<Vec<u8>> {\n\n let dictionary_path = dictionary_path.as_ref();\n\n let dictionary_stat = fs::metadata(&dictionary_path)?;\n\n let mut dictionary_file = File::open(dictionary_path)?;\n\n let mut dictionary_bytes = Vec::with_capacity(dictionary_stat.len() as usize);\n\n dictionary_file.read_to_end(&mut dictionary_bytes)?;\n\n\n\n Ok(dictionary_bytes)\n\n}\n\n\n\npub const LEX_CSV: &[u8] = include_bytes!(\"../resources/lex.csv\");\n\n\n\nlazy_static! {\n\n pub static ref TEST_CONFIG: Config = {\n\n let test_config_path = \"tests/resources/sudachi.json\";\n\n let conf = Config::new(Some(PathBuf::from(test_config_path)), None, None)\n\n .expect(\"Failed to read config file for test\");\n\n println!(\"{:?}\", conf);\n\n conf\n\n };\n", "file_path": "sudachi/tests/common/mod.rs", "rank": 0, "score": 296584.80527743907 }, { "content": "pub fn wrap_ctx<T, E: Display, C: Debug + ?Sized>(v: Result<T, E>, ctx: &C) -> PyResult<T> {\n\n match v {\n\n Ok(v) => Ok(v),\n\n Err(e) => Err(PyException::new_err(format!(\"{:?}: {}\", ctx, e))),\n\n }\n\n}\n", "file_path": "python/src/errors.rs", "rank": 1, "score": 295924.29526144266 }, { "content": "fn dump_gids<W: Write>(w: &mut W, data: &[u32]) -> SudachiResult<()> {\n\n for (i, e) in data.iter().enumerate() {\n\n write!(w, \"{}\", e)?;\n\n if i + 1 != data.len() {\n\n w.write_all(b\"/\")?;\n\n }\n\n }\n\n Ok(())\n\n}\n", "file_path": "sudachi-cli/src/build.rs", "rank": 2, "score": 290622.33711752004 }, { "content": "fn dump_wids<W: Write>(w: &mut W, data: &[WordId]) -> SudachiResult<()> {\n\n for (i, e) in data.iter().enumerate() {\n\n let prefix = match e.dic() {\n\n 0 => \"\",\n\n _ => \"U\",\n\n };\n\n write!(w, \"{}{}\", prefix, e.word())?;\n\n if i + 1 != data.len() {\n\n w.write_all(b\"/\")?;\n\n }\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "sudachi-cli/src/build.rs", "rank": 3, "score": 287326.563228018 }, { "content": "fn 
dump_matrix<W: Write>(grammar: &Grammar, w: &mut W) {\n\n let conn = grammar.conn_matrix();\n\n write!(w, \"{} {}\", conn.num_left(), conn.num_right()).unwrap();\n\n\n\n for left in 0..conn.num_left() {\n\n for right in 0..conn.num_right() {\n\n let cost = conn.cost(left as _, right as _);\n\n write!(w, \"{} {} {}\\n\", left, right, cost).unwrap();\n\n }\n\n }\n\n}\n\n\n", "file_path": "sudachi-cli/src/build.rs", "rank": 4, "score": 285450.9547487581 }, { "content": "fn dump_word_info<W: Write>(lex: &LexiconSet, w: &mut W) -> SudachiResult<()> {\n\n let size = lex.size();\n\n for i in 0..size {\n\n let wid = WordId::checked(0, i)?;\n\n let (left, right, cost) = lex.get_word_param(wid);\n\n let winfo = lex.get_word_info(wid)?;\n\n write!(w, \"{},{},{},\", left, right, cost)?;\n\n write!(w, \"{},\", winfo.surface())?;\n\n write!(w, \"{},\", winfo.head_word_length())?;\n\n write!(w, \"{},\", winfo.normalized_form())?;\n\n write!(w, \"{},\", winfo.dictionary_form_word_id())?;\n\n write!(w, \"{},\", winfo.reading_form())?;\n\n dump_wids(w, winfo.a_unit_split())?;\n\n w.write_all(b\",\")?;\n\n dump_wids(w, winfo.b_unit_split())?;\n\n w.write_all(b\",\")?;\n\n dump_wids(w, winfo.word_structure())?;\n\n w.write_all(b\",\")?;\n\n dump_gids(w, winfo.synonym_group_ids())?;\n\n w.write_all(b\"\\n\")?;\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "sudachi-cli/src/build.rs", "rank": 5, "score": 284144.20753188553 }, { "content": "pub fn utf16_string_parser(input: &[u8]) -> SudachiNomResult<&[u8], String> {\n\n utf16_string_data(input).and_then(|(rest, data)| {\n\n if data.is_empty() {\n\n Ok((rest, String::new()))\n\n } else {\n\n // most Japanese chars are 3-bytes in utf-8 and 2 in utf-16\n\n let capacity = (data.len() + 1) * 3 / 2;\n\n let mut result = String::with_capacity(capacity);\n\n let iter = U16CodeUnits::new(data);\n\n for c in char::decode_utf16(iter) {\n\n match c {\n\n Err(_) => return Err(nom::Err::Failure(SudachiNomError::Utf16String)),\n\n Ok(c) => 
result.push(c),\n\n }\n\n }\n\n Ok((rest, result))\n\n }\n\n })\n\n}\n\n\n", "file_path": "sudachi/src/dic/read/u16str.rs", "rank": 6, "score": 281382.73160132486 }, { "content": "pub fn skip_u16_string(input: &[u8]) -> SudachiNomResult<&[u8], String> {\n\n utf16_string_data(input).map(|(rest, _)| (rest, String::new()))\n\n}\n\n\n", "file_path": "sudachi/src/dic/read/u16str.rs", "rank": 7, "score": 281382.7316013249 }, { "content": "#[inline]\n\npub fn utf16_string_data(input: &[u8]) -> SudachiNomResult<&[u8], &[u8]> {\n\n let (rest, length) = string_length_parser(input)?;\n\n if length == 0 {\n\n return Ok((rest, &[]));\n\n }\n\n let num_bytes = (length * 2) as usize;\n\n if rest.len() < num_bytes {\n\n return Err(nom::Err::Failure(SudachiNomError::Utf16String));\n\n }\n\n\n\n let (data, rest) = rest.split_at(num_bytes);\n\n\n\n Ok((rest, data))\n\n}\n\n\n", "file_path": "sudachi/src/dic/read/u16str.rs", "rank": 8, "score": 262195.545243972 }, { "content": "pub fn string_length_parser(input: &[u8]) -> SudachiNomResult<&[u8], u16> {\n\n let (rest, length) = le_u8(input)?;\n\n // word length can be 1 or 2 bytes\n\n let (rest, opt_low) = nom::combinator::cond(length >= 128, le_u8)(rest)?;\n\n Ok((\n\n rest,\n\n match opt_low {\n\n Some(low) => ((length as u16 & 0x7F) << 8) | low as u16,\n\n None => length as u16,\n\n },\n\n ))\n\n}\n\n\n\n/// Read UTF-16 code units from non-aligned storage\n\npub struct U16CodeUnits<'a> {\n\n data: &'a [u8],\n\n offset: usize,\n\n}\n\n\n\nimpl<'a> U16CodeUnits<'a> {\n", "file_path": "sudachi/src/dic/read/u16str.rs", "rank": 9, "score": 250846.14374001144 }, { "content": "fn dump_pos<W: Write>(grammar: &Grammar, w: &mut W) {\n\n for p in grammar.pos_list.iter() {\n\n for (i, e) in p.iter().enumerate() {\n\n w.write_all(e.as_bytes()).unwrap();\n\n if (i + 1) == p.len() {\n\n w.write_all(b\"\\n\").unwrap();\n\n } else {\n\n w.write_all(b\",\").unwrap();\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "sudachi-cli/src/build.rs", "rank": 10, 
"score": 247285.9036781834 }, { "content": "fn create_file(p: &Path) -> std::io::Result<File> {\n\n if p.exists() {\n\n std::fs::remove_file(p)?;\n\n }\n\n\n\n OpenOptions::new().create_new(true).write(true).open(p)\n\n}\n\n\n", "file_path": "python/src/build.rs", "rank": 11, "score": 242083.63091436337 }, { "content": "fn map_file(path: &Path) -> SudachiResult<Storage> {\n\n let file = File::open(path)?;\n\n let mapping = unsafe { Mmap::map(&file) }?;\n\n Ok(Storage::File(mapping))\n\n}\n\n\n", "file_path": "sudachi/src/dic/dictionary.rs", "rank": 12, "score": 238631.24640917196 }, { "content": "fn trie_array_parser(input: &[u8], offset: usize, trie_size: u32) -> SudachiResult<&[u8]> {\n\n let trie_start = offset;\n\n let trie_end = offset + (trie_size as usize) * size_of::<u32>();\n\n if input.len() < trie_start {\n\n return Err(SudachiError::InvalidRange(trie_start, trie_end));\n\n }\n\n if input.len() < trie_end {\n\n return Err(SudachiError::InvalidRange(trie_start, trie_end));\n\n }\n\n let trie_data = &input[trie_start..trie_end];\n\n Ok(trie_data)\n\n}\n", "file_path": "sudachi/src/dic/lexicon/mod.rs", "rank": 13, "score": 238019.8426238351 }, { "content": "fn description_parser(input: &[u8]) -> SudachiNomResult<&[u8], String> {\n\n let (rest, description_bytes) = take(Header::DESCRIPTION_SIZE)(input)?;\n\n Ok((rest, nul_terminated_str_from_slice(description_bytes)))\n\n}\n\n\n", "file_path": "sudachi/src/dic/header.rs", "rank": 14, "score": 228111.52756953717 }, { "content": "pub fn u32_parser(input: &[u8]) -> SudachiNomResult<&[u8], u32> {\n\n le_u32(input)\n\n}\n", "file_path": "sudachi/src/dic/read/mod.rs", "rank": 15, "score": 218111.44890470922 }, { "content": "fn header_parser(input: &[u8]) -> SudachiNomResult<&[u8], (u64, u64, String)> {\n\n nom::sequence::tuple((le_u64, le_u64, description_parser))(input)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n fn header_from_parts<T: AsRef<[u8]>>(\n\n version: u64,\n\n create_time: 
u64,\n\n description: T,\n\n ) -> Result<Header, HeaderError> {\n\n let mut bytes = Vec::new();\n\n bytes.extend(&version.to_le_bytes());\n\n bytes.extend(&create_time.to_le_bytes());\n\n bytes.extend(description.as_ref());\n\n\n\n Header::parse(&bytes)\n\n }\n", "file_path": "sudachi/src/dic/header.rs", "rank": 16, "score": 214570.55086127127 }, { "content": "fn pos_list_parser(input: &[u8]) -> SudachiNomResult<&[u8], Vec<Vec<String>>> {\n\n let (rest, pos_size) = le_u16(input)?;\n\n nom::multi::count(\n\n nom::multi::count(utf16_string_parser, POS_DEPTH),\n\n pos_size as usize,\n\n )(rest)\n\n}\n\n\n", "file_path": "sudachi/src/dic/grammar.rs", "rank": 17, "score": 212050.307959591 }, { "content": "pub fn skip_u32_array(input: &[u8]) -> SudachiNomResult<&[u8], Vec<u32>> {\n\n let (rest, length) = le_u8(input)?;\n\n let num_bytes = length as usize * 4;\n\n let next = &rest[num_bytes..];\n\n Ok((next, Vec::new()))\n\n}\n\n\n", "file_path": "sudachi/src/dic/read/mod.rs", "rank": 18, "score": 209323.06454858766 }, { "content": "pub fn u32_array_parser(input: &[u8]) -> SudachiNomResult<&[u8], Vec<u32>> {\n\n let (rest, length) = le_u8(input)?;\n\n nom::multi::count(le_u32, length as usize)(rest)\n\n}\n\n\n", "file_path": "sudachi/src/dic/read/mod.rs", "rank": 19, "score": 209323.06454858766 }, { "content": "fn build_node_ai(start: usize, end: usize, cost: i32) -> ResultNode {\n\n build_node(start, end, cost, \"アイ\")\n\n}\n\n\n", "file_path": "sudachi/src/plugin/path_rewrite/join_katakana_oov/tests.rs", "rank": 20, "score": 208027.47115573497 }, { "content": "fn build_node_aiu(start: usize, end: usize, cost: i32) -> ResultNode {\n\n build_node(start, end, cost, \"アイウ\")\n\n}\n\n\n", "file_path": "sudachi/src/plugin/path_rewrite/join_katakana_oov/tests.rs", "rank": 21, "score": 208027.47115573497 }, { "content": "fn build_system(mut cmd: BuildCmd, matrix: PathBuf) {\n\n let mut builder = DictBuilder::new_system();\n\n builder.set_description(std::mem::take(&mut 
cmd.description));\n\n builder\n\n .read_conn(matrix.as_path())\n\n .expect(\"failed to read matrix\");\n\n for d in cmd.inputs.iter() {\n\n builder\n\n .read_lexicon(d.as_path())\n\n .unwrap_or_else(|e| panic!(\"failed to read {:?}\\n{:?}\", d, e));\n\n }\n\n builder.resolve().expect(\"failed to resolve references\");\n\n let file = output_file(&cmd.output_file);\n\n let mut buf_writer = BufWriter::with_capacity(16 * 1024, file);\n\n builder\n\n .compile(&mut buf_writer)\n\n .expect(\"failed to compile dictionary\");\n\n buf_writer.flush().expect(\"failed to flush\");\n\n print_stats(builder.report());\n\n}\n\n\n", "file_path": "sudachi-cli/src/build.rs", "rank": 22, "score": 207436.10752944072 }, { "content": "pub fn skip_wid_array(input: &[u8]) -> SudachiNomResult<&[u8], Vec<WordId>> {\n\n let (rest, length) = le_u8(input)?;\n\n let num_bytes = length as usize * 4;\n\n let next = &rest[num_bytes..];\n\n Ok((next, Vec::new()))\n\n}\n\n\n", "file_path": "sudachi/src/dic/read/mod.rs", "rank": 23, "score": 206967.39882490528 }, { "content": "pub fn u32_wid_array_parser(input: &[u8]) -> SudachiNomResult<&[u8], Vec<WordId>> {\n\n let (rest, length) = le_u8(input)?;\n\n nom::multi::count(le_u32.map(|id| WordId::from_raw(id)), length as usize)(rest)\n\n}\n\n\n", "file_path": "sudachi/src/dic/read/mod.rs", "rank": 24, "score": 204687.32999432503 }, { "content": "fn output_file(p: &Path) -> File {\n\n if p.exists() {\n\n std::fs::remove_file(p).unwrap_or_else(|e| panic!(\"failed to delete {:?}\\n{:?}\", p, e));\n\n }\n\n\n\n OpenOptions::new()\n\n .write(true)\n\n .create_new(true)\n\n .open(&p)\n\n .unwrap_or_else(|e| panic!(\"failed to open {:?} for writing:\\n{:?}\", p, e))\n\n}\n\n\n", "file_path": "sudachi-cli/src/build.rs", "rank": 25, "score": 202915.21273116049 }, { "content": "fn build_node(start: usize, end: usize, cost: i32, surface: &str) -> ResultNode {\n\n let cstart = start / 3;\n\n let node = Node::new(\n\n cstart as u16,\n\n (cstart + 
surface.chars().count()) as u16,\n\n 7,\n\n 7,\n\n 3000,\n\n WordId::new(0, 4),\n\n );\n\n ResultNode::new(\n\n node,\n\n cost,\n\n start as u16,\n\n end as u16,\n\n WordInfoData {\n\n surface: surface.to_string(),\n\n normalized_form: surface.to_string(),\n\n dictionary_form: surface.to_string(),\n\n pos_id: 4,\n\n dictionary_form_word_id: -1,\n\n ..Default::default()\n\n }\n\n .into(),\n\n )\n\n}\n\n\n", "file_path": "sudachi/src/plugin/path_rewrite/join_katakana_oov/tests.rs", "rank": 26, "score": 201398.44923188866 }, { "content": "fn build_node_oov(start: usize, end: usize, cost: i32, surface: &str) -> ResultNode {\n\n let cstart = start / 3;\n\n let node = Node::new(\n\n cstart as u16,\n\n (cstart + surface.chars().count()) as u16,\n\n 8,\n\n 8,\n\n 6000,\n\n WordId::oov(4),\n\n );\n\n ResultNode::new(\n\n node,\n\n cost,\n\n start as u16,\n\n end as u16,\n\n WordInfoData {\n\n surface: surface.to_string(),\n\n normalized_form: surface.to_string(),\n\n dictionary_form: surface.to_string(),\n\n pos_id: 4,\n\n dictionary_form_word_id: -1,\n\n ..Default::default()\n\n }\n\n .into(),\n\n )\n\n}\n\n\n", "file_path": "sudachi/src/plugin/path_rewrite/join_katakana_oov/tests.rs", "rank": 27, "score": 198795.09747400606 }, { "content": "/// Returns Grammar with empty data\n\npub fn zero_grammar() -> Grammar<'static> {\n\n Grammar::parse(ZERO_GRAMMAR_BYTES, 0).expect(\"Failed to make grammar\")\n\n}\n\n\n\nconst TEST_CHAR_DEF: &[u8] = include_bytes!(\"../tests/resources/char.def\");\n\n\n\nlazy_static! 
{\n\n pub static ref CHAR_CAT: CharacterCategory =\n\n CharacterCategory::from_reader(TEST_CHAR_DEF).unwrap();\n\n}\n\n\n", "file_path": "sudachi/src/test.rs", "rank": 28, "score": 192659.70844123285 }, { "content": "/// Returns grammar that has test character categories\n\npub fn cat_grammar() -> Grammar<'static> {\n\n let mut grammar = zero_grammar();\n\n grammar.set_character_category(CHAR_CAT.clone());\n\n grammar\n\n}\n\n\n", "file_path": "sudachi/src/test.rs", "rank": 29, "score": 192659.66799342557 }, { "content": "fn u32_parser_offset(input: &[u8], offset: usize) -> SudachiNomResult<&[u8], u32> {\n\n nom::sequence::preceded(take(offset), le_u32)(input)\n\n}\n\n\n", "file_path": "sudachi/src/dic/lexicon/mod.rs", "rank": 30, "score": 191682.00000212723 }, { "content": "// Returns if eos is the middle of phrase\n\nfn is_continuous_phrase(s: &str, eos: usize) -> SudachiResult<bool> {\n\n lazy_static! {\n\n static ref QUOTE_MARKER: Regex = Regex::new(&format!(\n\n \"(!|?|\\\\!|\\\\?|[{}])(と|っ|です)\",\n\n CLOSE_PARENTHESIS\n\n ))\n\n .unwrap();\n\n static ref EOS_ITEMIZE_HEADER: Regex =\n\n Regex::new(&format!(\"([{}])([{}])\\\\z\", ALPHABET_OR_NUMBER, DOT)).unwrap();\n\n }\n\n\n\n // we can safely unwrap since eos > 0\n\n let last_char_len = s[..eos].chars().last().unwrap().to_string().len();\n\n if let Some(mat) = QUOTE_MARKER.find(&s[(eos - last_char_len)..])? 
{\n\n if mat.start() == 0 {\n\n return Ok(true);\n\n }\n\n }\n\n\n\n // we can safely unwrap since eos < s.len()\n", "file_path": "sudachi/src/sentence_detector.rs", "rank": 31, "score": 187004.86274257017 }, { "content": "/// Create String from UTF-8 bytes up to NUL byte or end of slice (whichever is first)\n\nfn nul_terminated_str_from_slice(buf: &[u8]) -> String {\n\n let str_bytes: &[u8] = if let Some(nul_idx) = buf.iter().position(|b| *b == 0) {\n\n &buf[..nul_idx]\n\n } else {\n\n &buf\n\n };\n\n String::from_utf8_lossy(str_bytes).to_string()\n\n}\n\n\n", "file_path": "sudachi/src/dic/header.rs", "rank": 32, "score": 185339.73958585117 }, { "content": "fn copy_if_empty(v1: String, v2: &String) -> String {\n\n if v1.is_empty() {\n\n v2.clone()\n\n } else {\n\n v1\n\n }\n\n}\n\n\n\nimpl From<WordInfo> for PyWordInfo {\n\n fn from(word_info: WordInfo) -> Self {\n\n let word_info: WordInfoData = word_info.into();\n\n Self {\n\n head_word_length: word_info.head_word_length,\n\n pos_id: word_info.pos_id,\n\n normalized_form: copy_if_empty(word_info.normalized_form, &word_info.surface),\n\n dictionary_form_word_id: word_info.dictionary_form_word_id,\n\n dictionary_form: copy_if_empty(word_info.dictionary_form, &word_info.surface),\n\n reading_form: copy_if_empty(word_info.reading_form, &word_info.surface),\n\n surface: word_info.surface,\n\n // WordId is repr(transparent) with a single u32 field so transmute is safe\n", "file_path": "python/src/word_info.rs", "rank": 33, "score": 185328.5785941648 }, { "content": "#[inline]\n\nfn write64(mut hash: u64, mut bytes: &[u8]) -> u64 {\n\n while bytes.len() >= 8 {\n\n let (data, rem) = bytes.split_at(8);\n\n hash.hash_word(u64::from_ne_bytes(data.try_into().unwrap()));\n\n bytes = rem;\n\n }\n\n\n\n if bytes.len() >= 4 {\n\n let (data, rem) = bytes.split_at(4);\n\n hash.hash_word(u32::from_ne_bytes(data.try_into().unwrap()) as u64);\n\n bytes = rem;\n\n }\n\n\n\n if bytes.len() >= 2 {\n\n let (data, rem) = 
bytes.split_at(2);\n\n hash.hash_word(u16::from_ne_bytes(data.try_into().unwrap()) as u64);\n\n bytes = rem;\n\n }\n\n\n\n if let Some(&byte) = bytes.first() {\n", "file_path": "sudachi/src/util/fxhash.rs", "rank": 34, "score": 184951.17137071968 }, { "content": "fn copy_of_bytes<T: ReadLE>(data: &[u8]) -> Vec<T> {\n\n let size_t = std::mem::size_of::<T>();\n\n assert_eq!(data.len() % size_t, 0);\n\n let nelems = data.len() / size_t;\n\n let mut result = Vec::with_capacity(nelems);\n\n for i in (0..data.len()).step_by(size_t) {\n\n let sl = &data[i..i + size_t];\n\n result.push(T::from_le_bytes(sl).unwrap());\n\n }\n\n result\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn aligned_1() {\n\n assert!(is_aligned(0, 1));\n\n assert!(is_aligned(1, 1));\n", "file_path": "sudachi/src/util/cow_array.rs", "rank": 35, "score": 181363.9719035728 }, { "content": "#[cfg(target_os = \"macos\")]\n\nfn make_system_specific_name(s: &str) -> String {\n\n format!(\"lib{}.dylib\", s)\n\n}\n\n\n", "file_path": "sudachi/src/plugin/loader.rs", "rank": 36, "score": 163390.8231105243 }, { "content": "pub fn wrap<T, E: Display>(v: Result<T, E>) -> PyResult<T> {\n\n match v {\n\n Ok(v) => Ok(v),\n\n Err(e) => Err(PyException::new_err(format!(\"{}\", e))),\n\n }\n\n}\n\n\n", "file_path": "python/src/errors.rs", "rank": 37, "score": 162398.7291514175 }, { "content": "fn build_mock_grammar() -> Grammar<'static> {\n\n let mut grammar = zero_grammar();\n\n let char_cat = build_character_category();\n\n grammar.set_character_category(char_cat);\n\n grammar\n\n}\n\n\n\nlazy_static! 
{\n\n static ref GRAMMAR: Grammar<'static> = build_mock_grammar();\n\n}\n", "file_path": "sudachi/src/plugin/path_rewrite/join_numeric/test.rs", "rank": 38, "score": 160748.71061004035 }, { "content": "fn system_specific_name(s: &str) -> Option<String> {\n\n if s.contains('.') {\n\n None\n\n } else {\n\n let p = std::path::Path::new(s);\n\n let fname = p\n\n .file_name()\n\n .and_then(|np| np.to_str())\n\n .map(|f| make_system_specific_name(f));\n\n let parent = p.parent().and_then(|np| np.to_str());\n\n match (parent, fname) {\n\n (Some(p), Some(c)) => Some(format!(\"{}/{}\", p, c)),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a, T: PluginCategory + ?Sized> PluginLoader<'a, T> {\n\n pub fn new(grammar: &'a Grammar, config: &'a Config) -> PluginLoader<'a, T> {\n\n PluginLoader {\n", "file_path": "sudachi/src/plugin/loader.rs", "rank": 39, "score": 159601.770404502 }, { "content": "/// Returns a byte length of chars at the beggining of str, which cannot be a bos\n\nfn prohibited_bos(s: &str) -> SudachiResult<usize> {\n\n lazy_static! {\n\n static ref PROHIBITED_BOS: Regex = Regex::new(&format!(\n\n \"\\\\A([{}{}{}])+\",\n\n CLOSE_PARENTHESIS, COMMA, PERIODS\n\n ))\n\n .unwrap();\n\n }\n\n\n\n if let Some(mat) = PROHIBITED_BOS.find(s)? {\n\n Ok(mat.end())\n\n } else {\n\n Ok(0)\n\n }\n\n}\n\n\n", "file_path": "sudachi/src/sentence_detector.rs", "rank": 40, "score": 159479.63478967332 }, { "content": "/// Returns the count of non-closed open parentheses remaining at the end of input.\n\nfn parenthesis_level(s: &str) -> SudachiResult<usize> {\n\n lazy_static! 
{\n\n static ref PARENTHESIS: Regex = Regex::new(&format!(\n\n \"([{}])|([{}])\",\n\n OPEN_PARENTHESIS, CLOSE_PARENTHESIS\n\n ))\n\n .unwrap();\n\n }\n\n let mut level = 0;\n\n for caps in PARENTHESIS.captures_iter(s) {\n\n if let Some(_) = caps?.get(1) {\n\n // open\n\n level += 1;\n\n } else if level > 0 {\n\n level -= 1;\n\n }\n\n }\n\n Ok(level)\n\n}\n\n\n", "file_path": "sudachi/src/sentence_detector.rs", "rank": 41, "score": 159479.558392505 }, { "content": "pub fn register_functions(m: &PyModule) -> PyResult<()> {\n\n m.add_function(wrap_pyfunction!(build_system_dic, m)?)?;\n\n m.add_function(wrap_pyfunction!(build_user_dic, m)?)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "python/src/build.rs", "rank": 42, "score": 159028.04095217458 }, { "content": "fn build_mock_grammar() -> Grammar<'static> {\n\n let mut grammar = zero_grammar();\n\n let char_cat = build_character_category();\n\n grammar.set_character_category(char_cat);\n\n grammar\n\n}\n\n\n\nlazy_static! {\n\n static ref GRAMMAR: Grammar<'static> = build_mock_grammar();\n\n}\n", "file_path": "sudachi/src/plugin/path_rewrite/join_katakana_oov/tests.rs", "rank": 43, "score": 158649.6434565933 }, { "content": "/// Helper function to load the plugins of a single category\n\n/// Should be called with turbofish syntax and trait object type:\n\n/// `let plugins = load_plugins_of::<dyn InputText>(...)`.\n\npub fn load_plugins_of<T: PluginCategory + ?Sized>(\n\n cfg: &Config,\n\n grammar: &Grammar,\n\n) -> SudachiResult<PluginContainer<T>> {\n\n let mut loader: PluginLoader<T> = PluginLoader::new(grammar, cfg);\n\n loader.load()?;\n\n Ok(loader.freeze())\n\n}\n", "file_path": "sudachi/src/plugin/loader.rs", "rank": 44, "score": 156866.9338987156 }, { "content": "/// Accessor trait for the full path cost\n\npub trait PathCost {\n\n fn total_cost(&self) -> i32;\n\n\n\n #[inline]\n\n fn is_connected_to_bos(&self) -> bool {\n\n self.total_cost() != i32::MAX\n\n }\n\n}\n\n\n", "file_path": 
"sudachi/src/analysis/node.rs", "rank": 45, "score": 155666.785154561 }, { "content": "fn dump_part(dict: PathBuf, part: String, output: PathBuf) {\n\n let file = File::open(&dict).expect(\"open failed\");\n\n let data = unsafe { Mmap::map(&file) }.expect(\"mmap failed\");\n\n let loader =\n\n unsafe { DictionaryLoader::read_any_dictionary(&data) }.expect(\"failed to load dictionary\");\n\n let dict = loader.to_loaded().expect(\"should contain grammar\");\n\n\n\n let outf = output_file(&output);\n\n let mut writer = BufWriter::new(outf);\n\n\n\n match part.as_str() {\n\n \"pos\" => dump_pos(dict.grammar(), &mut writer),\n\n \"matrix\" => dump_matrix(dict.grammar(), &mut writer),\n\n \"winfo\" => dump_word_info(dict.lexicon(), &mut writer).unwrap(),\n\n _ => unimplemented!(),\n\n }\n\n writer.flush().unwrap();\n\n}\n\n\n", "file_path": "sudachi-cli/src/build.rs", "rank": 46, "score": 155088.98832097027 }, { "content": "#[test]\n\nfn size() {\n\n assert_eq!(39, LEXICON.size())\n\n}\n", "file_path": "sudachi/tests/lexicon.rs", "rank": 47, "score": 151994.12441335115 }, { "content": "fn build_node_num(surface: &str, normalized: &str, start_cp: usize, start_b: usize) -> ResultNode {\n\n ResultNode::new(\n\n Node::new(\n\n start_cp as u16,\n\n (start_cp + surface.chars().count()) as u16,\n\n 9,\n\n 9,\n\n 2478,\n\n WordId::new(0, 1),\n\n ),\n\n 9,\n\n start_b as u16,\n\n (start_b + surface.len()) as u16,\n\n WordInfoData {\n\n surface: surface.to_string(),\n\n head_word_length: surface.len() as u16,\n\n pos_id: 7,\n\n normalized_form: normalized.to_string(),\n\n dictionary_form_word_id: -1,\n\n dictionary_form: surface.to_string(),\n\n ..Default::default()\n\n }\n\n .into(),\n\n )\n\n}\n", "file_path": "sudachi/src/plugin/path_rewrite/join_numeric/test.rs", "rank": 48, "score": 151027.75421530448 }, { "content": "fn build_node_oov(surface: &str, normalized: &str, start_cp: usize, start_b: usize) -> ResultNode {\n\n ResultNode::new(\n\n Node::new(\n\n start_cp as u16,\n\n 
(start_cp + surface.chars().count()) as u16,\n\n 8,\n\n 8,\n\n 6000,\n\n WordId::oov(4),\n\n ),\n\n 9,\n\n start_b as u16,\n\n (start_b + surface.len()) as u16,\n\n WordInfoData {\n\n surface: surface.to_string(),\n\n head_word_length: surface.len() as u16,\n\n pos_id: 4,\n\n normalized_form: normalized.to_string(),\n\n dictionary_form_word_id: -1,\n\n dictionary_form: surface.to_string(),\n\n ..Default::default()\n\n }\n\n .into(),\n\n )\n\n}\n\n\n", "file_path": "sudachi/src/plugin/path_rewrite/join_numeric/test.rs", "rank": 49, "score": 151027.75421530448 }, { "content": "#[test]\n\nfn version() {\n\n assert_eq!(\n\n HeaderVersion::SystemDict(SystemDictVersion::Version2),\n\n HEADER.version\n\n );\n\n}\n\n\n", "file_path": "sudachi/tests/dictionary_header.rs", "rank": 50, "score": 149964.67281902628 }, { "content": "fn build_user(mut cmd: BuildCmd, system: PathBuf) {\n\n let cfg =\n\n Config::new(None, None, Some(system)).expect(\"failed to create default configuration\");\n\n let dict = JapaneseDictionary::from_cfg(&cfg).expect(\"failed to load system dictionary\");\n\n\n\n let mut builder = DictBuilder::new_user(&dict);\n\n builder.set_description(std::mem::take(&mut cmd.description));\n\n for d in cmd.inputs.iter() {\n\n builder\n\n .read_lexicon(d.as_path())\n\n .unwrap_or_else(|e| panic!(\"failed to read {:?}\\n{:?}\", d, e));\n\n }\n\n builder.resolve().expect(\"failed to resolve references\");\n\n let file = output_file(&cmd.output_file);\n\n let mut buf_writer = BufWriter::with_capacity(16 * 1024, file);\n\n builder\n\n .compile(&mut buf_writer)\n\n .expect(\"failed to compile dictionary\");\n\n buf_writer.flush().expect(\"failed to flush\");\n\n print_stats(builder.report());\n\n}\n\n\n", "file_path": "sudachi-cli/src/build.rs", "rank": 51, "score": 149134.02125231025 }, { "content": "fn current_exe_dir() -> String {\n\n let exe = current_exe().unwrap_or_else(|e| panic!(\"Current exe is not available {:?}\", e));\n\n\n\n let parent = exe\n\n 
.parent()\n\n .unwrap_or_else(|| panic!(\"Path to executable must have a parent\"));\n\n\n\n parent.to_str().map(|s| s.to_owned()).unwrap_or_else(|| {\n\n panic!(\"placing Sudachi in directories with non-utf paths is not supported\")\n\n })\n\n}\n\n\n\nlazy_static! {\n\n static ref CURRENT_EXE_DIR: String = current_exe_dir();\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::config::Config;\n\n use crate::prelude::SudachiResult;\n", "file_path": "sudachi/src/config.rs", "rank": 52, "score": 138888.65611984968 }, { "content": "pub fn build_main() {\n\n let args: BuildCli = BuildCli::from_args();\n\n\n\n match args {\n\n BuildCli::System { common, matrix } => build_system(common, matrix),\n\n BuildCli::User { common, dictionary } => build_user(common, dictionary),\n\n BuildCli::Dump { dict, part, output } => dump_part(dict, part, output),\n\n }\n\n}\n\n\n", "file_path": "sudachi-cli/src/build.rs", "rank": 53, "score": 138436.77189212927 }, { "content": "/// Concatenate the nodes in the range and replace normalized_form if given.\n\npub fn concat_nodes(\n\n mut path: Vec<ResultNode>,\n\n begin: usize,\n\n end: usize,\n\n normalized_form: Option<String>,\n\n) -> SudachiResult<Vec<ResultNode>> {\n\n if begin >= end {\n\n return Err(SudachiError::InvalidRange(begin, end));\n\n }\n\n\n\n let end_bytes = path[end - 1].end_bytes();\n\n let beg_bytes = path[begin].begin_bytes();\n\n\n\n let mut surface = String::with_capacity(end_bytes - beg_bytes);\n\n let mut reading_form = String::with_capacity(end_bytes - beg_bytes);\n\n let mut dictionary_form = String::with_capacity(end_bytes - beg_bytes);\n\n let mut head_word_length: u16 = 0;\n\n\n\n for node in path[begin..end].iter() {\n\n let data = node.word_info().borrow_data();\n", "file_path": "sudachi/src/analysis/node.rs", "rank": 54, "score": 138436.77189212927 }, { "content": "/// Concatenate the nodes in the range and set pos_id.\n\npub fn concat_oov_nodes(\n\n mut path: Vec<ResultNode>,\n\n begin: usize,\n\n end: 
usize,\n\n pos_id: u16,\n\n) -> SudachiResult<Vec<ResultNode>> {\n\n if begin >= end {\n\n return Err(SudachiError::InvalidRange(begin, end));\n\n }\n\n\n\n let capa = path[end - 1].end_bytes() - path[begin].begin_bytes();\n\n\n\n let mut surface = String::with_capacity(capa);\n\n let mut head_word_length: u16 = 0;\n\n let mut wid = WordId::from_raw(0);\n\n\n\n for node in path[begin..end].iter() {\n\n let data = node.word_info().borrow_data();\n\n surface.push_str(&data.surface);\n\n head_word_length += data.head_word_length;\n", "file_path": "sudachi/src/analysis/node.rs", "rank": 55, "score": 136655.01114426117 }, { "content": "// Edits are assumed to be sorted (from start to end) and non-overlapping.\n\n// This is not checked right now (may be we should check this in debug mode)\n\n// Current plugin implementations satisfy this criteria.\n\npub fn resolve_edits(\n\n source: &str,\n\n source_mapping: &Vec<usize>,\n\n target: &mut String,\n\n target_mapping: &mut Vec<usize>,\n\n edits: &mut Vec<ReplaceOp>,\n\n) -> usize {\n\n let mut start: usize = 0;\n\n let mut cur_len: isize = source.len() as isize;\n\n for edit in edits.drain(..) 
{\n\n target.push_str(&source[start..edit.what.start]);\n\n target_mapping.extend(source_mapping[start..edit.what.start].iter());\n\n start = edit.what.end;\n\n cur_len += match edit.with {\n\n ReplaceTgt::Str(s) => {\n\n add_replace(source_mapping, target, target_mapping, edit.what, &s)\n\n }\n\n ReplaceTgt::Ref(s) => add_replace(source_mapping, target, target_mapping, edit.what, s),\n\n ReplaceTgt::Char(c) => add_replace(\n\n source_mapping,\n", "file_path": "sudachi/src/input_text/buffer/edit.rs", "rank": 56, "score": 134956.76174736873 }, { "content": "fn find_dict_path(py: Python, dict_type: &str) -> PyResult<PathBuf> {\n\n let pyfunc = PyModule::import(py, \"sudachipy\")?.getattr(\"_find_dict_path\")?;\n\n let path = pyfunc\n\n .call1((dict_type,))?\n\n .cast_as::<PyString>()?\n\n .to_str()?;\n\n Ok(PathBuf::from(path))\n\n}\n\n\n", "file_path": "python/src/dictionary.rs", "rank": 57, "score": 133805.6969686145 }, { "content": "/// Check that the first argument is a subcommand and the file with the same name does\n\n/// not exists.\n\n/// If the file does exists, probably it's safer to use default Sudachi analysis mode.\n\npub fn is_build_mode() -> bool {\n\n let mut args = std::env::args_os();\n\n let _ = args.next();\n\n let arg = args.next();\n\n match arg {\n\n Some(x) => {\n\n if !(x == \"build\" || x == \"ubuild\" || x == \"dump\") {\n\n return false;\n\n }\n\n\n\n if Path::new(&x).exists() {\n\n false\n\n } else {\n\n true\n\n }\n\n }\n\n None => false,\n\n }\n\n}\n\n\n", "file_path": "sudachi-cli/src/build.rs", "rank": 58, "score": 131154.1993971764 }, { "content": "fn build_mock_bytes() -> Vec<u8> {\n\n let mut buf = Vec::new();\n\n // encode pos for oov\n\n buf.extend(&(1 as i16).to_le_bytes());\n\n let pos = vec![\"補助記号\", \"一般\", \"*\", \"*\", \"*\", \"*\"];\n\n for s in pos {\n\n let utf16: Vec<_> = s.encode_utf16().collect();\n\n buf.extend(&(utf16.len() as u8).to_le_bytes());\n\n for c in utf16 {\n\n buf.extend(&(c).to_le_bytes());\n\n }\n\n 
}\n\n // set 10 for left and right id sizes\n\n buf.extend(&(10 as i16).to_le_bytes());\n\n buf.extend(&(10 as i16).to_le_bytes());\n\n for i in 0..10 {\n\n for j in 0..10 {\n\n let val = i * 100 + j;\n\n buf.extend(&(val as i16).to_le_bytes());\n\n }\n\n }\n\n\n\n buf\n\n}\n\n\n", "file_path": "sudachi/src/plugin/oov/mecab_oov/tests.rs", "rank": 59, "score": 125048.89438649188 }, { "content": "fn build_mock_grammar(bytes: &[u8]) -> Grammar {\n\n let mut grammar = Grammar::parse(bytes, 0).expect(\"Failed to create grammar\");\n\n grammar.set_character_category(char_cats());\n\n grammar\n\n}\n", "file_path": "sudachi/src/plugin/oov/mecab_oov/tests.rs", "rank": 60, "score": 119062.73729042857 }, { "content": "fn setup() -> (IgnoreYomiganaPlugin, Grammar<'static>) {\n\n let settings = build_mock_setting();\n\n let config = Config::default();\n\n let mut grammar = zero_grammar();\n\n grammar.set_character_category(build_character_category());\n\n let mut plugin = IgnoreYomiganaPlugin::default();\n\n plugin\n\n .set_up(&settings, &config, &grammar)\n\n .expect(\"Failed to setup plugin\");\n\n (plugin, grammar)\n\n}\n\n\n", "file_path": "sudachi/src/plugin/input_text/ignore_yomigana/tests.rs", "rank": 61, "score": 117940.15901721772 }, { "content": "fn extract_plugin_class(val: &Value) -> SudachiResult<&str> {\n\n let obj = match val {\n\n Value::Object(v) => v,\n\n o => {\n\n return Err(SudachiError::ConfigError(ConfigError::InvalidFormat(\n\n format!(\"plugin config must be an object, was {}\", o),\n\n )));\n\n }\n\n };\n\n match obj.get(\"class\") {\n\n Some(Value::String(v)) => Ok(v),\n\n _ => Err(SudachiError::ConfigError(ConfigError::InvalidFormat(\n\n \"plugin config must have 'class' key to indicate plugin SO file\".to_owned(),\n\n ))),\n\n }\n\n}\n\n\n", "file_path": "sudachi/src/plugin/loader.rs", "rank": 62, "score": 116723.11279802117 }, { "content": "fn load_system_dic(cfg: &Config) -> SudachiResult<Storage> {\n\n match &cfg.system_dict {\n\n Some(p) => 
map_file(p).map_err(|e| e.with_context(p.as_os_str().to_string_lossy())),\n\n None => return Err(ConfigError(MissingArgument(String::from(\"system_dict\")))),\n\n }\n\n}\n\nimpl JapaneseDictionary {\n\n /// Creates a dictionary from the specified configuration\n\n /// Dictionaries will be read from disk\n\n pub fn from_cfg(cfg: &Config) -> SudachiResult<JapaneseDictionary> {\n\n let mut sb = SudachiDicData::new(load_system_dic(cfg)?);\n\n\n\n for udic in cfg.user_dicts.iter() {\n\n sb.add_user(\n\n map_file(udic.as_path())\n\n .map_err(|e| e.with_context(udic.as_os_str().to_string_lossy()))?,\n\n )\n\n }\n\n\n\n Self::from_cfg_storage(cfg, sb)\n", "file_path": "sudachi/src/dic/dictionary.rs", "rank": 63, "score": 116723.11279802117 }, { "content": "/// It is a syntax sugar for accessing Morpheme reference\n\n/// Without it binding implementations become much less readable\n\nstruct MorphemeRef<'py> {\n\n #[allow(unused)] // need to keep this around for correct reference count\n\n list: PyRef<'py, PyMorphemeListWrapper>,\n\n morph: Morpheme<'py, Arc<PyDicData>>,\n\n}\n\n\n\nimpl<'py> Deref for MorphemeRef<'py> {\n\n type Target = Morpheme<'py, Arc<PyDicData>>;\n\n\n\n fn deref(&self) -> &Self::Target {\n\n &self.morph\n\n }\n\n}\n\n\n\n#[pyclass(module = \"sudachipy.morpheme\", name = \"Morpheme\")]\n\npub struct PyMorpheme {\n\n list: Py<PyMorphemeListWrapper>,\n\n index: usize,\n\n}\n\n\n", "file_path": "python/src/morpheme.rs", "rank": 64, "score": 116243.09287609752 }, { "content": "#[allow(non_snake_case)]\n\n#[derive(Deserialize)]\n\nstruct PluginSettings {\n\n inhibitPair: Vec<(i16, i16)>,\n\n}\n\n\n\nimpl InhibitConnectionPlugin {\n\n fn inhibit_connection(grammar: &mut Grammar, left: i16, right: i16) {\n\n grammar.set_connect_cost(left, right, Grammar::INHIBITED_CONNECTION);\n\n }\n\n}\n\n\n\nimpl EditConnectionCostPlugin for InhibitConnectionPlugin {\n\n fn set_up(\n\n &mut self,\n\n settings: &Value,\n\n _config: &Config,\n\n _grammar: &Grammar,\n\n ) -> 
SudachiResult<()> {\n\n let settings: PluginSettings = serde_json::from_value(settings.clone())?;\n\n let inhibit_pairs = settings.inhibitPair;\n\n self.inhibit_pairs = inhibit_pairs;\n", "file_path": "sudachi/src/plugin/connect_cost/inhibit_connection.rs", "rank": 65, "score": 114418.9284115184 }, { "content": "/// Accessor trait for right connection id\n\npub trait RightId {\n\n fn right_id(&self) -> u16;\n\n}\n\n\n", "file_path": "sudachi/src/analysis/node.rs", "rank": 66, "score": 114029.07445679468 }, { "content": "#[test]\n\nfn get_part_of_speech_size() {\n\n // pos from system test dict\n\n assert_eq!(8, GRAMMAR.pos_list.len());\n\n\n\n // user test dict contains another pos\n\n let tokenizer = TestTokenizer::new();\n\n assert_eq!(9, tokenizer.dict().grammar().pos_list.len());\n\n}\n\n\n", "file_path": "sudachi/tests/dictionary.rs", "rank": 67, "score": 112292.50585547936 }, { "content": "#[allow(non_snake_case)]\n\n#[derive(Deserialize)]\n\nstruct PluginSettings {\n\n enableNormalize: Option<bool>,\n\n}\n\n\n\nimpl JoinNumericPlugin {\n\n fn concat(\n\n &self,\n\n mut path: Vec<ResultNode>,\n\n begin: usize,\n\n end: usize,\n\n parser: &mut NumericParser,\n\n ) -> SudachiResult<Vec<ResultNode>> {\n\n let word_info = path[begin].word_info();\n\n\n\n if word_info.pos_id() != self.numeric_pos_id {\n\n return Ok(path);\n\n }\n\n\n\n if self.enable_normalize {\n\n let normalized_form = parser.get_normalized();\n", "file_path": "sudachi/src/plugin/path_rewrite/join_numeric/mod.rs", "rank": 68, "score": 112218.6154955839 }, { "content": "pub trait ReadLE {\n\n fn from_le_bytes(bytes: &[u8]) -> Result<Self, TryFromSliceError>\n\n where\n\n Self: Sized;\n\n}\n\n\n\nimpl ReadLE for i16 {\n\n fn from_le_bytes(bytes: &[u8]) -> Result<Self, TryFromSliceError> {\n\n bytes.try_into().map(|b| Self::from_le_bytes(b))\n\n }\n\n}\n\n\n\nimpl ReadLE for u32 {\n\n fn from_le_bytes(bytes: &[u8]) -> Result<Self, TryFromSliceError>\n\n where\n\n Self: Sized,\n\n {\n\n 
bytes.try_into().map(|b| Self::from_le_bytes(b))\n\n }\n\n}\n", "file_path": "sudachi/src/util/cow_array.rs", "rank": 69, "score": 112211.88573281677 }, { "content": "#[test]\n\nfn get_part_of_speech_string() {\n\n let pos = &GRAMMAR.pos_list[0];\n\n assert!(!pos.is_empty());\n\n assert_eq!(\"助動詞\", pos[0]);\n\n}\n\n\n\n// fn creat_with_merging_settings\n\n// fn creat_with_merging_null_settings\n", "file_path": "sudachi/tests/dictionary.rs", "rank": 70, "score": 112180.41855798913 }, { "content": "#[allow(non_snake_case)]\n\n#[derive(Deserialize)]\n\nstruct PluginSettings {\n\n oovPOS: Vec<String>,\n\n minLength: usize,\n\n}\n\n\n\nimpl JoinKatakanaOovPlugin {\n\n fn is_katakana_node<T: InputTextIndex>(&self, text: &T, node: &ResultNode) -> bool {\n\n text.cat_of_range(node.begin()..node.end())\n\n .contains(CategoryType::KATAKANA)\n\n }\n\n\n\n // fn is_one_char(&self, text: &Utf8InputText, node: &Node) -> bool {\n\n // let b = node.begin;\n\n // b + text.get_code_points_offset_length(b, 1) == node.end\n\n // }\n\n\n\n fn can_oov_bow_node<T: InputTextIndex>(&self, text: &T, node: &ResultNode) -> bool {\n\n !text\n\n .cat_at_char(node.begin())\n\n .contains(CategoryType::NOOOVBOW)\n", "file_path": "sudachi/src/plugin/path_rewrite/join_katakana_oov/mod.rs", "rank": 71, "score": 110631.36609599454 }, { "content": "/// Trait of plugin to rewrite the path from lattice\n\npub trait PathRewritePlugin: Sync + Send {\n\n /// Loads necessary information for the plugin\n\n fn set_up(&mut self, settings: &Value, config: &Config, grammar: &Grammar)\n\n -> SudachiResult<()>;\n\n\n\n /// Returns a rewritten path\n\n fn rewrite(\n\n &self,\n\n text: &InputBuffer,\n\n path: Vec<ResultNode>,\n\n lattice: &Lattice,\n\n ) -> SudachiResult<Vec<ResultNode>>;\n\n}\n\n\n\nimpl PluginCategory for dyn PathRewritePlugin {\n\n type BoxType = Box<dyn PathRewritePlugin + Sync + Send>;\n\n type InitFnType = unsafe fn() -> SudachiResult<Self::BoxType>;\n\n fn configurations(cfg: &Config) -> 
&[Value] {\n\n &cfg.path_rewrite_plugins\n\n }\n", "file_path": "sudachi/src/plugin/path_rewrite/mod.rs", "rank": 72, "score": 110467.17642293547 }, { "content": "/// Trait of plugin to edit connection cost in the grammar\n\npub trait EditConnectionCostPlugin: Sync + Send {\n\n /// Loads necessary information for the plugin\n\n fn set_up(&mut self, settings: &Value, config: &Config, grammar: &Grammar)\n\n -> SudachiResult<()>;\n\n\n\n /// Edits the grammar\n\n fn edit(&self, grammar: &mut Grammar);\n\n}\n\n\n\nimpl PluginCategory for dyn EditConnectionCostPlugin {\n\n type BoxType = Box<dyn EditConnectionCostPlugin + Sync + Send>;\n\n type InitFnType = unsafe fn() -> SudachiResult<Self::BoxType>;\n\n\n\n fn configurations(cfg: &Config) -> &[Value] {\n\n &cfg.connection_cost_plugins\n\n }\n\n\n\n fn bundled_impl(name: &str) -> Option<Self::BoxType> {\n\n match name {\n\n \"InhibitConnectionPlugin\" => Some(Box::new(InhibitConnectionPlugin::default())),\n", "file_path": "sudachi/src/plugin/connect_cost/mod.rs", "rank": 73, "score": 109778.40314408802 }, { "content": "#[test]\n\nfn new_build() {\n\n let mut buffer = InputBuffer::new();\n\n buffer.reset().push_str(\"宇宙人\");\n\n buffer.start_build().expect(\"does not fail\");\n\n assert_eq!(buffer.current(), \"宇宙人\")\n\n}\n\n\n", "file_path": "sudachi/src/input_text/buffer/test_basic.rs", "rank": 74, "score": 108828.17902897467 }, { "content": "#[test]\n\nfn get_eos_with_non_break_checker() {\n\n let text = \"ばな。なです。\";\n\n let tokenizer = TestTokenizer::new();\n\n let lexicon = tokenizer.dict().lexicon();\n\n let checker = NonBreakChecker::new(lexicon);\n\n\n\n let sd = SentenceDetector::new();\n\n assert_eq!(sd.get_eos(text, Some(&checker)).unwrap(), 21);\n\n}\n", "file_path": "sudachi/tests/sentence_detector.rs", "rank": 75, "score": 108828.17902897467 }, { "content": "#[test]\n\nfn digit_1() {\n\n let plugin = build_plugin();\n\n let text = build_text(\"123円20銭\");\n\n let path = vec![\n\n build_node_num(\"1\", 
\"1\", 0, 0),\n\n build_node_num(\"2\", \"2\", 1, 1),\n\n build_node_num(\"3\", \"3\", 2, 2),\n\n build_node_oov(\"円\", \"円\", 3, 3),\n\n build_node_num(\"2\", \"2\", 4, 6),\n\n build_node_num(\"0\", \"0\", 5, 7),\n\n build_node_oov(\"銭\", \"銭\", 6, 8),\n\n ];\n\n let path = plugin\n\n .rewrite(&text, path, &Lattice::default())\n\n .expect(\"Failed to rewrite path\");\n\n assert_eq!(4, path.len());\n\n assert_eq!(\"123\", path[0].word_info().surface());\n\n assert_eq!(\"20\", path[2].word_info().surface());\n\n}\n\n\n", "file_path": "sudachi/src/plugin/path_rewrite/join_numeric/test.rs", "rank": 76, "score": 108205.00325705734 }, { "content": "#[test]\n\nfn comma() {\n\n let plugin = build_plugin();\n\n let text = build_text(\"2,00,000,000円\");\n\n let path = vec![\n\n build_node_num(\"2\", \"2\", 0, 0),\n\n build_node_oov(\",\", \",\", 1, 1),\n\n build_node_num(\"0\", \"0\", 2, 2),\n\n build_node_num(\"0\", \"0\", 3, 3),\n\n build_node_oov(\",\", \",\", 4, 4),\n\n build_node_num(\"0\", \"0\", 5, 5),\n\n build_node_num(\"0\", \"0\", 6, 6),\n\n build_node_num(\"0\", \"0\", 7, 7),\n\n build_node_oov(\",\", \",\", 8, 8),\n\n build_node_num(\"0\", \"0\", 9, 9),\n\n build_node_num(\"0\", \"0\", 10, 10),\n\n build_node_num(\"0\", \"0\", 11, 11),\n\n build_node_oov(\"円\", \"円\", 12, 12),\n\n ];\n\n let path = plugin\n\n .rewrite(&text, path, &Lattice::default())\n", "file_path": "sudachi/src/plugin/path_rewrite/join_numeric/test.rs", "rank": 77, "score": 108205.00325705734 }, { "content": "#[test]\n\nfn normalize() {\n\n let plugin = build_plugin();\n\n let text = build_text(\"一二三万二千円\");\n\n let path = vec![\n\n build_node_num(\"一\", \"一\", 0, 0),\n\n build_node_num(\"二\", \"二\", 1, 3),\n\n build_node_num(\"三\", \"三\", 2, 6),\n\n build_node_oov(\"万\", \"万\", 3, 9),\n\n build_node_num(\"二\", \"二\", 4, 12),\n\n build_node_oov(\"千\", \"千\", 5, 15),\n\n build_node_oov(\"円\", \"円\", 6, 18),\n\n ];\n\n let path = plugin\n\n .rewrite(&text, path, &Lattice::default())\n\n 
.expect(\"Failed to rewrite path\");\n\n assert_eq!(2, path.len());\n\n assert_eq!(\"1232000\", path[0].word_info().normalized_form());\n\n}\n\n\n", "file_path": "sudachi/src/plugin/path_rewrite/join_numeric/test.rs", "rank": 78, "score": 108205.00325705734 }, { "content": "#[test]\n\nfn point() {\n\n let plugin = build_plugin();\n\n let text = build_text(\"1.002\");\n\n let path = vec![\n\n build_node_num(\"1\", \"1\", 0, 0),\n\n build_node_oov(\".\", \".\", 1, 1),\n\n build_node_num(\"0\", \"0\", 2, 2),\n\n build_node_num(\"0\", \"0\", 3, 3),\n\n build_node_num(\"2\", \"2\", 4, 4),\n\n ];\n\n let path = plugin\n\n .rewrite(&text, path, &Lattice::default())\n\n .expect(\"Failed to rewrite path\");\n\n assert_eq!(1, path.len());\n\n assert_eq!(\"1.002\", path[0].word_info().normalized_form());\n\n\n\n let text = build_text(\".002\");\n\n let path = vec![\n\n build_node_oov(\".\", \".\", 0, 0),\n\n build_node_num(\"0\", \"0\", 1, 1),\n", "file_path": "sudachi/src/plugin/path_rewrite/join_numeric/test.rs", "rank": 79, "score": 108205.00325705734 }, { "content": "#[test]\n\nfn digit_2() {\n\n let plugin = build_plugin();\n\n let text = build_text(\"080-121\");\n\n let path = vec![\n\n build_node_num(\"0\", \"0\", 0, 0),\n\n build_node_num(\"8\", \"8\", 1, 1),\n\n build_node_num(\"0\", \"0\", 2, 2),\n\n build_node_oov(\"-\", \"-\", 3, 3),\n\n build_node_num(\"1\", \"1\", 4, 4),\n\n build_node_num(\"2\", \"2\", 5, 5),\n\n build_node_num(\"1\", \"1\", 6, 6),\n\n ];\n\n let path = plugin\n\n .rewrite(&text, path, &Lattice::default())\n\n .expect(\"Failed to rewrite path\");\n\n assert_eq!(3, path.len());\n\n assert_eq!(\"080\", path[0].word_info().surface());\n\n assert_eq!(\"121\", path[2].word_info().surface());\n\n}\n\n\n", "file_path": "sudachi/src/plugin/path_rewrite/join_numeric/test.rs", "rank": 80, "score": 108205.00325705734 }, { "content": "pub trait LatticeNode: RightId {\n\n fn begin(&self) -> usize;\n\n fn end(&self) -> usize;\n\n fn cost(&self) -> i16;\n\n 
fn word_id(&self) -> WordId;\n\n fn left_id(&self) -> u16;\n\n\n\n /// Is true when the word does not come from the dictionary.\n\n /// BOS and EOS are also treated as OOV.\n\n #[inline]\n\n fn is_oov(&self) -> bool {\n\n self.word_id().is_oov()\n\n }\n\n\n\n /// If a node is a special system node like BOS or EOS.\n\n /// Java name isSystem (which is similar to a regular node coming from the system dictionary)\n\n #[inline]\n\n fn is_special_node(&self) -> bool {\n\n self.word_id().is_special()\n\n }\n", "file_path": "sudachi/src/analysis/node.rs", "rank": 81, "score": 107535.6867593023 }, { "content": "#[test]\n\nfn kanji_numeric() {\n\n let plugin = build_plugin();\n\n let text = build_text(\"一二三万二千円\");\n\n let path = vec![\n\n build_node_num(\"一\", \"一\", 0, 0),\n\n build_node_num(\"二\", \"二\", 1, 3),\n\n build_node_num(\"三\", \"三\", 2, 6),\n\n build_node_oov(\"万\", \"万\", 3, 9),\n\n build_node_num(\"二\", \"二\", 4, 12),\n\n build_node_oov(\"千\", \"千\", 5, 15),\n\n build_node_oov(\"円\", \"円\", 6, 18),\n\n ];\n\n let path = plugin\n\n .rewrite(&text, path, &Lattice::default())\n\n .expect(\"Failed to rewrite path\");\n\n assert_eq!(2, path.len());\n\n assert_eq!(\"一二三万二千\", path[0].word_info().surface());\n\n\n\n let text = build_text(\"二百百\");\n\n let path = vec![\n\n build_node_num(\"二\", \"二\", 0, 0),\n\n build_node_oov(\"百\", \"百\", 1, 3),\n\n build_node_oov(\"百\", \"百\", 2, 6),\n\n ];\n\n let path = plugin\n\n .rewrite(&text, path, &Lattice::default())\n\n .expect(\"Failed to rewrite path\");\n\n assert_eq!(3, path.len());\n\n}\n\n\n", "file_path": "sudachi/src/plugin/path_rewrite/join_numeric/test.rs", "rank": 82, "score": 106576.54633812042 }, { "content": "#[test]\n\nfn single_node() {\n\n let mut plugin = build_plugin();\n\n let text = build_text(\"猫三匹\");\n\n let _path = vec![\n\n build_node_oov(\"猫\", \"猫\", 0, 0),\n\n build_node_num(\"三\", \"三\", 1, 3),\n\n build_node_oov(\"匹\", \"匹\", 2, 6),\n\n ];\n\n\n\n let path = plugin\n\n .rewrite(&text, 
_path.clone(), &Lattice::default())\n\n .expect(\"Failed to rewrite path\");\n\n assert_eq!(3, path.len());\n\n assert_eq!(\"3\", path[1].word_info().normalized_form());\n\n\n\n plugin.enable_normalize = false;\n\n let path = plugin\n\n .rewrite(&text, _path.clone(), &Lattice::default())\n\n .expect(\"Failed to rewrite path\");\n\n assert_eq!(3, path.len());\n\n assert_eq!(\"三\", path[1].word_info().normalized_form());\n\n}\n\n\n", "file_path": "sudachi/src/plugin/path_rewrite/join_numeric/test.rs", "rank": 83, "score": 106576.54633812042 }, { "content": "#[test]\n\nfn normalized_with_not_numeric() {\n\n let plugin = build_plugin();\n\n let text = build_text(\"六三四\");\n\n // 六三四 is in the dictionary\n\n let path = vec![build_node_oov(\"六三四\", \"六三四\", 0, 0)];\n\n let path = plugin\n\n .rewrite(&text, path, &Lattice::default())\n\n .expect(\"Failed to rewrite path\");\n\n assert_eq!(1, path.len());\n\n assert_eq!(\"六三四\", path[0].word_info().normalized_form());\n\n}\n\n\n", "file_path": "sudachi/src/plugin/path_rewrite/join_numeric/test.rs", "rank": 84, "score": 106576.54633812042 }, { "content": "#[test]\n\nfn with_noovbow() {\n\n let mut plugin = JoinKatakanaOovPlugin::default();\n\n plugin.min_length = 3;\n\n\n\n let text = build_text(\"ァアイアイウ\");\n\n\n\n let path = vec![\n\n build_node_oov(0, 3, 6447, \"ァ\"),\n\n build_node_aiu(3, 9, 13969),\n\n build_node_ai(9, 18, 20985),\n\n ];\n\n let path = plugin\n\n .rewrite(&text, path, &Lattice::default())\n\n .expect(\"Failed to rewrite path\");\n\n assert_eq!(2, path.len());\n\n assert_eq!(\"ァ\", path[0].word_info().surface());\n\n\n\n let text = build_text(\"アイウァアイウ\");\n\n let path = vec![\n\n build_node_aiu(0, 9, 5562),\n\n build_node_oov(9, 12, 13613, \"ァ\"),\n\n build_node_aiu(12, 21, 21135),\n\n ];\n\n let path = plugin\n\n .rewrite(&text, path, &Lattice::default())\n\n .expect(\"Failed to rewrite path\");\n\n assert_eq!(1, path.len());\n\n}\n\n\n", "file_path": 
"sudachi/src/plugin/path_rewrite/join_katakana_oov/tests.rs", "rank": 85, "score": 106576.54633812042 }, { "content": "#[test]\n\nfn provide_oov_without_cinfo() {\n\n let plugin = build_plugin();\n\n let text = input_text(\"あいうeo\");\n\n let mut nodes: Vec<Node> = vec![];\n\n\n\n plugin\n\n .provide_oov(&text, 0, false, &mut nodes)\n\n .expect(\"Failed to generate oovs\");\n\n assert!(nodes.is_empty());\n\n}\n\n\n", "file_path": "sudachi/src/plugin/oov/mecab_oov/tests.rs", "rank": 86, "score": 105605.04034682932 }, { "content": "#[test]\n\nfn replacement_works() {\n\n let plugin = test_plugin();\n\n let mut buffer = InputBuffer::from(\"ウ゛\");\n\n plugin.rewrite(&mut buffer).expect(\"rewrite failed\");\n\n assert_eq!(buffer.current(), \"ヴ\");\n\n}\n\n\n", "file_path": "sudachi/src/plugin/input_text/default_input_text/tests.rs", "rank": 87, "score": 105574.3550019561 }, { "content": "#[test]\n\nfn nfkc_works() {\n\n let plugin = test_plugin();\n\n let mut buffer = InputBuffer::from(\"ひBら①がⅢな\");\n\n plugin.rewrite(&mut buffer).expect(\"rewrite failed\");\n\n assert_eq!(buffer.current(), \"ひbら1がⅲな\");\n\n}\n\n\n", "file_path": "sudachi/src/plugin/input_text/default_input_text/tests.rs", "rank": 88, "score": 105574.3550019561 }, { "content": "fn is_aligned(offset: usize, alignment: usize) -> bool {\n\n debug_assert!(alignment.is_power_of_two());\n\n offset % alignment == 0\n\n}\n\n\n", "file_path": "sudachi/src/util/cow_array.rs", "rank": 89, "score": 105137.7130082075 }, { "content": "#[test]\n\nfn start_with_tail() {\n\n let mut plugin = JoinKatakanaOovPlugin::default();\n\n let text = build_text(\"アイウアイウアイ\");\n\n let path = vec![\n\n build_node_aiu(0, 9, 5562),\n\n build_node_aiu(9, 18, 12578),\n\n build_node_ai(18, 24, 19594),\n\n ];\n\n\n\n plugin.min_length = 3;\n\n let path = plugin\n\n .rewrite(&text, path, &Lattice::default())\n\n .expect(\"Failed to rewrite path\");\n\n assert_eq!(1, path.len());\n\n}\n\n\n", "file_path": 
"sudachi/src/plugin/path_rewrite/join_katakana_oov/tests.rs", "rank": 90, "score": 105013.74007261587 }, { "content": "#[test]\n\nfn part_of_speech() {\n\n let mut plugin = JoinKatakanaOovPlugin::default();\n\n let text = build_text(\"アイアイウ\");\n\n let path = vec![build_node_ai(0, 6, 5562), build_node_aiu(6, 15, 12578)];\n\n\n\n plugin.min_length = 3;\n\n let path = plugin\n\n .rewrite(&text, path, &Lattice::default())\n\n .expect(\"Failed to rewrite path\");\n\n assert_eq!(1, path.len());\n\n assert!(!path[0].is_oov());\n\n}\n\n\n", "file_path": "sudachi/src/plugin/path_rewrite/join_katakana_oov/tests.rs", "rank": 91, "score": 105013.74007261587 }, { "content": "#[test]\n\nfn start_with_middle() {\n\n let mut plugin = JoinKatakanaOovPlugin::default();\n\n let text = build_text(\"アイウアイアイウ\");\n\n let path = vec![\n\n build_node_aiu(0, 9, 5562),\n\n build_node_ai(9, 15, 12578),\n\n build_node_aiu(15, 24, 19594),\n\n ];\n\n\n\n plugin.min_length = 3;\n\n let path = plugin\n\n .rewrite(&text, path, &Lattice::default())\n\n .expect(\"Failed to rewrite path\");\n\n assert_eq!(1, path.len());\n\n}\n\n\n", "file_path": "sudachi/src/plugin/path_rewrite/join_katakana_oov/tests.rs", "rank": 92, "score": 105013.74007261587 }, { "content": "#[test]\n\nfn katakana_length() {\n\n let mut plugin = JoinKatakanaOovPlugin::default();\n\n let text = build_text(\"アイアイウ\");\n\n let _path = vec![build_node_ai(0, 6, 5562), build_node_aiu(6, 15, 12578)];\n\n\n\n plugin.min_length = 0;\n\n let path = plugin\n\n .rewrite(&text, _path.clone(), &Lattice::default())\n\n .expect(\"Failed to rewrite path\");\n\n assert_eq!(2, path.len());\n\n\n\n plugin.min_length = 1;\n\n let path = plugin\n\n .rewrite(&text, _path.clone(), &Lattice::default())\n\n .expect(\"Failed to rewrite path\");\n\n assert_eq!(2, path.len());\n\n\n\n plugin.min_length = 2;\n\n let path = plugin\n\n .rewrite(&text, _path.clone(), &Lattice::default())\n\n .expect(\"Failed to rewrite path\");\n\n assert_eq!(2, 
path.len());\n\n\n\n plugin.min_length = 3;\n\n let path = plugin\n\n .rewrite(&text, _path.clone(), &Lattice::default())\n\n .expect(\"Failed to rewrite path\");\n\n assert_eq!(1, path.len());\n\n}\n\n\n", "file_path": "sudachi/src/plugin/path_rewrite/join_katakana_oov/tests.rs", "rank": 93, "score": 105013.74007261587 }, { "content": "#[test]\n\nfn provide_oov_without_oov_list() {\n\n let mut plugin = build_plugin();\n\n plugin.categories.insert(\n\n CategoryType::ALPHA,\n\n CategoryInfo {\n\n category_type: CategoryType::ALPHA,\n\n is_invoke: false,\n\n is_group: true,\n\n length: 0,\n\n },\n\n );\n\n let text = input_text(\"あいうeo\");\n\n let mut nodes: Vec<Node> = vec![];\n\n\n\n plugin\n\n .provide_oov(&text, 0, false, &mut nodes)\n\n .expect(\"Failed to generate oovs\");\n\n assert!(nodes.is_empty());\n\n}\n\n\n", "file_path": "sudachi/src/plugin/oov/mecab_oov/tests.rs", "rank": 94, "score": 104092.30963474016 }, { "content": "#[test]\n\nfn full_normalization_works() {\n\n let plugin = test_plugin();\n\n let mut buffer = InputBuffer::from(ORIGINAL_TEXT);\n\n plugin.rewrite(&mut buffer).expect(\"rewrite failed\");\n\n assert_eq!(buffer.current(), NORMALIZED_TEXT);\n\n}\n\n\n", "file_path": "sudachi/src/plugin/input_text/default_input_text/tests.rs", "rank": 95, "score": 104062.23060253504 }, { "content": "#[test]\n\nfn lowercasing_works_difficult() {\n\n let plugin = test_plugin();\n\n let mut buffer = InputBuffer::from(\"ひらİがẞなΣ\");\n\n plugin.rewrite(&mut buffer).expect(\"rewrite failed\");\n\n assert_eq!(buffer.current(), \"ひらi\\u{307}がßなσ\");\n\n}\n\n\n", "file_path": "sudachi/src/plugin/input_text/default_input_text/tests.rs", "rank": 96, "score": 104062.23060253504 }, { "content": "#[test]\n\nfn lowercasing_works_simple() {\n\n let plugin = test_plugin();\n\n let mut buffer = InputBuffer::from(\"ひЗДらTESTがЕСЬな\");\n\n plugin.rewrite(&mut buffer).expect(\"rewrite failed\");\n\n assert_eq!(buffer.current(), \"ひздらtestがесьな\");\n\n}\n\n\n", "file_path": 
"sudachi/src/plugin/input_text/default_input_text/tests.rs", "rank": 97, "score": 104062.23060253504 }, { "content": "/*\n\n * Copyright (c) 2021 Works Applications Co., Ltd.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\n#[derive(Debug)]\n\npub struct StringNumber {\n\n significand: String,\n\n scale: usize,\n", "file_path": "sudachi/src/plugin/path_rewrite/join_numeric/numeric_parser/string_number.rs", "rank": 98, "score": 126.64033649934123 }, { "content": "/*\n\n * Copyright (c) 2021 Works Applications Co., Ltd.\n\n *\n\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n\n * you may not use this file except in compliance with the License.\n\n * You may obtain a copy of the License at\n\n *\n\n * http://www.apache.org/licenses/LICENSE-2.0\n\n *\n\n * Unless required by applicable law or agreed to in writing, software\n\n * distributed under the License is distributed on an \"AS IS\" BASIS,\n\n * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n\n * See the License for the specific language governing permissions and\n\n * limitations under the License.\n\n */\n\n\n\nuse std::fs::File;\n\nuse std::path::Path;\n\n\n\nuse memmap2::Mmap;\n", "file_path": "sudachi/src/dic/dictionary.rs", "rank": 99, "score": 124.12102174864646 } ]
Rust
miri/bin/miri.rs
oli-obk/miri
4654d6d04e2b2bf1fc72f6fe5a3c4353c560d2c6
#![feature(rustc_private, i128_type)] extern crate getopts; extern crate miri; extern crate rustc; extern crate rustc_driver; extern crate rustc_errors; extern crate env_logger; extern crate log_settings; extern crate syntax; extern crate log; use rustc::session::Session; use rustc::middle::cstore::CrateStore; use rustc_driver::{Compilation, CompilerCalls, RustcDefaultCalls}; use rustc_driver::driver::{CompileState, CompileController}; use rustc::session::config::{self, Input, ErrorOutputType}; use rustc::hir::{self, itemlikevisit}; use rustc::ty::TyCtxt; use syntax::ast::{self, MetaItemKind, NestedMetaItemKind}; use std::path::PathBuf; struct MiriCompilerCalls { default: RustcDefaultCalls, } impl<'a> CompilerCalls<'a> for MiriCompilerCalls { fn early_callback( &mut self, matches: &getopts::Matches, sopts: &config::Options, cfg: &ast::CrateConfig, descriptions: &rustc_errors::registry::Registry, output: ErrorOutputType, ) -> Compilation { self.default.early_callback( matches, sopts, cfg, descriptions, output, ) } fn no_input( &mut self, matches: &getopts::Matches, sopts: &config::Options, cfg: &ast::CrateConfig, odir: &Option<PathBuf>, ofile: &Option<PathBuf>, descriptions: &rustc_errors::registry::Registry, ) -> Option<(Input, Option<PathBuf>)> { self.default.no_input( matches, sopts, cfg, odir, ofile, descriptions, ) } fn late_callback( &mut self, matches: &getopts::Matches, sess: &Session, cstore: &CrateStore, input: &Input, odir: &Option<PathBuf>, ofile: &Option<PathBuf>, ) -> Compilation { self.default.late_callback(matches, sess, cstore, input, odir, ofile) } fn build_controller( &mut self, sess: &Session, matches: &getopts::Matches, ) -> CompileController<'a> { let mut control = self.default.build_controller(sess, matches); control.after_hir_lowering.callback = Box::new(after_hir_lowering); control.after_analysis.callback = Box::new(after_analysis); if sess.target.target != sess.host { control.after_analysis.stop = Compilation::Stop; } control } } fn 
after_hir_lowering(state: &mut CompileState) { let attr = ( String::from("miri"), syntax::feature_gate::AttributeType::Whitelisted, ); state.session.plugin_attributes.borrow_mut().push(attr); } fn after_analysis<'a, 'tcx>(state: &mut CompileState<'a, 'tcx>) { state.session.abort_if_errors(); let tcx = state.tcx.unwrap(); let limits = resource_limits_from_attributes(state); if std::env::args().any(|arg| arg == "--test") { struct Visitor<'a, 'tcx: 'a>( miri::ResourceLimits, TyCtxt<'a, 'tcx, 'tcx>, &'a CompileState<'a, 'tcx> ); impl<'a, 'tcx: 'a, 'hir> itemlikevisit::ItemLikeVisitor<'hir> for Visitor<'a, 'tcx> { fn visit_item(&mut self, i: &'hir hir::Item) { if let hir::Item_::ItemFn(_, _, _, _, _, body_id) = i.node { if i.attrs.iter().any(|attr| { attr.name().map_or(false, |n| n == "test") }) { let did = self.1.hir.body_owner_def_id(body_id); println!( "running test: {}", self.1.def_path_debug_str(did), ); miri::eval_main(self.1, did, None, self.0); self.2.session.abort_if_errors(); } } } fn visit_trait_item(&mut self, _trait_item: &'hir hir::TraitItem) {} fn visit_impl_item(&mut self, _impl_item: &'hir hir::ImplItem) {} } state.hir_crate.unwrap().visit_all_item_likes( &mut Visitor(limits, tcx, state), ); } else if let Some((entry_node_id, _)) = *state.session.entry_fn.borrow() { let entry_def_id = tcx.hir.local_def_id(entry_node_id); let start_wrapper = tcx.lang_items().start_fn().and_then(|start_fn| { if tcx.is_mir_available(start_fn) { Some(start_fn) } else { None } }); miri::eval_main(tcx, entry_def_id, start_wrapper, limits); state.session.abort_if_errors(); } else { println!("no main function found, assuming auxiliary build"); } } fn resource_limits_from_attributes(state: &CompileState) -> miri::ResourceLimits { let mut limits = miri::ResourceLimits::default(); let krate = state.hir_crate.as_ref().unwrap(); let err_msg = "miri attributes need to be in the form `miri(key = value)`"; let extract_int = |lit: &syntax::ast::Lit| -> u128 { match lit.node { 
syntax::ast::LitKind::Int(i, _) => i, _ => { state.session.span_fatal( lit.span, "expected an integer literal", ) } } }; for attr in krate.attrs.iter().filter(|a| { a.name().map_or(false, |n| n == "miri") }) { if let Some(items) = attr.meta_item_list() { for item in items { if let NestedMetaItemKind::MetaItem(ref inner) = item.node { if let MetaItemKind::NameValue(ref value) = inner.node { match &inner.name().as_str()[..] { "memory_size" => limits.memory_size = extract_int(value) as u64, "step_limit" => limits.step_limit = extract_int(value) as u64, "stack_limit" => limits.stack_limit = extract_int(value) as usize, _ => state.session.span_err(item.span, "unknown miri attribute"), } } else { state.session.span_err(inner.span, err_msg); } } else { state.session.span_err(item.span, err_msg); } } } else { state.session.span_err(attr.span, err_msg); } } limits } fn init_logger() { let format = |record: &log::LogRecord| { if record.level() == log::LogLevel::Trace { let indentation = log_settings::settings().indentation; format!( "{indentation}:{lvl}:{module}: {text}", lvl = record.level(), module = record.location().module_path(), indentation = indentation, text = record.args(), ) } else { format!( "{lvl}:{module}: {text}", lvl = record.level(), module = record.location().module_path(), text = record.args(), ) } }; let mut builder = env_logger::LogBuilder::new(); builder.format(format).filter( None, log::LogLevelFilter::Info, ); if std::env::var("MIRI_LOG").is_ok() { builder.parse(&std::env::var("MIRI_LOG").unwrap()); } builder.init().unwrap(); } fn find_sysroot() -> String { if let Ok(sysroot) = std::env::var("MIRI_SYSROOT") { return sysroot; } let home = option_env!("RUSTUP_HOME").or(option_env!("MULTIRUST_HOME")); let toolchain = option_env!("RUSTUP_TOOLCHAIN").or(option_env!("MULTIRUST_TOOLCHAIN")); match (home, toolchain) { (Some(home), Some(toolchain)) => format!("{}/toolchains/{}", home, toolchain), _ => { option_env!("RUST_SYSROOT") .expect( "need to specify 
RUST_SYSROOT env var or use rustup or multirust", ) .to_owned() } } } fn main() { init_logger(); let mut args: Vec<String> = std::env::args().collect(); let sysroot_flag = String::from("--sysroot"); if !args.contains(&sysroot_flag) { args.push(sysroot_flag); args.push(find_sysroot()); } args.push("-Zalways-encode-mir".to_owned()); rustc_driver::run_compiler(&args, &mut MiriCompilerCalls { default: RustcDefaultCalls, }, None, None); }
#![feature(rustc_private, i128_type)] extern crate getopts; extern crate miri; extern crate rustc; extern crate rustc_driver; extern crate rustc_errors; extern crate env_logger; extern crate log_settings; extern crate syntax; extern crate log; use rustc::session::Session; use rustc::middle::cstore::CrateStore; use rustc_driver::{Compilation, CompilerCalls, RustcDefaultCalls}; use rustc_driver::driver::{CompileState, CompileController}; use rustc::session::config::{self, Input, ErrorOutputType}; use rustc::hir::{self, itemlikevisit}; use rustc::ty::TyCtxt; use syntax::ast::{self, MetaItemKind, NestedMetaItemKind}; use std::path::PathBuf; struct MiriCompilerCalls { default: RustcDefaultCalls, } impl<'a> CompilerCalls<'a> for MiriCompilerCalls { fn early_callback( &mut self, matches: &getopts::Matches, sopts: &config::Options, cfg: &ast::CrateConfig, descriptions: &rustc_errors::registry::Registry, output: ErrorOutputType, ) -> Compilation { self.default.early_callback( matches, sopts, cfg, descriptions, output, ) } fn no_input( &mut self, matches: &getopts::Matches, sopts: &config::Options, cfg: &ast::CrateConfig, odir: &Option<PathBuf>, ofile: &Option<PathBuf>, descriptions: &rustc_errors::registry::Registry, ) -> Option<(Input, Option<PathBuf>)> { self.default.no_input( matches, sopts, cfg, odir, ofile, descriptions, ) } fn late_callback( &mut self, matches: &getopts::Matches, sess: &Session, cstore: &CrateStore, input: &Input, odir: &Option<PathBuf>, ofile: &Option<PathBuf>, ) -> Compilation { self.default.late_callback(matches, sess, cstore, input, odir, ofile) } fn build_controller( &mut self, sess: &Session, matches: &getopts::Matches, ) -> CompileController<'a> { let mut control = self.default.build_controller(sess, matches); control.after_hir_lowering.callback = Box::new(after_hir_lowering); control.after_analysis.callback = Box::new(after_analysis); if sess.target.target != sess.host { control.after_analysis.stop = Compilation::Stop; } control } } fn 
after_hir_lowering(state: &mut CompileState) { let attr = ( String::from("miri"), syntax::feature_gate::AttributeType::Whitelisted, ); state.session.plugin_attributes.borrow_mut().push(attr); } fn after_analysis<'a, 'tcx>(state: &mut CompileState<'a, 'tcx>) { state.session.abort_if_errors(); let tcx = state.tcx.unwrap(); let limits = resource_limits_from_attributes(state); if std::env::args().any(|arg| arg == "--test") { struct Visitor<'a, 'tcx: 'a>( miri::ResourceLimits, TyCtxt<'a, 'tcx, 'tcx>, &'a CompileState<'a, 'tcx> ); impl<'a, 'tcx: 'a, 'hir> itemlikevisit::ItemLikeVisitor<'hir> for Visitor<'a, 'tcx> { fn visit_item(&mut self, i: &'hir hir::Item) { if let hir::Item_::ItemFn(_, _, _, _, _, body_id) = i.node { if i.attrs.iter().any(|attr| { attr.name().map_or(false, |n| n == "test") }) { let did = self.1.hir.body_owner_def_id(body_id); println!( "running test: {}", self.1.def_path_debug_str(did), ); miri::eval_main(self.1, did, None, self.0); self.2.session.abort_if_errors(); } } } fn visit_trait_item(&mut self, _trait_item: &'hir hir::TraitItem) {} fn visit_impl_item(&mut self, _impl_item: &'hir hir::ImplItem) {} } state.hir_crate.unwrap().visit_all_item_likes( &mut Visitor(limits, tcx, state), ); } else if let Some((entry_node_id, _)) = *state.session.entry_fn.borrow() { let entry_def_id = tcx.hir.local_def_id(entry_node_id); let start_wrapper = tcx.lang_items().start_fn().and_then(|start_fn| { if tcx.is_mir_available(start_fn) { Some(start_fn) } else { None } }); miri::eval_main(tcx, entry_def_id, start_wrapper, limits); state.session.abort_if_errors(); } else { println!("no main function found, assuming auxiliary build"); } } fn resource_limits_from_attributes(state: &CompileState) -> miri::ResourceLimits { let mut limits = miri::ResourceLimits::default(); let krate = state.hir_crate.as_ref().unwrap(); let err_msg = "miri attributes need to be in the form `miri(key = value)`"; let extract_int = |lit: &syntax::ast::Lit| -> u128 { match lit.node { 
syntax::ast::LitKind::Int(i, _) => i, _ => { state.session.span_fatal( lit.span, "expected an integer literal", ) } } }; for attr in krate.attrs.iter().filter(|a| { a.name().map_or(false, |n| n == "miri") }) { if let Some(items) = attr.meta_item_list() { for item in items { if let NestedMetaItemKind::MetaItem(ref inner) = item.node { if let MetaItemKind::NameValue(ref value) = inner.node { match &inner.name().as_str()[..] { "memory_size" => limits.memory_size = extract_int(value) as u64, "step_limit" => limits.step_limit = extract_int(value) as u64, "stack_limit" => limits.stack_limit = extract_int(value) as usize, _ => state.session.span_err(item.span, "unknown miri attribute"), } } else { state.session.span_err(inner.span, err_msg); } } else { state.session.span_err(item.span, err_msg); } } } else { state.session.span_err(attr.span, err_msg); } } limits } fn init_logger() { let format = |record: &log::LogRecord| { if record.level() == log::LogLevel::Trace { let indentation = log_settings::settings().indentation; format!( "{indentation}:{lvl}:{module}: {text}", lvl = record.level(), module = record.location().module_path(), indentation = indentation, text = record.args(), ) } else { format!( "{lvl}:{module}: {text}", lvl = record.level(), module = record.location().module_path(), text = record.args(), ) } }; let mut builder = env_logger::LogBuilder::new(); builder.format(format).filter( None, log::LogLevelFilter::Info, ); if std::env::var("MIRI_LOG").is_ok() { builder.parse(&std::env::var("MIRI_LOG").unwrap()); } builder.init().unwrap(); } fn find_sysroot() -> String { if let Ok(sysroot) = std::env::var("MIRI_SYSRO
up or multirust", ) .to_owned() } } } fn main() { init_logger(); let mut args: Vec<String> = std::env::args().collect(); let sysroot_flag = String::from("--sysroot"); if !args.contains(&sysroot_flag) { args.push(sysroot_flag); args.push(find_sysroot()); } args.push("-Zalways-encode-mir".to_owned()); rustc_driver::run_compiler(&args, &mut MiriCompilerCalls { default: RustcDefaultCalls, }, None, None); }
OT") { return sysroot; } let home = option_env!("RUSTUP_HOME").or(option_env!("MULTIRUST_HOME")); let toolchain = option_env!("RUSTUP_TOOLCHAIN").or(option_env!("MULTIRUST_TOOLCHAIN")); match (home, toolchain) { (Some(home), Some(toolchain)) => format!("{}/toolchains/{}", home, toolchain), _ => { option_env!("RUST_SYSROOT") .expect( "need to specify RUST_SYSROOT env var or use rust
function_block-random_span
[ { "content": "fn after_analysis<'a, 'tcx>(state: &mut CompileState<'a, 'tcx>) {\n\n state.session.abort_if_errors();\n\n\n\n let tcx = state.tcx.unwrap();\n\n let limits = Default::default();\n\n\n\n if std::env::args().any(|arg| arg == \"--test\") {\n\n struct Visitor<'a, 'tcx: 'a>(miri::ResourceLimits, TyCtxt<'a, 'tcx, 'tcx>, &'a CompileState<'a, 'tcx>);\n\n impl<'a, 'tcx: 'a, 'hir> itemlikevisit::ItemLikeVisitor<'hir> for Visitor<'a, 'tcx> {\n\n fn visit_item(&mut self, i: &'hir hir::Item) {\n\n if let hir::Item_::ItemFn(_, _, _, _, _, body_id) = i.node {\n\n if i.attrs.iter().any(|attr| attr.name().map_or(false, |n| n == \"test\")) {\n\n let did = self.1.hir.body_owner_def_id(body_id);\n\n println!(\"running test: {}\", self.1.def_path_debug_str(did));\n\n miri::eval_main(self.1, did, None, self.0);\n\n self.2.session.abort_if_errors();\n\n }\n\n }\n\n }\n\n fn visit_trait_item(&mut self, _trait_item: &'hir hir::TraitItem) {}\n", "file_path": "rustc_tests/src/main.rs", "rank": 0, "score": 428853.6558997313 }, { "content": "fn after_hir_lowering(state: &mut CompileState) {\n\n let attr = (String::from(\"miri\"), syntax::feature_gate::AttributeType::Whitelisted);\n\n state.session.plugin_attributes.borrow_mut().push(attr);\n\n}\n\n\n", "file_path": "rustc_tests/src/main.rs", "rank": 1, "score": 402998.5647806899 }, { "content": "fn main() {\n\n let f: &Fn(i32) = &foo;\n\n f(42);\n\n}\n", "file_path": "tests/run-pass/fn_item_with_args_as_closure_trait_object.rs", "rank": 2, "score": 295385.6131309434 }, { "content": "fn main() {\n\n let f: &Fn(i32, i32) = &foo;\n\n f(42, 55);\n\n let f: &Fn(i32, i32, f32) = &bar;\n\n f(42, 55, 3.14159);\n\n}\n", "file_path": "tests/run-pass/fn_item_with_multiple_args_as_closure_trait_object.rs", "rank": 3, "score": 290840.181538129 }, { "content": "struct MiriCompilerCalls {\n\n default: RustcDefaultCalls,\n\n /// whether we are building for the host\n\n host_target: bool,\n\n}\n\n\n\nimpl<'a> CompilerCalls<'a> for 
MiriCompilerCalls {\n\n fn early_callback(\n\n &mut self,\n\n matches: &getopts::Matches,\n\n sopts: &config::Options,\n\n cfg: &ast::CrateConfig,\n\n descriptions: &rustc_errors::registry::Registry,\n\n output: ErrorOutputType\n\n ) -> Compilation {\n\n self.default.early_callback(matches, sopts, cfg, descriptions, output)\n\n }\n\n fn no_input(\n\n &mut self,\n\n matches: &getopts::Matches,\n", "file_path": "rustc_tests/src/main.rs", "rank": 4, "score": 277378.6183066518 }, { "content": "fn main() {\n\n let x = 5;\n\n unsafe {\n\n std::intrinsics::assume(x < 10);\n\n std::intrinsics::assume(x > 1);\n\n std::intrinsics::assume(x > 42); //~ ERROR: `assume` argument was false\n\n }\n\n}\n", "file_path": "tests/compile-fail/assume.rs", "rank": 5, "score": 273144.4547128587 }, { "content": "fn main() {\n\n assert_eq!(empty(), \"\");\n\n assert_eq!(hello(), \"Hello, world!\");\n\n assert_eq!(hello_bytes(), b\"Hello, world!\");\n\n assert_eq!(hello_bytes_fat(), b\"Hello, world!\");\n\n fat_pointer_on_32_bit(); // Should run without crashing.\n\n}\n", "file_path": "tests/run-pass/strings.rs", "rank": 6, "score": 268866.31254654087 }, { "content": "fn main() {\n\n bar();\n\n}\n", "file_path": "tests/compile-fail/stack_limit.rs", "rank": 7, "score": 268185.7474930907 }, { "content": "fn main() {\n\n assert!(std::char::from_u32(-1_i32 as u32).is_none());\n\n match unsafe { std::mem::transmute::<i32, char>(-1) } { //~ERROR tried to interpret an invalid 32-bit value as a char: 4294967295\n\n 'a' => {},\n\n 'b' => {},\n\n _ => {},\n\n }\n\n}\n", "file_path": "tests/compile-fail/match_char.rs", "rank": 8, "score": 268135.5015876549 }, { "content": "fn print_vec<W: std::io::Write>(stderr: &mut W, v: Vec<String>) {\n\n writeln!(stderr, \"```\").unwrap();\n\n for (n, s) in vec_to_hist(v).into_iter().rev() {\n\n writeln!(stderr, \"{:4} {}\", n, s).unwrap();\n\n }\n\n writeln!(stderr, \"```\").unwrap();\n\n}\n\n\n", "file_path": "rustc_tests/src/main.rs", "rank": 9, "score": 
265611.07476415345 }, { "content": "//ignore-msvc\n\nfn main() {\n\n println!(\"Hello {}\", 13);\n\n}\n", "file_path": "tests/run-pass-fullmir/format.rs", "rank": 10, "score": 264082.2506815083 }, { "content": "fn main() {\n\n vec![()].into_iter();\n\n}\n", "file_path": "tests/run-pass/assume_bug.rs", "rank": 11, "score": 264075.69917110534 }, { "content": "fn main() {\n\n let x: u128 = 0xFFFF_FFFF_FFFF_FFFF__FFFF_FFFF_FFFF_FFFF;\n\n assert_eq!(0, !x);\n\n assert_eq!(0, !x);\n\n let y: u128 = 0xFFFF_FFFF_FFFF_FFFF__FFFF_FFFF_FFFF_FFFE;\n\n assert_eq!(!1, y);\n\n assert_eq!(x, y | 1);\n\n assert_eq!(0xFAFF_0000_FF8F_0000__FFFF_0000_FFFF_FFFE,\n\n y &\n\n 0xFAFF_0000_FF8F_0000__FFFF_0000_FFFF_FFFF);\n\n let z: u128 = 0xABCD_EF;\n\n assert_eq!(z * z, 0x734C_C2F2_A521);\n\n assert_eq!(z * z * z * z, 0x33EE_0E2A_54E2_59DA_A0E7_8E41);\n\n assert_eq!(z + z + z + z, 0x2AF3_7BC);\n\n let k: u128 = 0x1234_5678_9ABC_DEFF_EDCB_A987_6543_210;\n\n assert_eq!(k + k, 0x2468_ACF1_3579_BDFF_DB97_530E_CA86_420);\n\n assert_eq!(0, k - k);\n\n assert_eq!(0x1234_5678_9ABC_DEFF_EDCB_A987_5A86_421, k - z);\n\n assert_eq!(0x1000_0000_0000_0000_0000_0000_0000_000,\n\n k - 0x234_5678_9ABC_DEFF_EDCB_A987_6543_210);\n", "file_path": "tests/run-pass-fullmir/u128.rs", "rank": 12, "score": 264062.96263110166 }, { "content": "fn main() {\n\n let x = \"hello\";\n\n match x {\n\n \"foo\" => {},\n\n \"bar\" => {},\n\n _ => {},\n\n }\n\n}\n", "file_path": "tests/run-pass/match_slice.rs", "rank": 13, "score": 264012.71672566584 }, { "content": "fn main() {\n\n assert_eq!(call_fn_ptr(), 42);\n\n assert_eq!(indirect(f), 42);\n\n assert_eq!(indirect_mut(f), 42);\n\n assert_eq!(indirect_once(f), 42);\n\n assert_eq!(indirect2(g), 420);\n\n assert_eq!(indirect_mut2(g), 420);\n\n assert_eq!(indirect_once2(g), 420);\n\n assert_eq!(indirect3(h), 210);\n\n assert_eq!(indirect_mut3(h), 210);\n\n assert_eq!(indirect_once3(h), 210);\n\n assert!(return_fn_ptr() == f);\n\n assert!(return_fn_ptr() as unsafe fn() -> 
i32 == f as fn() -> i32 as unsafe fn() -> i32);\n\n}\n", "file_path": "tests/run-pass/function_pointers.rs", "rank": 14, "score": 264000.14572868263 }, { "content": "fn main() {\n\n unsafe {\n\n assert_eq!(*BAR.0, 42);\n\n FOO = 5;\n\n assert_eq!(FOO, 5);\n\n assert_eq!(*BAR.0, 5);\n\n }\n\n}\n", "file_path": "tests/run-pass/static_mut.rs", "rank": 15, "score": 263815.24652267597 }, { "content": "fn main() {\n\n S::new(0u8, 1u16);\n\n}\n", "file_path": "tests/run-pass/miri-issue-133.rs", "rank": 16, "score": 262628.11929045734 }, { "content": "fn finish<T>(mut amt: usize, mut t: T) -> T::Return\n\n where T: Generator<Yield = ()>\n\n{\n\n loop {\n\n match t.resume() {\n\n GeneratorState::Yielded(()) => amt -= 1,\n\n GeneratorState::Complete(ret) => {\n\n assert_eq!(amt, 0);\n\n return ret\n\n }\n\n }\n\n }\n\n\n\n}\n\n\n", "file_path": "tests/run-pass/generator_control_flow.rs", "rank": 17, "score": 262049.06473732402 }, { "content": "fn main() {\n\n let f: &Fn() = &foo;\n\n f();\n\n}\n", "file_path": "tests/run-pass/fn_item_as_closure_trait_object.rs", "rank": 18, "score": 260437.98837284773 }, { "content": "fn main() {\n\n finish(1, || yield);\n\n finish(3, || {\n\n let mut x = 0;\n\n yield;\n\n x += 1;\n\n yield;\n\n x += 1;\n\n yield;\n\n assert_eq!(x, 2);\n\n });\n\n finish(8, || {\n\n for _ in 0..8 {\n\n yield;\n\n }\n\n });\n\n finish(1, || {\n\n if true {\n\n yield;\n\n } else {\n", "file_path": "tests/run-pass/generator_control_flow.rs", "rank": 19, "score": 259458.99337438348 }, { "content": "fn main() {\n\n let z = foo(&mut |x, y| x * 10 + y);\n\n assert_eq!(z, 12);\n\n}\n", "file_path": "tests/run-pass/multi_arg_closure.rs", "rank": 20, "score": 259385.35453590198 }, { "content": "fn main() {\n\n let mut x = 0;\n\n let y: *const i32 = &x;\n\n x = 1;\n\n\n\n // When the described bug is in place, this results in `0`, not observing the `x = 1` line.\n\n assert_eq!(unsafe { *y }, 1);\n\n\n\n assert_eq!(x, 1);\n\n}\n", "file_path": 
"tests/run-pass/observed_local_mut.rs", "rank": 21, "score": 259211.0235429565 }, { "content": "fn main() {\n\n let v = vec![0,1,2];\n\n let v1_ = safe::as_mut_slice(&v);\n\n let v2_ = safe::as_mut_slice(&v);\n\n}\n", "file_path": "tests/compile-fail/validation_buggy_as_mut_slice.rs", "rank": 22, "score": 258752.09773226414 }, { "content": "fn main() {\n\n let mut array = [1,2,3,4];\n\n let _x = safe::split_at_mut(&mut array, 0); //~ ERROR: in conflict with lock WriteLock\n\n}\n", "file_path": "tests/compile-fail/validation_buggy_split_at_mut.rs", "rank": 23, "score": 258752.09773226414 }, { "content": "pub fn main() {\n\n unsafe {\n\n use rusti::*;\n\n\n\n assert_eq!(ctpop(0u8), 0); assert_eq!(ctpop(0i8), 0);\n\n assert_eq!(ctpop(0u16), 0); assert_eq!(ctpop(0i16), 0);\n\n assert_eq!(ctpop(0u32), 0); assert_eq!(ctpop(0i32), 0);\n\n assert_eq!(ctpop(0u64), 0); assert_eq!(ctpop(0i64), 0);\n\n\n\n assert_eq!(ctpop(1u8), 1); assert_eq!(ctpop(1i8), 1);\n\n assert_eq!(ctpop(1u16), 1); assert_eq!(ctpop(1i16), 1);\n\n assert_eq!(ctpop(1u32), 1); assert_eq!(ctpop(1i32), 1);\n\n assert_eq!(ctpop(1u64), 1); assert_eq!(ctpop(1i64), 1);\n\n\n\n assert_eq!(ctpop(10u8), 2); assert_eq!(ctpop(10i8), 2);\n\n assert_eq!(ctpop(10u16), 2); assert_eq!(ctpop(10i16), 2);\n\n assert_eq!(ctpop(10u32), 2); assert_eq!(ctpop(10i32), 2);\n\n assert_eq!(ctpop(10u64), 2); assert_eq!(ctpop(10i64), 2);\n\n\n\n assert_eq!(ctpop(100u8), 3); assert_eq!(ctpop(100i8), 3);\n", "file_path": "tests/run-pass/intrinsics-integer.rs", "rank": 24, "score": 254731.06988639352 }, { "content": "fn main() {\n\n #[derive(PartialEq, Eq, Debug)]\n\n struct A(i32);\n\n assert_eq!(Some(42).map(A), Some(A(42)));\n\n}\n", "file_path": "tests/run-pass/tuple_like_struct_constructor.rs", "rank": 25, "score": 254485.90427629114 }, { "content": "pub fn main() {\n\n // With a vec of ints.\n\n let f1 = Fat { f1: 5, f2: \"some str\", ptr: [1, 2, 3] };\n\n foo(&f1);\n\n let f2 = &f1;\n\n foo(f2);\n\n let f3: &Fat<[isize]> = 
f2;\n\n foo(f3);\n\n let f4: &Fat<[isize]> = &f1;\n\n foo(f4);\n\n let f5: &Fat<[isize]> = &Fat { f1: 5, f2: \"some str\", ptr: [1, 2, 3] };\n\n foo(f5);\n\n\n\n // With a vec of Bars.\n\n let bar = Bar;\n\n let f1 = Fat { f1: 5, f2: \"some str\", ptr: [bar, bar, bar] };\n\n foo2(&f1);\n\n let f2 = &f1;\n\n foo2(f2);\n\n let f3: &Fat<[Bar]> = f2;\n", "file_path": "tests/run-pass/dst-struct.rs", "rank": 26, "score": 254212.49820677997 }, { "content": "pub fn main() {\n\n let bytes: [u8; 8] = unsafe { ::std::mem::transmute(0u64) };\n\n let _: &[u8] = &bytes;\n\n}\n", "file_path": "tests/run-pass/issue-miri-184.rs", "rank": 27, "score": 253398.03148034116 }, { "content": "pub fn main() {\n\n let x = &[1, 2, 3, 4, 5];\n\n\n\n let product = foldl(x, 1, |a, b| a * *b);\n\n assert_eq!(product, 120);\n\n\n\n let sum = foldr(x, 0, |a, b| *a + b);\n\n assert_eq!(sum, 15);\n\n}\n", "file_path": "tests/run-pass/vec-matching-fold.rs", "rank": 28, "score": 250356.04826239636 }, { "content": "pub fn main() {\n\n // This tests that do (not) do sign extension properly when loading integers\n\n assert_eq!(u32::max_value() as i64, 4294967295);\n\n assert_eq!(i32::min_value() as i64, -2147483648);\n\n\n\n assert_eq!(i8::min_value(), -128);\n\n\n\n assert_eq!(i8::max_value(), 127);\n\n\n\n assert_eq!(i32::from_str_radix(\"A\", 16), Ok(10));\n\n\n\n let n = -0b1000_0000i8;\n\n assert_eq!(n.count_ones(), 1);\n\n\n\n let n = -0b1000_0000i8;\n\n assert_eq!(n.count_zeros(), 7);\n\n\n\n let n = -1i16;\n\n assert_eq!(n.leading_zeros(), 0);\n\n\n", "file_path": "tests/run-pass-fullmir/integer-ops.rs", "rank": 29, "score": 250338.0669601482 }, { "content": "pub fn main() {\n\n let x = box 10;\n\n let y = x;\n\n assert_eq!(*y, 10);\n\n}\n", "file_path": "tests/run-pass/move-arg-3-unique.rs", "rank": 30, "score": 250338.0669601482 }, { "content": "pub fn main() {\n\n // With a vec of ints.\n\n let f1 = Fat { ptr: [1, 2, 3] };\n\n foo(&f1);\n\n let f2 = &f1;\n\n foo(f2);\n\n let f3: &Fat<[isize]> 
= f2;\n\n foo(f3);\n\n let f4: &Fat<[isize]> = &f1;\n\n foo(f4);\n\n let f5: &Fat<[isize]> = &Fat { ptr: [1, 2, 3] };\n\n foo(f5);\n\n\n\n // With a vec of Bars.\n\n let bar = Bar;\n\n let f1 = Fat { ptr: [bar, bar, bar] };\n\n foo2(&f1);\n\n let f2 = &f1;\n\n foo2(f2);\n\n let f3: &Fat<[Bar]> = f2;\n", "file_path": "tests/run-pass/dst-struct-sole.rs", "rank": 31, "score": 249831.61962678266 }, { "content": "pub fn record(r: usize) {\n\n assert!(r < 10);\n\n unsafe { RECORD = RECORD*10 + r };\n\n}\n\n\n\nunsafe extern fn dtor(ptr: *mut u64) {\n\n assert!(CANNARY != 0 as *mut _); // make sure we do not get run too often\n\n let val = *ptr;\n\n\n\n let which_key = GLOBALS.iter().position(|global| global as *const _ == ptr).expect(\"Should find my global\");\n\n record(which_key);\n\n\n\n if val > 0 {\n\n *ptr = val-1;\n\n set(KEYS[which_key], ptr as *mut _);\n\n }\n\n\n\n // Check if the records matches what we expect. If yes, clear the cannary.\n\n // If the record is wrong, the cannary will never get cleared, leading to a leak -> test fails.\n\n // If the record is incomplete (i.e., more dtor calls happen), the check at the beginning of this function will fail -> test fails.\n\n // The correct sequence is: First key 0, then key 1, then key 0.\n\n if RECORD == 0_1_0 {\n\n drop(Box::from_raw(CANNARY));\n\n CANNARY = 0 as *mut _;\n\n }\n\n}\n\n\n", "file_path": "tests/run-pass/thread-local.rs", "rank": 32, "score": 247181.2956240379 }, { "content": "pub fn main() {\n\n assert_eq!(foo()(), 22);\n\n}\n", "file_path": "tests/run-pass/last-use-in-cap-clause.rs", "rank": 33, "score": 246210.16282946707 }, { "content": "pub fn main() {\n\n let x = mk_rec();\n\n assert!(is_u64_aligned(&x.t));\n\n}\n", "file_path": "tests/run-pass/tag-align-dyn-u64.rs", "rank": 34, "score": 246186.7433982504 }, { "content": "pub fn main() {\n\n let value = 'outer: loop {\n\n if 1 == 1 {\n\n break 13;\n\n } else {\n\n let _never: ! 
= loop {\n\n break loop {\n\n break 'outer panic!();\n\n }\n\n };\n\n }\n\n };\n\n assert_eq!(value, 13);\n\n\n\n let x = [1, 3u32, 5];\n\n let y = [17];\n\n let z = [];\n\n let coerced: &[_] = loop {\n\n match 2 {\n\n 1 => break &x,\n", "file_path": "tests/run-pass-fullmir/loop-break-value.rs", "rank": 35, "score": 246180.90013735218 }, { "content": "pub fn main() {\n\n let x = box vec![10];\n\n // Test forgetting a local by move-in\n\n test(x);\n\n}\n", "file_path": "tests/run-pass-fullmir/move-arg-2-unique.rs", "rank": 36, "score": 246145.79019709534 }, { "content": "fn strlen(str: String) -> usize {\n\n // C string is terminated with a zero\n\n let s = CString::new(str).unwrap();\n\n unsafe {\n\n mlibc::my_strlen(s.as_ptr()) as usize\n\n }\n\n}\n\n\n", "file_path": "tests/run-pass-fullmir/foreign-fn-linkname.rs", "rank": 37, "score": 244820.98095413996 }, { "content": "fn main() {\n\n let path = option_env!(\"MIRI_RUSTC_TEST\")\n\n .map(String::from)\n\n .unwrap_or_else(|| {\n\n std::env::var(\"MIRI_RUSTC_TEST\")\n\n .expect(\"need to set MIRI_RUSTC_TEST to path of rustc tests\")\n\n });\n\n\n\n let mut mir_not_found = Vec::new();\n\n let mut crate_not_found = Vec::new();\n\n let mut success = 0;\n\n let mut failed = Vec::new();\n\n let mut c_abi_fns = Vec::new();\n\n let mut abi = Vec::new();\n\n let mut unsupported = Vec::new();\n\n let mut unimplemented_intrinsic = Vec::new();\n\n let mut limits = Vec::new();\n\n let mut files: Vec<_> = std::fs::read_dir(path).unwrap().collect();\n\n while let Some(file) = files.pop() {\n\n let file = file.unwrap();\n", "file_path": "rustc_tests/src/main.rs", "rank": 38, "score": 239872.57409412903 }, { "content": "fn main() {\n\n let x = 5;\n\n let a = A { x: 99, y: &x };\n\n assert_eq!(Some(a).map(Some), Some(Some(a)));\n\n let f = B;\n\n assert_eq!(Some(B(42, &x)), Some(f(42, &x)));\n\n // the following doesn't compile :(\n\n //let f: for<'a> fn(i32, &'a i32) -> B<'a> = B;\n\n //assert_eq!(Some(B(42, &x)), Some(f(42, 
&x)));\n\n assert_eq!(B(42, &x), foo(&x, B));\n\n let f = C::Value;\n\n assert_eq!(C::Value(42, &x), f(42, &x));\n\n}\n\n\n", "file_path": "tests/run-pass/tuple_like_enum_variant_constructor_struct_pointer_opt.rs", "rank": 39, "score": 238835.71693143295 }, { "content": "fn test(r: &mut RefCell<i32>) {\n\n let x = &*r; // releasing write lock, first suspension recorded\n\n let mut x_ref = x.borrow_mut();\n\n let x_inner : &mut i32 = &mut *x_ref; // new inner write lock, with same lifetime as outer lock\n\n {\n\n let x_inner_shr = &*x_inner; // releasing inner write lock, recording suspension\n\n let y = &*r; // second suspension for the outer write lock\n\n let x_inner_shr2 = &*x_inner; // 2nd suspension for inner write lock\n\n }\n\n // If the two locks are mixed up, here we should have a write lock, but we do not.\n\n evil(x_inner as *mut _);\n\n}\n\n\n", "file_path": "tests/compile-fail/validation_lock_confusion.rs", "rank": 40, "score": 238571.14598074934 }, { "content": "fn indirect_mut<F: FnMut() -> i32>(mut f: F) -> i32 { f() }\n", "file_path": "tests/run-pass/function_pointers.rs", "rank": 41, "score": 237126.93497975837 }, { "content": "fn vec_to_hist<T: PartialEq + Ord>(mut v: Vec<T>) -> Vec<(usize, T)> {\n\n v.sort();\n\n let mut v = v.into_iter();\n\n let mut result = Vec::new();\n\n let mut current = v.next();\n\n 'outer: while let Some(current_val) = current {\n\n let mut n = 1;\n\n for next in &mut v {\n\n if next == current_val {\n\n n += 1;\n\n } else {\n\n result.push((n, current_val));\n\n current = Some(next);\n\n continue 'outer;\n\n }\n\n }\n\n result.push((n, current_val));\n\n break;\n\n }\n\n result.sort();\n\n result\n\n}\n", "file_path": "rustc_tests/src/main.rs", "rank": 42, "score": 236779.43838666743 }, { "content": "fn for_all_targets<F: FnMut(String)>(sysroot: &Path, mut f: F) {\n\n let target_dir = sysroot.join(\"lib\").join(\"rustlib\");\n\n for entry in std::fs::read_dir(target_dir).expect(\"invalid sysroot\") {\n\n let entry = 
entry.unwrap();\n\n if !is_target_dir(entry.path()) {\n\n continue;\n\n }\n\n let target = entry.file_name().into_string().unwrap();\n\n f(target);\n\n }\n\n}\n\n\n", "file_path": "tests/compiletest.rs", "rank": 43, "score": 235453.87887610242 }, { "content": "fn main() {\n\n let buf = &[1,2,3,4];\n\n let n = <BigEndian as ByteOrder>::read_u32(buf);\n\n assert_eq!(n, 0x01020304);\n\n}\n", "file_path": "cargo-miri-test/src/main.rs", "rank": 44, "score": 234876.39916788117 }, { "content": "fn foo(f: &mut FnMut(isize, isize) -> isize) -> isize {\n\n f(1, 2)\n\n}\n\n\n", "file_path": "tests/run-pass/multi_arg_closure.rs", "rank": 45, "score": 233806.11223024628 }, { "content": "fn main() {\n\n let x: i32 = unsafe {\n\n *std::mem::transmute::<fn(), *const i32>(f) //~ ERROR: tried to dereference a function pointer\n\n };\n\n panic!(\"this should never print: {}\", x);\n\n}\n", "file_path": "tests/compile-fail/deref_fn_ptr.rs", "rank": 46, "score": 231059.6913265015 }, { "content": "fn main() {\n\n fn f() {}\n\n\n\n let g = unsafe {\n\n std::mem::transmute::<fn(), fn(i32)>(f)\n\n };\n\n\n\n g(42) //~ ERROR tried to call a function with sig fn() through a function pointer of type fn(i32)\n\n}\n", "file_path": "tests/compile-fail/cast_fn_ptr.rs", "rank": 47, "score": 231059.6913265015 }, { "content": "fn main() {\n\n fn f(_ : (i32,i32)) {}\n\n\n\n let g = unsafe {\n\n std::mem::transmute::<fn((i32,i32)), fn(i32)>(f)\n\n };\n\n\n\n g(42) //~ ERROR tried to call a function with sig fn((i32, i32)) through a function pointer of type fn(i32)\n\n}\n", "file_path": "tests/compile-fail/cast_fn_ptr2.rs", "rank": 48, "score": 231059.6913265015 }, { "content": "fn main() {\n\n let x : fn() = f;\n\n let y : *mut u8 = unsafe { mem::transmute(x) };\n\n let y = y.wrapping_offset(1);\n\n let x : fn() = unsafe { mem::transmute(y) };\n\n x(); //~ ERROR: tried to use a function pointer after offsetting it\n\n}\n", "file_path": "tests/compile-fail/fn_ptr_offset.rs", "rank": 49, "score": 
231059.6913265015 }, { "content": "fn main() {\n\n assert_eq!(5, 6);\n\n}\n", "file_path": "tests/compile-fail/panic.rs", "rank": 50, "score": 229378.43081666692 }, { "content": "fn main() {\n\n let data: [u8; 1024*1024*1024] = [42; 1024*1024*1024];\n\n //~^ ERROR: tried to allocate\n\n assert_eq!(data.len(), 1024*1024*1024);\n\n}\n", "file_path": "tests/compile-fail/repeat2.rs", "rank": 51, "score": 229378.43081666692 }, { "content": "fn main() {\n\n let data: [u8; std::usize::MAX] = [42; std::usize::MAX];\n\n //~^ ERROR: rustc layout computation failed: SizeOverflow([u8;\n\n assert_eq!(data.len(), 1024);\n\n}\n", "file_path": "tests/compile-fail/repeat.rs", "rank": 52, "score": 229378.43081666692 }, { "content": "fn main() {\n\n loop {\n\n ::std::mem::forget(box 42);\n\n }\n\n}\n", "file_path": "tests/compile-fail/oom2.rs", "rank": 53, "score": 229378.43081666692 }, { "content": "fn main() {\n\n let _x = [42; 1024];\n\n //~^ERROR tried to allocate 4096 more bytes, but only\n\n}\n", "file_path": "tests/compile-fail/oom.rs", "rank": 54, "score": 229378.43081666692 }, { "content": "fn main() {\n\n // miri always gives allocations the worst possible alignment, so a `u8` array is guaranteed\n\n // to be at the virtual location 1 (so one byte offset from the ultimate alignemnt location 0)\n\n let mut x = [0u8; 20];\n\n let x_ptr: *mut u8 = &mut x[0];\n\n let y_ptr = x_ptr as *mut u64;\n\n unsafe {\n\n *y_ptr = 42; //~ ERROR tried to access memory with alignment 1, but alignment\n\n }\n\n panic!(\"unreachable in miri\");\n\n}\n", "file_path": "tests/compile-fail/alignment.rs", "rank": 55, "score": 229378.43081666692 }, { "content": "fn main() {\n\n std::mem::forget(Box::new(42));\n\n}\n", "file_path": "tests/compile-fail/memleak.rs", "rank": 56, "score": 229378.43081666692 }, { "content": "fn main() {\n\n let x = &() as *const () as *const i32;\n\n let _ = unsafe { *x }; //~ ERROR: tried to access memory with alignment 1, but alignment 4 is required\n\n}\n", 
"file_path": "tests/compile-fail/zst.rs", "rank": 57, "score": 229378.43081666692 }, { "content": "fn main() {\n\n for i in 0..1000000 {\n\n assert!(i < 1000);\n\n }\n\n}\n", "file_path": "tests/compile-fail/timeout.rs", "rank": 58, "score": 229378.43081666692 }, { "content": "fn main() {\n\n dep::foo();\n\n}\n", "file_path": "tests/run-pass/aux_test.rs", "rank": 59, "score": 229249.0166766579 }, { "content": "fn find_sysroot() -> String {\n\n // Taken from https://github.com/Manishearth/rust-clippy/pull/911.\n\n let home = option_env!(\"RUSTUP_HOME\").or(option_env!(\"MULTIRUST_HOME\"));\n\n let toolchain = option_env!(\"RUSTUP_TOOLCHAIN\").or(option_env!(\"MULTIRUST_TOOLCHAIN\"));\n\n match (home, toolchain) {\n\n (Some(home), Some(toolchain)) => format!(\"{}/toolchains/{}\", home, toolchain),\n\n _ => {\n\n option_env!(\"RUST_SYSROOT\")\n\n .expect(\n\n \"need to specify RUST_SYSROOT env var or use rustup or multirust\",\n\n )\n\n .to_owned()\n\n }\n\n }\n\n}\n\n\n", "file_path": "benches/helpers/miri_helper.rs", "rank": 60, "score": 228784.9160320885 }, { "content": "// just making sure that fn -> unsafe fn casts are handled by rustc so miri doesn't have to\n\nfn main() {\n\n fn f() {}\n\n\n\n let g = f as fn() as unsafe fn(i32); //~ERROR: non-primitive cast: `fn()` as `unsafe fn(i32)`\n\n\n\n unsafe {\n\n g(42);\n\n }\n\n}\n", "file_path": "tests/compile-fail/cast_fn_ptr_unsafe.rs", "rank": 61, "score": 227745.84102060978 }, { "content": "// just making sure that fn -> unsafe fn casts are handled by rustc so miri doesn't have to\n\nfn main() {\n\n fn f() {}\n\n\n\n let g = f as fn() as fn(i32) as unsafe fn(i32); //~ERROR: non-primitive cast: `fn()` as `fn(i32)`\n\n\n\n unsafe {\n\n g(42);\n\n }\n\n}\n", "file_path": "tests/compile-fail/cast_fn_ptr_unsafe2.rs", "rank": 62, "score": 227745.84102060978 }, { "content": "fn main() {\n\n let g = unsafe {\n\n std::mem::transmute::<usize, fn(i32)>(42)\n\n };\n\n\n\n g(42) //~ ERROR a memory access tried to interpret 
some bytes as a pointer\n\n}\n", "file_path": "tests/compile-fail/cast_int_to_fn_ptr.rs", "rank": 63, "score": 227733.06951453065 }, { "content": "fn main() {\n\n fn f(_: *const u8) {}\n\n\n\n let g = unsafe {\n\n std::mem::transmute::<fn(*const u8), fn(*const i32)>(f)\n\n };\n\n\n\n g(&42 as *const _);\n\n}\n", "file_path": "tests/run-pass/cast_fn_ptr.rs", "rank": 64, "score": 227035.6059103969 }, { "content": "fn main() {\n\n let x = 42;\n\n let bad_box = unsafe { std::mem::transmute::<&i32, Box<i32>>(&x) };\n\n drop(bad_box);\n\n}\n", "file_path": "tests/compile-fail/stack_free.rs", "rank": 65, "score": 225505.91704957932 }, { "content": "// error-pattern: overflowing math\n\nfn main() {\n\n let v = [0i8; 4];\n\n let x = &v as *const i8;\n\n let x = unsafe { x.offset(-1) };\n\n panic!(\"this should never print: {:?}\", x);\n\n}\n", "file_path": "tests/compile-fail/out_of_bounds_ptr_2.rs", "rank": 66, "score": 225505.91704957932 }, { "content": "fn main() {\n\n let b = unsafe { std::mem::transmute::<u8, bool>(2) }; //~ ERROR: invalid boolean value read\n\n if b { unreachable!() } else { unreachable!() }\n\n}\n", "file_path": "tests/compile-fail/invalid_bool.rs", "rank": 67, "score": 225505.91704957932 }, { "content": "fn main() {\n\n let target = &mut 42u32;\n\n let target2 = target as *mut _;\n\n drop(&mut *target); // reborrow\n\n // Now make sure we still got the lock\n\n safe::safe(target2);\n\n}\n", "file_path": "tests/compile-fail/validation_recover3.rs", "rank": 68, "score": 225505.91704957932 }, { "content": "fn main() {\n\n let _n = 1 / 0; //~ ERROR: DivisionByZero\n\n}\n", "file_path": "tests/compile-fail/div-by-zero-2.rs", "rank": 69, "score": 225505.91704957932 }, { "content": "fn main() {\n\n let _n = 1i64 >> 64; //~ Overflow(Shr)\n\n}\n", "file_path": "tests/compile-fail/overflowing-rsh.rs", "rank": 70, "score": 225505.91704957932 }, { "content": "// error-pattern: pointer computed at offset 5, outside bounds of allocation\n\nfn main() {\n\n let v = 
[0i8; 4];\n\n let x = &v as *const i8;\n\n // The error is inside another function, so we cannot match it by line\n\n let x = unsafe { x.offset(5) };\n\n panic!(\"this should never print: {:?}\", x);\n\n}\n", "file_path": "tests/compile-fail/out_of_bounds_ptr_1.rs", "rank": 71, "score": 225505.91704957932 }, { "content": "fn main() {\n\n let foo = Foo {\n\n x: 42,\n\n y: 99,\n\n };\n\n let p = unsafe { &foo.x };\n\n let i = *p; //~ ERROR tried to access memory with alignment 1, but alignment 4 is required\n\n}\n", "file_path": "tests/compile-fail/reference_to_packed.rs", "rank": 72, "score": 225505.91704957932 }, { "content": "fn main() {\n\n #[cfg(target_pointer_width=\"64\")]\n\n let bad = unsafe {\n\n std::mem::transmute::<&[u8], u128>(&[1u8])\n\n };\n\n #[cfg(target_pointer_width=\"32\")]\n\n let bad = unsafe {\n\n std::mem::transmute::<&[u8], u64>(&[1u8])\n\n };\n\n bad + 1; //~ ERROR a raw memory access tried to access part of a pointer value as raw bytes\n\n}\n", "file_path": "tests/compile-fail/transmute_fat.rs", "rank": 73, "score": 225505.91704957932 }, { "content": "fn main() {\n\n let target = &mut 42;\n\n let mut target_alias = &42; // initial dummy value\n\n safe::safe(&mut target_alias, target); //~ ERROR: in conflict with lock ReadLock\n\n}\n", "file_path": "tests/compile-fail/validation_recover2.rs", "rank": 74, "score": 225505.91704957932 }, { "content": "fn main() {\n\n let x = Dummy(Rc::new(RefCell::new(None)));\n\n let y = Dummy(x.0.clone());\n\n *x.0.borrow_mut() = Some(y);\n\n}\n", "file_path": "tests/compile-fail/memleak_rc.rs", "rank": 75, "score": 225505.91704957932 }, { "content": "fn main() {\n\n // Make sure we catch overflows that would be hidden by first casting the RHS to u32\n\n let _n = 1i64 >> (u32::max_value() as i64 + 1); //~ Overflow(Shr)\n\n}\n", "file_path": "tests/compile-fail/overflowing-rsh-2.rs", "rank": 76, "score": 225505.91704957932 }, { "content": "fn main() {\n\n unsafe {\n\n let x = 
Heap.alloc(Layout::from_size_align_unchecked(1, 1)).unwrap();\n\n Heap.dealloc(x, Layout::from_size_align_unchecked(1, 1));\n\n Heap.realloc(x, Layout::from_size_align_unchecked(1, 1), Layout::from_size_align_unchecked(1, 1));\n\n }\n\n}\n", "file_path": "tests/compile-fail/reallocate-dangling.rs", "rank": 77, "score": 225505.91704957932 }, { "content": "fn main() {\n\n let bytes = [0i8, 1, 2, 3, 4, 5, 6, 7, 8, 9];\n\n let one = bytes.as_ptr().wrapping_offset(1);\n\n let three = bytes.as_ptr().wrapping_offset(3);\n\n let res = (one as usize) | (three as usize); //~ ERROR a raw memory access tried to access part of a pointer value as raw bytes\n\n println!(\"{}\", res);\n\n}\n", "file_path": "tests/compile-fail/ptr_bitops.rs", "rank": 78, "score": 225505.91704957932 }, { "content": "fn main() {\n\n let x = box 42;\n\n unsafe {\n\n let f = std::mem::transmute::<Box<i32>, fn()>(x);\n\n f() //~ ERROR: tried to treat a memory pointer as a function pointer\n\n }\n\n}\n", "file_path": "tests/compile-fail/execute_memory.rs", "rank": 79, "score": 225505.91704957932 }, { "content": "fn main() {\n\n let v: Vec<u8> = vec![1, 2];\n\n let x = unsafe { *v.as_ptr().wrapping_offset(5) }; //~ ERROR: which has size 2\n\n panic!(\"this should never print: {}\", x);\n\n}\n", "file_path": "tests/compile-fail/out_of_bounds_read.rs", "rank": 80, "score": 225505.91704957932 }, { "content": "fn main() {\n\n let x = &1; // the `&1` is promoted to a constant, but it used to be that only the pointer is marked static, not the pointee\n\n let y = unsafe { &mut *(x as *const i32 as *mut i32) };\n\n *y = 42; //~ ERROR tried to modify constant memory\n\n assert_eq!(*x, 42);\n\n}\n", "file_path": "tests/compile-fail/modifying_constants.rs", "rank": 81, "score": 225505.91704957932 }, { "content": "fn main() {\n\n let mut data = [0u8; 16];\n\n unsafe {\n\n let a = &data[0] as *const _;\n\n let b = &mut data[1] as *mut _;\n\n std::ptr::copy_nonoverlapping(a, b, 2);\n\n }\n\n}\n", "file_path": 
"tests/compile-fail/copy_nonoverlapping.rs", "rank": 82, "score": 225505.91704957932 }, { "content": "fn main() {\n\n let mut x = Bool::True;\n\n safe::safe(&mut x); //~ ERROR: invalid enum discriminant\n\n}\n", "file_path": "tests/compile-fail/validation_recover1.rs", "rank": 83, "score": 225505.91704957932 }, { "content": "fn main() {\n\n #[cfg(target_pointer_width=\"64\")]\n\n let bad = unsafe {\n\n std::mem::transmute::<u128, &[u8]>(42)\n\n };\n\n #[cfg(target_pointer_width=\"32\")]\n\n let bad = unsafe {\n\n std::mem::transmute::<u64, &[u8]>(42)\n\n };\n\n bad[0]; //~ ERROR index out of bounds: the len is 0 but the index is 0\n\n}\n", "file_path": "tests/compile-fail/transmute_fat2.rs", "rank": 84, "score": 225505.91704957932 }, { "content": "fn main() {\n\n let _x = safe::make_float();\n\n}\n", "file_path": "tests/compile-fail/validation_undef.rs", "rank": 85, "score": 225505.91704957932 }, { "content": "fn main() {\n\n unsafe {\n\n let _n = unchecked_div(1i64, 0);\n\n }\n\n}\n", "file_path": "tests/compile-fail/div-by-zero.rs", "rank": 86, "score": 225505.91704957932 }, { "content": "fn main() {\n\n unsafe {\n\n let x = Heap.alloc(Layout::from_size_align_unchecked(1, 1)).unwrap();\n\n Heap.dealloc(x, Layout::from_size_align_unchecked(1, 1));\n\n Heap.dealloc(x, Layout::from_size_align_unchecked(1, 1));\n\n }\n\n}\n", "file_path": "tests/compile-fail/deallocate-twice.rs", "rank": 87, "score": 225505.91704957932 }, { "content": "fn main() {\n\n let v: Vec<u8> = vec![1, 2];\n\n let x = unsafe { *v.as_ptr().wrapping_offset(5) }; //~ ERROR: memory access at offset 6, outside bounds of allocation\n\n panic!(\"this should never print: {}\", x);\n\n}\n", "file_path": "tests/compile-fail/out_of_bounds_read2.rs", "rank": 88, "score": 225505.91704957932 }, { "content": "fn main() {\n\n assert_eq!(factorial_loop(), 3628800);\n\n assert_eq!(index_for_loop(), 60);\n\n assert_eq!(for_loop(), 60);\n\n}\n", "file_path": "tests/run-pass/loops.rs", "rank": 89, "score": 
225151.98313413846 }, { "content": "fn main() {\n\n take(Handler::Default, Box::new(main));\n\n}\n\n\n", "file_path": "tests/run-pass/issue-30530.rs", "rank": 90, "score": 225151.98313413846 }, { "content": "fn main() {\n\n let mut x = 0;\n\n {\n\n let wrapper = Box::new(Wrapper(&mut x, 123));\n\n let _: Box<Wrapper<Send>> = wrapper;\n\n }\n\n assert_eq!(432, x)\n\n}\n", "file_path": "tests/run-pass/issue-26709.rs", "rank": 91, "score": 225151.98313413846 }, { "content": "fn main() {\n\n let foo: &Foo<i32> = &Foo { a: 1, b: false, c: 2i32 };\n\n let foo_unsized: &Foo<Send> = foo;\n\n assert_eq!(mem::size_of_val(foo), mem::size_of_val(foo_unsized));\n\n}\n", "file_path": "tests/run-pass/issue-35815.rs", "rank": 92, "score": 225151.98313413846 }, { "content": "fn main() {\n\n assert_eq!(one_line_ref(), 1);\n\n assert_eq!(basic_ref(), 1);\n\n assert_eq!(basic_ref_mut(), 3);\n\n assert_eq!(basic_ref_mut_var(), 3);\n\n assert_eq!(tuple_ref_mut(), (10, 22));\n\n assert_eq!(match_ref_mut(), 42);\n\n // FIXME: improve this test... 
how?\n\n assert!(dangling_pointer() != std::ptr::null());\n\n}\n", "file_path": "tests/run-pass/pointers.rs", "rank": 93, "score": 225151.98313413846 }, { "content": "fn main() {\n\n rc_refcell();\n\n rc_raw();\n\n rc_from();\n\n}\n", "file_path": "tests/run-pass/rc.rs", "rank": 94, "score": 225151.98313413846 }, { "content": "fn main() {\n\n let functions: [Box<Fn() -> Option<()>>; 1] = [Box::new(|| None)];\n\n\n\n let _: Option<Vec<()>> = functions.iter().map(|f| (*f)()).collect();\n\n}\n", "file_path": "tests/run-pass/issue-20575.rs", "rank": 95, "score": 225151.98313413846 }, { "content": "fn main() {\n\n let p1 = vec![1i32, 2].into_iter();\n\n let p2 = vec![\"10\", \"20\"].into_iter();\n\n let p3 = vec![100u16, 200].into_iter();\n\n let p4 = vec![1000i64, 2000].into_iter();\n\n\n\n let e = zip!([p1,p2,p3,p4]).collect::<Vec<_>>();\n\n assert_eq!(e[0], (1i32,\"10\",100u16,1000i64));\n\n}\n", "file_path": "tests/run-pass/issue-29746.rs", "rank": 96, "score": 225151.98313413846 }, { "content": "fn main() {\n\n match Foo::Foo(1) {\n\n _ => ()\n\n }\n\n}\n", "file_path": "tests/run-pass/issue-34571.rs", "rank": 97, "score": 225151.98313413846 }, { "content": "fn main() {\n\n let foo: &Foo<[u8]> = &Foo { a: 32, inner: [1, 2, 3] };\n\n check_val(foo);\n\n check_dst_val(foo);\n\n check_both(foo);\n\n\n\n let foo: &Foo<Get> = &Foo { a: 32, inner: 32 };\n\n check_trait_obj(foo);\n\n}\n", "file_path": "tests/run-pass/issue-23261.rs", "rank": 98, "score": 225151.98313413846 }, { "content": "fn main() {\n\n let fx = f as for<'t> fn(&'t str) -> (&'t str, <&'t str as Stream>::Item);\n\n assert_eq!(fx(\"hi\"), (\"hi\", 42));\n\n}\n", "file_path": "tests/run-pass/issue-27901.rs", "rank": 99, "score": 225151.98313413846 } ]
Rust
src/profiles.rs
rafamatias/rogcat
d90d1e02784b965c49e6ea67815e60b5c11a0e76
use clap::ArgMatches; use failure::{err_msg, Error}; use std::collections::HashMap; use std::convert::Into; use std::env::var; use std::fs::File; use std::io::prelude::*; use std::ops::AddAssign; use std::path::PathBuf; use toml::{from_str, to_string}; const EXTEND_LIMIT: u32 = 1000; #[derive(Clone, Debug, Default, Deserialize, Serialize)] pub struct ProfileFile { extends: Option<Vec<String>>, comment: Option<String>, highlight: Option<Vec<String>>, message: Option<Vec<String>>, tag: Option<Vec<String>>, } impl Into<Profile> for ProfileFile { fn into(self) -> Profile { Profile { comment: self.comment, extends: self.extends.unwrap_or_else(|| vec![]), highlight: self.highlight.unwrap_or_else(|| vec![]), message: self.message.unwrap_or_else(|| vec![]), tag: self.tag.unwrap_or_else(|| vec![]), } } } #[derive(Debug, Default, Deserialize, Serialize)] struct ConfigurationFile { profile: HashMap<String, ProfileFile>, } #[derive(Clone, Debug, Default, PartialEq)] pub struct Profile { comment: Option<String>, extends: Vec<String>, highlight: Vec<String>, message: Vec<String>, tag: Vec<String>, } impl Profile { pub fn comment(&self) -> &Option<String> { &self.comment } pub fn highlight(&self) -> &Vec<String> { &self.highlight } pub fn message(&self) -> &Vec<String> { &self.message } pub fn tag(&self) -> &Vec<String> { &self.tag } } impl AddAssign for Profile { fn add_assign(&mut self, other: Profile) { macro_rules! 
vec_extend { ($x:expr, $y:expr) => { $x.extend($y); $x.sort(); $x.dedup(); }; } vec_extend!(self.extends, other.extends); vec_extend!(self.highlight, other.highlight); vec_extend!(self.message, other.message); vec_extend!(self.tag, other.tag); } } #[derive(Debug, Default)] pub struct Profiles { file: PathBuf, profile: Profile, profiles: HashMap<String, Profile>, } impl Profiles { pub fn new(args: &ArgMatches) -> Result<Self, Error> { let file = Self::file(Some(args))?; if !file.exists() { Ok(Profiles { file, ..Default::default() }) } else { let mut config = String::new(); File::open(file.clone()) .map_err(|e| format_err!("Failed to open {}: {}", file.display(), e))? .read_to_string(&mut config)?; let mut config_file: ConfigurationFile = from_str(&config) .map_err(|e| format_err!("Failed to parse {}: {}", file.display(), e))?; let profiles: HashMap<String, Profile> = config_file .profile .drain() .map(|(k, v)| (k, v.into())) .collect(); let mut profile = Profile::default(); if let Some(n) = args.value_of("profile") { profile = profiles .get(n) .ok_or_else(|| format_err!("Unknown profile {}", n))? 
.clone(); Self::expand(n, &mut profile, &profiles)?; } Ok(Profiles { file, profile, profiles, }) } } fn expand(n: &str, p: &mut Profile, a: &HashMap<String, Profile>) -> Result<(), Error> { let mut loops = EXTEND_LIMIT; while !p.extends.is_empty() { let extends = p.extends.clone(); p.extends.clear(); for e in &extends { let f = a.get(e).ok_or_else(|| { format_err!("Unknown extend profile name {} used in {}", e, n) })?; *p += f.clone(); } loops -= 1; if loops == 0 { return Err(format_err!( "Reached recursion limit while resolving profile {} extends", n )); } } Ok(()) } pub fn profile(&self) -> Profile { self.profile.clone() } pub fn subcommand(self, args: &ArgMatches) -> Result<i32, Error> { if args.is_present("list") { if self.profiles.is_empty() { println!("No profiles present in \"{}\".", self.file.display()); } else { println!("Available profiles in \"{}\":", self.file.display()); for (k, v) in self.profiles { println!( " * {}{}", k, v.comment() .clone() .map(|c| format!(": {}", c)) .unwrap_or_else(|| "".into()) ); } } Ok(0) } else if args.is_present("examples") { let mut example = ConfigurationFile::default(); example.profile.insert( "W hitespace".into(), ProfileFile { comment: Some( "Profile names can contain whitespaces. 
Quote on command line...".into(), ), ..Default::default() }, ); example.profile.insert( "rogcat".into(), ProfileFile { comment: Some("Only tag \"rogcat\"".into()), tag: Some(vec!["^rogcat$".into()]), ..Default::default() }, ); example.profile.insert( "Comments are optional".into(), ProfileFile { tag: Some(vec!["rogcat".into()]), ..Default::default() }, ); example.profile.insert( "A".into(), ProfileFile { comment: Some("Messages starting with A".into()), message: Some(vec!["^A.*".into()]), ..Default::default() }, ); example.profile.insert( "B".into(), ProfileFile { comment: Some("Messages starting with B".into()), message: Some(vec!["^B.*".into()]), ..Default::default() }, ); example.profile.insert( "ABC".into(), ProfileFile { extends: Some(vec!["A".into(), "B".into()]), comment: Some("Profiles A, B plus the following filter (^C.*)".into()), message: Some(vec!["^C.*".into()]), ..Default::default() }, ); example.profile.insert( "complex".into(), ProfileFile { comment: Some( "Profiles can be complex. This one is probably very useless.".into(), ), tag: Some(vec!["b*".into(), "!adb".into()]), message: Some(vec!["^R.*".into(), "!^A.*".into(), "!^A.*".into()]), highlight: Some(vec!["blah".into()]), ..Default::default() }, ); to_string(&example) .map_err(|e| format_err!("Internal example serialization error: {}", e)) .map(|s| { println!("Example profiles:"); println!(); println!("{}", s); 0 }) } else { Err(err_msg("Missing option for profiles subcommand!")) } } pub fn file(args: Option<&ArgMatches>) -> Result<PathBuf, Error> { if let Some(args) = args { if args.is_present("profiles_path") { let f = PathBuf::from(value_t!(args, "profiles_path", String)?); if f.exists() { return Ok(f); } else { return Err(format_err!( "Cannot find {}. 
Use --profiles_path to specify the path manually!", f.display() )); } } } if let Ok(f) = var("ROGCAT_PROFILES").map(PathBuf::from) { if f.exists() { return Ok(f); } else { Err(format_err!( "Cannot find {} set in ROGCAT_PROFILES!", f.display() )) } } else { Ok(::config_dir().join("profiles.toml")) } } }
use clap::ArgMatches; use failure::{err_msg, Error}; use std::collections::HashMap; use std::convert::Into; use std::env::var; use std::fs::File; use std::io::prelude::*; use std::ops::AddAssign; use std::path::PathBuf; use toml::{from_str, to_string}; const EXTEND_LIMIT: u32 = 1000; #[derive(Clone, Debug, Default, Deserialize, Serialize)] pub struct ProfileFile { extends: Option<Vec<String>>, comment: Option<String>, highlight: Option<Vec<String>>, message: Option<Vec<String>>, tag: Option<Vec<String>>, } impl Into<Profile> for ProfileFile { fn into(self) -> Profile { Profile { comment: self.comment, extends: self.extends.unwrap_or_else(|| vec![]), highlight: self.highlight.unwrap_or_else(|| vec![]), message: self.message.unwrap_or_else(|| vec![]), tag: self.tag.unwrap_or_else(|| vec![]), } } } #[derive(Debug, Default, Deserialize, Serialize)] struct ConfigurationFile { profile: HashMap<String, ProfileFile>, } #[derive(Clone, Debug, Default, PartialEq)] pub struct Profile { comment: Option<String>, extends: Vec<String>, highlight: Vec<String>, message: Vec<String>, tag: Vec<String>, } impl Profile { pub fn comment(&self) -> &Option<String> { &self.comment } pub fn highlight(&self) -> &Vec<String> { &self.highlight } pub fn message(&self) -> &Vec<String> { &self.message } pub fn tag(&self) -> &Vec<String> { &self.tag } } impl AddAssign for Profile { fn add_assign(&mut self, other: Profile) { macro_rules! 
vec_extend { ($x:expr, $y:expr) => { $x.extend($y); $x.sort(); $x.dedup(); }; } vec_extend!(self.extends, other.extends); vec_extend!(self.highlight, other.highlight); vec_extend!(self.message, other.message); vec_extend!(self.tag, other.tag); } } #[derive(Debug, Default)] pub struct Profiles { file: PathBuf, profile: Profile, profiles: HashMap<String, Profile>, } impl Profiles { pub fn new(args: &ArgMatches) -> Result<Self, Error> { let file = Self::file(Some(args))?; if !file.exists() { Ok(Profiles { file, ..Default::default() }) } else { let mut config = String::new(); File::open(file.clone()) .map_err(|e| format_err!("Failed to open {}: {}", file.display(), e))? .read_to_string(&mut config)?; let mut config_file: ConfigurationFile = from_str(&config) .map_err(|e| format_err!("Failed to parse {}: {}", file.display(), e))?; let profiles: HashMap<String, Profile> = config_file .profile .drain() .map(|(k, v)| (k, v.into())) .collect(); let mut profile = Profile::default();
Ok(Profiles { file, profile, profiles, }) } } fn expand(n: &str, p: &mut Profile, a: &HashMap<String, Profile>) -> Result<(), Error> { let mut loops = EXTEND_LIMIT; while !p.extends.is_empty() { let extends = p.extends.clone(); p.extends.clear(); for e in &extends { let f = a.get(e).ok_or_else(|| { format_err!("Unknown extend profile name {} used in {}", e, n) })?; *p += f.clone(); } loops -= 1; if loops == 0 { return Err(format_err!( "Reached recursion limit while resolving profile {} extends", n )); } } Ok(()) } pub fn profile(&self) -> Profile { self.profile.clone() } pub fn subcommand(self, args: &ArgMatches) -> Result<i32, Error> { if args.is_present("list") { if self.profiles.is_empty() { println!("No profiles present in \"{}\".", self.file.display()); } else { println!("Available profiles in \"{}\":", self.file.display()); for (k, v) in self.profiles { println!( " * {}{}", k, v.comment() .clone() .map(|c| format!(": {}", c)) .unwrap_or_else(|| "".into()) ); } } Ok(0) } else if args.is_present("examples") { let mut example = ConfigurationFile::default(); example.profile.insert( "W hitespace".into(), ProfileFile { comment: Some( "Profile names can contain whitespaces. 
Quote on command line...".into(), ), ..Default::default() }, ); example.profile.insert( "rogcat".into(), ProfileFile { comment: Some("Only tag \"rogcat\"".into()), tag: Some(vec!["^rogcat$".into()]), ..Default::default() }, ); example.profile.insert( "Comments are optional".into(), ProfileFile { tag: Some(vec!["rogcat".into()]), ..Default::default() }, ); example.profile.insert( "A".into(), ProfileFile { comment: Some("Messages starting with A".into()), message: Some(vec!["^A.*".into()]), ..Default::default() }, ); example.profile.insert( "B".into(), ProfileFile { comment: Some("Messages starting with B".into()), message: Some(vec!["^B.*".into()]), ..Default::default() }, ); example.profile.insert( "ABC".into(), ProfileFile { extends: Some(vec!["A".into(), "B".into()]), comment: Some("Profiles A, B plus the following filter (^C.*)".into()), message: Some(vec!["^C.*".into()]), ..Default::default() }, ); example.profile.insert( "complex".into(), ProfileFile { comment: Some( "Profiles can be complex. This one is probably very useless.".into(), ), tag: Some(vec!["b*".into(), "!adb".into()]), message: Some(vec!["^R.*".into(), "!^A.*".into(), "!^A.*".into()]), highlight: Some(vec!["blah".into()]), ..Default::default() }, ); to_string(&example) .map_err(|e| format_err!("Internal example serialization error: {}", e)) .map(|s| { println!("Example profiles:"); println!(); println!("{}", s); 0 }) } else { Err(err_msg("Missing option for profiles subcommand!")) } } pub fn file(args: Option<&ArgMatches>) -> Result<PathBuf, Error> { if let Some(args) = args { if args.is_present("profiles_path") { let f = PathBuf::from(value_t!(args, "profiles_path", String)?); if f.exists() { return Ok(f); } else { return Err(format_err!( "Cannot find {}. 
Use --profiles_path to specify the path manually!", f.display() )); } } } if let Ok(f) = var("ROGCAT_PROFILES").map(PathBuf::from) { if f.exists() { return Ok(f); } else { Err(format_err!( "Cannot find {} set in ROGCAT_PROFILES!", f.display() )) } } else { Ok(::config_dir().join("profiles.toml")) } } }
if let Some(n) = args.value_of("profile") { profile = profiles .get(n) .ok_or_else(|| format_err!("Unknown profile {}", n))? .clone(); Self::expand(n, &mut profile, &profiles)?; }
if_condition
[ { "content": "pub fn file_content(file: &PathBuf) -> Result<SVec, Error> {\n\n let content = BufReader::new(File::open(file)?)\n\n .lines()\n\n .map(|e| e.unwrap())\n\n .collect();\n\n Ok(content)\n\n}\n\n\n", "file_path": "src/tests/utils.rs", "rank": 0, "score": 187593.4565125723 }, { "content": "pub fn check_file_content(file: &PathBuf, content: &SVec) -> Result<bool, Error> {\n\n Ok(content == &file_content(file)?)\n\n}\n\n\n", "file_path": "src/tests/utils.rs", "rank": 1, "score": 169937.35839397652 }, { "content": "pub fn devices(core: &mut Core) -> Result<i32, Error> {\n\n let mut child = Command::new(adb()?)\n\n .arg(\"devices\")\n\n .stdout(Stdio::piped())\n\n .spawn_async()?;\n\n let stdout = child\n\n .stdout()\n\n .take()\n\n .ok_or_else(|| err_msg(\"Failed to read stdout of adb\"))?;\n\n let reader = BufReader::new(stdout);\n\n let lines = lines(reader);\n\n let result = lines.skip(1).for_each(|l| {\n\n if !l.is_empty() && !l.starts_with(\"* daemon\") {\n\n let mut s = l.split_whitespace();\n\n let id: &str = s.next().unwrap_or(\"unknown\");\n\n let name: &str = s.next().unwrap_or(\"unknown\");\n\n println!(\"{} {}\", id, name);\n\n }\n\n Ok(())\n\n });\n\n\n\n core.run(result)\n\n .map_err(|e| format_err!(\"{}\", e))\n\n .map(|_| 0)\n\n}\n", "file_path": "src/devices.rs", "rank": 2, "score": 146237.93315308663 }, { "content": "pub fn tempfile_with_content(c: &SVec) -> Result<PathBuf, Error> {\n\n let path = tempfile()?;\n\n File::create(path.clone())?.write_all(c.join(\"\\n\").as_bytes())?;\n\n Ok(path)\n\n}\n\n\n", "file_path": "src/tests/utils.rs", "rank": 3, "score": 143256.15783120616 }, { "content": "fn run_rogcat_with_config_and_input_file(args: &SVec, payload: &SVec) -> Result<SVec, Error> {\n\n let lines = CONFIG.lines().map(|s| s.to_string()).collect();\n\n let config = tempfile_with_content(&lines)?.display().to_string();\n\n let mut a = svec!(\"-P\", config);\n\n a.extend(args.clone());\n\n let output = run_rogcat_with_input_file(&a, 
payload)\n\n .expect(\"Failed to run rogcat with config and input file\");\n\n assert!(output.0);\n\n Ok(output.1)\n\n}\n\n\n", "file_path": "src/tests/profiles.rs", "rank": 4, "score": 143062.74634906685 }, { "content": "pub fn tempfile() -> Result<PathBuf, Error> {\n\n let mut path = tempdir()?;\n\n let filename: String = thread_rng().sample_iter(&Alphanumeric).take(8).collect();\n\n path.push(filename);\n\n Ok(path)\n\n}\n\n\n", "file_path": "src/tests/utils.rs", "rank": 5, "score": 137355.1691869461 }, { "content": "pub fn tempdir() -> Result<PathBuf, Error> {\n\n TempDir::new(\"rogcat\")\n\n .map(|e| e.into_path())\n\n .map_err(|e| format_err!(\"Tempdir error: {}\", e))\n\n}\n\n\n", "file_path": "src/tests/utils.rs", "rank": 6, "score": 137355.1691869461 }, { "content": "pub fn run_rogcat_with_input_file(args: &SVec, payload: &SVec) -> Result<(bool, SVec), Error> {\n\n let input = tempfile_with_content(payload).expect(\"Failed to crate input file\");\n\n let mut a = svec!(\"-i\", format!(\"{}\", input.display()));\n\n a.extend(args.clone());\n\n run_rogcat(&a, None)\n\n}\n\n\n", "file_path": "src/tests/utils.rs", "rank": 7, "score": 134538.36682486997 }, { "content": "/// Performs a dumpstate and write to fs. 
Note: The Android 7+ dumpstate is not supported.\n\npub fn create(args: &ArgMatches, core: &mut Core) -> Result<i32, Error> {\n\n let filename = value_t!(args.value_of(\"file\"), String).unwrap_or(report_filename()?);\n\n let filename_path = PathBuf::from(&filename);\n\n if !args.is_present(\"overwrite\") && filename_path.exists() {\n\n return Err(format_err!(\"File {} already exists\", filename));\n\n }\n\n\n\n let mut child = Command::new(adb()?)\n\n .arg(\"bugreport\")\n\n .stdout(Stdio::piped())\n\n .stderr(Stdio::piped())\n\n .spawn_async()?;\n\n let stdout = child\n\n .stdout()\n\n .take()\n\n .ok_or_else(|| err_msg(\"Failed get stdout\"))?;\n\n let stdout_reader = BufReader::new(stdout);\n\n\n\n let dir = filename_path.parent().unwrap_or_else(|| Path::new(\"\"));\n\n if !dir.is_dir() {\n", "file_path": "src/bugreport.rs", "rank": 8, "score": 131472.3552242795 }, { "content": "pub fn run(args: &ArgMatches, core: &mut Core) -> Result<i32, Error> {\n\n let message = args.value_of(\"MESSAGE\").unwrap_or(\"\");\n\n let tag = args.value_of(\"tag\").unwrap_or(\"Rogcat\").to_owned();\n\n let level = Level::from(args.value_of(\"level\").unwrap_or(\"\"));\n\n match message {\n\n \"-\" => {\n\n let sink = Logger {\n\n handle: core.handle(),\n\n tag,\n\n level,\n\n };\n\n\n\n let input = stdin_reader(core)?;\n\n let stream = sink.send_all(input);\n\n core.run(stream)\n\n .map_err(|e| format_err!(\"Failed to run \\\"adb shell log\\\": {}\", e))\n\n .map(|_| 0)\n\n }\n\n _ => {\n\n let child = Command::new(adb()?)\n", "file_path": "src/log.rs", "rank": 9, "score": 131472.3552242795 }, { "content": "#[test]\n\nfn highlight() {\n\n let input = svec!(\"A\", \"B\", \"C\");\n\n let output = run_rogcat_with_config_and_input_file(&svec!(\"-p\", \"Highlight\"), &input).unwrap();\n\n assert_eq!(output.len(), 2);\n\n}\n", "file_path": "src/tests/profiles.rs", "rank": 10, "score": 129593.04810688662 }, { "content": "pub fn tcp_reader(addr: &SocketAddr, core: &mut Core) -> 
Result<RStream, Error> {\n\n let handle = core.handle();\n\n let s = core\n\n .run(TcpStream::connect(addr, &handle))\n\n .map(|s| Decoder::framed(LossyLineCodec {}, s))\n\n .map_err(|e| format_err!(\"Failed to connect: {}\", e))?\n\n .map(Some)\n\n .map_err(|e| e.into());\n\n Ok(Box::new(s))\n\n}\n\n\n\nnamed!(\n\n num_usize<usize>,\n\n map_res!(map_res!(digit, from_utf8), str::parse::<usize>)\n\n);\n\n\n\nnamed!(\n\n baudrate<::serial::BaudRate>,\n\n map!(num_usize, |b| match b {\n\n 110 => ::serial::Baud110,\n", "file_path": "src/reader.rs", "rank": 11, "score": 126419.45021740384 }, { "content": "#[test]\n\nfn extends_message_a_b_c() {\n\n let input = svec!(\"A\", \"B\", \"C\");\n\n\n\n let output = run_rogcat_with_config_and_input_file(&svec!(\"-p\", \"AB\"), &input).unwrap();\n\n assert_eq!(output.len(), 2);\n\n\n\n let output =\n\n run_rogcat_with_config_and_input_file(&svec!(\"-p\", \"AB\", \"-m\", \"C\"), &input).unwrap();\n\n assert_eq!(output.len(), 3);\n\n\n\n let output = run_rogcat_with_config_and_input_file(&svec!(\"-p\", \"ABC\"), &input).unwrap();\n\n assert_eq!(output.len(), 3);\n\n}\n\n\n", "file_path": "src/tests/profiles.rs", "rank": 12, "score": 126241.07389752517 }, { "content": "pub fn run_rogcat(args: &SVec, input: Option<SVec>) -> Result<(bool, SVec), Error> {\n\n let rogcat = find_rogcat_binary();\n\n let mut process = Command::new(format!(\"{}\", rogcat.display()))\n\n .args(args)\n\n .stdin(Stdio::piped())\n\n .stderr(Stdio::piped())\n\n .stdout(Stdio::piped())\n\n .spawn()\n\n .expect(\"Failed to run rogcat\");\n\n\n\n {\n\n if let Some(input) = input {\n\n let stdin = process.stdin.as_mut().expect(\"failed to get stdin\");\n\n let mut input = input.join(\"\\n\");\n\n if input.len() != 0 {\n\n input.push('\\n');\n\n }\n\n stdin.write_all(input.as_bytes()).unwrap();\n\n }\n\n }\n\n\n\n let output = process.wait_with_output().expect(\"Failed to run rogcat\");\n\n let stdout = String::from_utf8(output.stdout)\n\n .expect(\"Malformed 
stdout\")\n\n .lines()\n\n .map(|s| s.to_string())\n\n .collect();\n\n Ok((output.status.success(), stdout))\n\n}\n\n\n", "file_path": "src/tests/utils.rs", "rank": 13, "score": 117739.02302199963 }, { "content": "pub fn serial_reader<'a>(args: &ArgMatches<'a>, core: &Core) -> Result<RStream, Error> {\n\n let i = args\n\n .value_of(\"input\")\n\n .ok_or_else(|| err_msg(\"Invalid input value\"))?;\n\n let p = match serial(i.as_bytes()) {\n\n IResult::Done(_, v) => v,\n\n IResult::Error(_) => return Err(err_msg(\"Failed to parse serial port settings\")),\n\n IResult::Incomplete(_) => return Err(err_msg(\"Serial port settings are incomplete\")),\n\n };\n\n let mut port = ::serial::open(&p.0)?;\n\n port.configure(&p.1)?;\n\n port.set_timeout(Duration::from_secs(u64::MAX))?;\n\n\n\n records(port, core)\n\n}\n\n\n", "file_path": "src/reader.rs", "rank": 15, "score": 112353.96383943936 }, { "content": "pub fn file_reader<'a>(args: &ArgMatches<'a>, core: &Core) -> Result<RStream, Error> {\n\n let files = args\n\n .values_of(\"input\")\n\n .map(|f| f.map(PathBuf::from).collect::<Vec<PathBuf>>())\n\n .ok_or_else(|| err_msg(\"Failed to parse input files\"))?;\n\n\n\n let mut streams = Vec::new();\n\n for f in &files {\n\n if !f.exists() {\n\n return Err(format_err!(\"Cannot open {}\", f.display()));\n\n }\n\n\n\n let file =\n\n File::open(f).map_err(|e| format_err!(\"Failed to open {}: {}\", f.display(), e))?;\n\n\n\n streams.push(records(file, core)?);\n\n }\n\n\n\n // The flattened streams emmit None in between - filter those\n\n // here...\n", "file_path": "src/reader.rs", "rank": 16, "score": 112249.72835833492 }, { "content": "pub fn stdin_reader(core: &Core) -> Result<RStream, Error> {\n\n records(Box::new(stdin()), core)\n\n}\n\n\n", "file_path": "src/reader.rs", "rank": 17, "score": 106999.294539192 }, { "content": "pub fn subcommand_completions(args: &ArgMatches) -> Result<i32, Error> {\n\n args.value_of(\"shell\")\n\n .ok_or_else(|| err_msg(\"Required shell argument 
is missing\"))\n\n .map(|s| s.parse::<Shell>())\n\n .map(|s| {\n\n cli().gen_completions_to(crate_name!(), s.unwrap(), &mut stdout());\n\n 0\n\n })\n\n}\n", "file_path": "src/cli.rs", "rank": 18, "score": 106999.294539192 }, { "content": "pub fn find_rogcat_binary() -> PathBuf {\n\n let exe = env::current_exe().unwrap();\n\n let this_dir = exe.parent().unwrap();\n\n let dirs = &[&this_dir, &this_dir.parent().unwrap()];\n\n dirs.iter()\n\n .map(|d| d.join(\"rogcat\").with_extension(env::consts::EXE_EXTENSION))\n\n .filter_map(|d| fs::metadata(&d).ok().map(|_| d))\n\n .next()\n\n .expect(&format!(\n\n \"Error: rogcat binary not found, looked in `{:?}`. Do you need to run `cargo build`?\",\n\n dirs\n\n ))\n\n}\n\n\n", "file_path": "src/tests/utils.rs", "rank": 19, "score": 103535.32720682559 }, { "content": "fn adb() -> Result<PathBuf, Error> {\n\n which_in(\"adb\", env::var_os(\"PATH\"), env::current_dir()?)\n\n .map_err(|e| format_err!(\"Cannot find adb: {}\", e))\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 20, "score": 102622.70114712376 }, { "content": "pub fn runner<'a>(args: &ArgMatches<'a>) -> Result<RStream, Error> {\n\n let (cmd, restart) = if let Ok(cmd) = value_t!(args, \"COMMAND\", String) {\n\n (cmd, args.is_present(\"restart\"))\n\n } else {\n\n let adb = format!(\"{}\", adb()?.display());\n\n let mut logcat_args = vec![];\n\n\n\n let mut restart = args.is_present(\"restart\");\n\n if !restart {\n\n restart = ::config_get::<bool>(\"restart\").unwrap_or(true);\n\n }\n\n\n\n if args.is_present(\"tail\") {\n\n let count = value_t!(args, \"tail\", u32).unwrap_or_else(|e| e.exit());\n\n logcat_args.push(format!(\"-t {}\", count));\n\n restart = false;\n\n };\n\n\n\n if args.is_present(\"dump\") {\n\n logcat_args.push(\"-d\".to_owned());\n", "file_path": "src/runner.rs", "rank": 21, "score": 102412.11005571342 }, { "content": "fn input(core: &mut Core, args: &ArgMatches) -> Result<RStream, Error> {\n\n if args.is_present(\"input\") {\n\n let input = 
args\n\n .value_of(\"input\")\n\n .ok_or_else(|| err_msg(\"Invalid input value\"))?;\n\n match Url::parse(input) {\n\n Ok(url) => match url.scheme() {\n\n \"serial\" => serial_reader(args, core),\n\n _ => file_reader(args, core),\n\n },\n\n _ => file_reader(args, core),\n\n }\n\n } else {\n\n match args.value_of(\"COMMAND\") {\n\n Some(c) => {\n\n if c == \"-\" {\n\n stdin_reader(core)\n\n } else if let Ok(url) = Url::parse(c) {\n\n match url.scheme() {\n\n \"tcp\" => {\n", "file_path": "src/main.rs", "rank": 22, "score": 99176.21328437827 }, { "content": "#[test]\n\nfn extends_circle() {\n\n let lines = CONFIG.lines().map(|s| s.to_string()).collect();\n\n let config = tempfile_with_content(&lines).unwrap().display().to_string();\n\n // This is supposed to fail!\n\n let args = svec!(\"--profiles-path\", config, \"-p\", \"CircleA\");\n\n let output = run_rogcat(&args, None).unwrap();\n\n assert!(!output.0);\n\n}\n\n\n", "file_path": "src/tests/profiles.rs", "rank": 23, "score": 98170.70363336273 }, { "content": "#[test]\n\nfn cannot_find_config() {\n\n let file = tempfile().unwrap().display().to_string();\n\n let args = svec!(\"-C\", file);\n\n let output = run_rogcat_with_input_file(&args, &vec![])\n\n .expect(\"Failed to run rogcat with config and input file\");\n\n assert!(!output.0);\n\n}\n\n\n", "file_path": "src/tests/profiles.rs", "rank": 24, "score": 98096.78725183892 }, { "content": "#[test]\n\nfn malformed_config() {\n\n let config = \"[\";\n\n let config = tempfile_with_content(&svec!(config))\n\n .unwrap()\n\n .display()\n\n .to_string();\n\n let args = svec!(\"-C\", config);\n\n let output = run_rogcat_with_input_file(&args, &vec![])\n\n .expect(\"Failed to run rogcat with config and input file\");\n\n assert!(!output.0);\n\n}\n\n\n", "file_path": "src/tests/profiles.rs", "rank": 25, "score": 98096.78725183892 }, { "content": "#[test]\n\nfn filter_message_a() {\n\n let input = svec!(\"A\", \"B\", \"C\");\n\n let output = 
run_rogcat_with_config_and_input_file(&svec!(\"-p\", \"A\"), &input).unwrap();\n\n assert_eq!(output.len(), 1);\n\n}\n\n\n", "file_path": "src/tests/profiles.rs", "rank": 26, "score": 98048.09380388145 }, { "content": "#[test]\n\nfn filter_message_a_b() {\n\n let input = svec!(\"A\", \"B\", \"C\");\n\n\n\n let output =\n\n run_rogcat_with_config_and_input_file(&svec!(\"-p\", \"A\", \"-m\", \"B\"), &input).unwrap();\n\n assert_eq!(output.len(), 2);\n\n}\n\n\n", "file_path": "src/tests/profiles.rs", "rank": 27, "score": 98048.09380388145 }, { "content": "/// Detect configuration directory\n\nfn config_dir() -> PathBuf {\n\n directories::BaseDirs::new()\n\n .unwrap()\n\n .config_dir()\n\n .join(\"rogcat\")\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 28, "score": 91310.61718067252 }, { "content": "struct ZipFile {\n\n zip: ZipWriter<File>,\n\n}\n\n\n\nimpl ZipFile {\n\n fn new(filename: &str) -> Result<Self, Error> {\n\n let file = File::create(&format!(\"{}.zip\", filename))?;\n\n let options = FileOptions::default()\n\n .compression_method(CompressionMethod::Deflated)\n\n .unix_permissions(0o644);\n\n let filename_path = PathBuf::from(&filename);\n\n let f = filename_path\n\n .file_name()\n\n .and_then(|f| f.to_str())\n\n .ok_or_else(|| err_msg(\"Failed to get filename\"))?;\n\n let mut zip = ZipWriter::new(file);\n\n zip.start_file(f, options)?;\n\n Ok(ZipFile { zip })\n\n }\n\n}\n", "file_path": "src/bugreport.rs", "rank": 29, "score": 85648.9375086555 }, { "content": "fn run() -> Result<i32, Error> {\n\n let args = cli().get_matches();\n\n let config_file = config_dir().join(\"config.toml\");\n\n CONFIG\n\n .write()\n\n .map_err(|e| format_err!(\"Failed to get config lock: {}\", e))?\n\n .merge(config::File::from(config_file))\n\n .ok();\n\n let profiles = Profiles::new(&args)?;\n\n let profile = profiles.profile();\n\n let mut core = Core::new()?;\n\n\n\n match args.subcommand() {\n\n (\"bugreport\", Some(sub_matches)) => exit(bugreport::create(sub_matches, 
&mut core)?),\n\n (\"completions\", Some(sub_matches)) => exit(cli::subcommand_completions(sub_matches)?),\n\n (\"devices\", _) => exit(devices::devices(&mut core)?),\n\n (\"log\", Some(sub_matches)) => exit(log::run(sub_matches, &mut core)?),\n\n (\"profiles\", Some(sub_matches)) => exit(profiles.subcommand(sub_matches)?),\n\n (_, _) => (),\n\n }\n", "file_path": "src/main.rs", "rank": 30, "score": 83809.81728971723 }, { "content": "pub fn terminal_width() -> Option<usize> {\n\n match dimensions() {\n\n Some((width, _)) => Some(width),\n\n None => env::var(\"COLUMNS\")\n\n .ok()\n\n .and_then(|e| e.parse::<usize>().ok()),\n\n }\n\n}\n\n\n\npub struct LossyLines<A> {\n\n io: A,\n\n buffer: Vec<u8>,\n\n}\n\n\n", "file_path": "src/utils.rs", "rank": 31, "score": 82179.78224713002 }, { "content": "fn report_filename() -> Result<String, Error> {\n\n #[cfg(not(windows))]\n\n let sep = \":\";\n\n #[cfg(windows)]\n\n let sep = \"_\";\n\n\n\n let format = format!(\"%m-%d_%H{}%M{}%S\", sep, sep);\n\n Ok(format!(\"{}-bugreport.txt\", strftime(&format, &now())?))\n\n}\n\n\n", "file_path": "src/bugreport.rs", "rank": 32, "score": 81642.02541470985 }, { "content": "pub fn cli() -> App<'static, 'static> {\n\n App::new(crate_name!())\n\n .setting(AppSettings::ColoredHelp)\n\n .version(crate_version!())\n\n .author(crate_authors!())\n\n .about(ABOUT.as_str())\n\n .arg(Arg::with_name(\"buffer\")\n\n .short(\"b\")\n\n .long(\"buffer\")\n\n .multiple(true)\n\n .takes_value(true)\n\n .conflicts_with_all(&[\"input\", \"COMMAND\"])\n\n .help(\"Select specific (logcat) log buffers. 
Defaults to main, events, kernel and crash (logcat default)\"))\n\n .arg(Arg::with_name(\"clear\")\n\n .short(\"c\")\n\n .long(\"clear\")\n\n .help(\"Clear (flush) the entire log and exit\"))\n\n .arg(Arg::with_name(\"color\")\n\n .long(\"color\")\n\n .takes_value(true)\n", "file_path": "src/cli.rs", "rank": 33, "score": 79962.25432911428 }, { "content": "#[test]\n\nfn list_profiles() {\n\n let output = run_rogcat(&svec!(\"profiles\", \"--list\"), None).unwrap();\n\n assert!(output.0);\n\n assert!(output.1.len() >= 1); // check for >1 if default location settings are found\n\n\n\n let output = run_rogcat(&svec!(\"profiles\", \"-l\"), None).unwrap();\n\n assert!(output.1.len() >= 1); // check for >1 if default location settings are found\n\n\n\n let output = run_rogcat_with_config_and_input_file(&svec!(\"profiles\", \"-l\"), &vec![]).unwrap();\n\n assert_eq!(output.len(), 7);\n\n\n\n let output =\n\n run_rogcat_with_config_and_input_file(&svec!(\"profiles\", \"--list\"), &vec![]).unwrap();\n\n assert_eq!(output.len(), 7);\n\n}\n\n\n", "file_path": "src/tests/profiles.rs", "rank": 34, "score": 78637.82340276298 }, { "content": "#[test]\n\nfn multiple_files() {\n\n let content = svec!(\"A\", \"B\", \"C\");\n\n let a = tempfile_with_content(&content)\n\n .unwrap()\n\n .display()\n\n .to_string();\n\n let b = tempfile_with_content(&content)\n\n .unwrap()\n\n .display()\n\n .to_string();\n\n let args = svec!(\"-i\", a, \"-i\", b);\n\n let output = run_rogcat(&args, None).unwrap();\n\n assert!(output.0);\n\n assert_eq!(output.1.len(), 6);\n\n}\n", "file_path": "src/tests/file_reader.rs", "rank": 35, "score": 76879.78821442496 }, { "content": "pub fn lossy_lines<A>(a: A) -> LossyLines<A>\n\nwhere\n\n A: AsyncRead + BufRead,\n\n{\n\n LossyLines {\n\n io: a,\n\n buffer: Vec::new(),\n\n }\n\n}\n\n\n\nimpl<A> Stream for LossyLines<A>\n\nwhere\n\n A: AsyncRead + BufRead,\n\n{\n\n type Item = String;\n\n type Error = ::std::io::Error;\n\n\n\n fn poll(&mut self) -> 
Poll<Option<String>, ::std::io::Error> {\n\n let n = try_nb!(self.io.read_until(b'\\n', &mut self.buffer));\n\n if n == 0 && self.buffer.is_empty() {\n", "file_path": "src/utils.rs", "rank": 36, "score": 76322.92244073858 }, { "content": "type Parse = fn(&str) -> Result<Record, Error>;\n\n\n\npub struct Parser {\n\n last: Option<Parse>,\n\n}\n\n\n\nimpl Parser {\n\n pub fn new() -> Self {\n\n Parser { last: None }\n\n }\n\n\n\n fn parse_default(line: &str) -> Result<Record, Error> {\n\n match printable(line.as_bytes()) {\n\n IResult::Done(_, mut v) => {\n\n v.raw = line.to_owned();\n\n Ok(v)\n\n }\n\n IResult::Error(e) => Err(format_err!(\"{}\", e)),\n\n IResult::Incomplete(_) => Err(err_msg(\"Not enough data\")),\n\n }\n", "file_path": "src/parser.rs", "rank": 37, "score": 75549.36802161724 }, { "content": "#[test]\n\nfn parse_serial_port() {\n\n let s = serial(\"serial://COM0@115200\".as_bytes()).unwrap().1;\n\n assert_eq!(\"COM0\", s.0);\n\n assert_eq!(::serial::Baud115200, s.1.baud_rate);\n\n assert_eq!(::serial::Bits8, s.1.char_size);\n\n assert_eq!(::serial::ParityNone, s.1.parity);\n\n assert_eq!(::serial::Stop1, s.1.stop_bits);\n\n\n\n let s = serial(\"serial:///dev/ttyUSB0@115200,7O2\".as_bytes())\n\n .unwrap()\n\n .1;\n\n assert_eq!(\"/dev/ttyUSB0\", s.0);\n\n assert_eq!(::serial::Baud115200, s.1.baud_rate);\n\n assert_eq!(::serial::Bits7, s.1.char_size);\n\n assert_eq!(::serial::ParityOdd, s.1.parity);\n\n assert_eq!(::serial::Stop2, s.1.stop_bits);\n\n}\n", "file_path": "src/reader.rs", "rank": 38, "score": 70483.99127253729 }, { "content": "#[test]\n\nfn parse_serial_baudrate() {\n\n assert_eq!(baudrate(\"110\".as_bytes()).unwrap().1, ::serial::Baud110);\n\n assert_eq!(baudrate(\"300\".as_bytes()).unwrap().1, ::serial::Baud300);\n\n assert_eq!(baudrate(\"600\".as_bytes()).unwrap().1, ::serial::Baud600);\n\n assert_eq!(baudrate(\"1200\".as_bytes()).unwrap().1, ::serial::Baud1200);\n\n assert_eq!(baudrate(\"2400\".as_bytes()).unwrap().1, 
::serial::Baud2400);\n\n assert_eq!(baudrate(\"4800\".as_bytes()).unwrap().1, ::serial::Baud4800);\n\n assert_eq!(baudrate(\"9600\".as_bytes()).unwrap().1, ::serial::Baud9600);\n\n assert_eq!(baudrate(\"19200\".as_bytes()).unwrap().1, ::serial::Baud19200);\n\n assert_eq!(baudrate(\"38400\".as_bytes()).unwrap().1, ::serial::Baud38400);\n\n assert_eq!(baudrate(\"57600\".as_bytes()).unwrap().1, ::serial::Baud57600);\n\n assert_eq!(\n\n baudrate(\"115200\".as_bytes()).unwrap().1,\n\n ::serial::Baud115200\n\n );\n\n assert_eq!(\n\n baudrate(\"921600\".as_bytes()).unwrap().1,\n\n ::serial::BaudOther(921600)\n\n );\n\n}\n\n\n", "file_path": "src/reader.rs", "rank": 39, "score": 70483.99127253729 }, { "content": "#[test]\n\nfn parse_serial_parity() {\n\n assert_eq!(parity(\"N\".as_bytes()).unwrap().1, ::serial::ParityNone);\n\n assert_eq!(parity(\"O\".as_bytes()).unwrap().1, ::serial::ParityOdd);\n\n assert_eq!(parity(\"E\".as_bytes()).unwrap().1, ::serial::ParityEven);\n\n}\n\n\n", "file_path": "src/reader.rs", "rank": 40, "score": 70483.99127253729 }, { "content": "#[test]\n\nfn filter_message() {\n\n let input = svec!(\"A\", \"B\", \"C\", \"D\", \"EF\", \"FE\");\n\n let output = run_rogcat_with_input_file(&svec!(\"-m\", \"A\"), &input).unwrap();\n\n assert!(output.0);\n\n assert_eq!(output.1.len(), 1);\n\n\n\n let output = run_rogcat_with_input_file(&svec!(\"-m\", \"A\", \"-m\", \"B\"), &input).unwrap();\n\n assert!(output.0);\n\n assert_eq!(output.1.len(), 2);\n\n}\n\n\n", "file_path": "src/tests/filter.rs", "rank": 41, "score": 70459.63264848651 }, { "content": "#[test]\n\nfn parse_serial_char_size() {\n\n assert_eq!(char_size(\"5\".as_bytes()).unwrap().1, ::serial::Bits5);\n\n assert_eq!(char_size(\"6\".as_bytes()).unwrap().1, ::serial::Bits6);\n\n assert_eq!(char_size(\"7\".as_bytes()).unwrap().1, ::serial::Bits7);\n\n assert_eq!(char_size(\"8\".as_bytes()).unwrap().1, ::serial::Bits8);\n\n}\n\n\n", "file_path": "src/reader.rs", "rank": 42, "score": 
68096.23282038153 }, { "content": "#[test]\n\nfn parse_serial_stop_bits() {\n\n assert_eq!(stop_bits(\"1\".as_bytes()).unwrap().1, ::serial::Stop1);\n\n assert_eq!(stop_bits(\"2\".as_bytes()).unwrap().1, ::serial::Stop2);\n\n}\n\n\n", "file_path": "src/reader.rs", "rank": 43, "score": 68096.23282038153 }, { "content": "#[test]\n\nfn filter_message_regex() {\n\n let input = svec!(\"A\", \"B\", \"CF\", \"D\", \"EF\", \"FE\", \"monkey\");\n\n let output = run_rogcat_with_input_file(&svec!(\"-m\", \"^.*nk.*\"), &input).unwrap();\n\n assert!(output.0);\n\n assert_eq!(output.1.len(), 1);\n\n\n\n let output = run_rogcat_with_input_file(&svec!(\"-m\", \"^E.*\"), &input).unwrap();\n\n assert!(output.0);\n\n assert_eq!(output.1.len(), 1);\n\n\n\n // match CF, EF, FE\n\n let output = run_rogcat_with_input_file(&svec!(\"-m\", \"^E.*\", \"-m\", \"^.*F\"), &input).unwrap();\n\n assert!(output.0);\n\n assert_eq!(output.1.len(), 3);\n\n}\n", "file_path": "src/tests/filter.rs", "rank": 44, "score": 68072.94275407137 }, { "content": "#[test]\n\nfn invalid_string() {\n\n let path = tempfile().unwrap();\n\n File::create(path.clone())\n\n .unwrap()\n\n .write_all(b\"some invalid bytes come here: \\xF0\\x90\\x80\\nhaha\")\n\n .unwrap();\n\n let args = svec!(\"-i\", format!(\"{}\", path.display()));\n\n let output = run_rogcat(&args, None).unwrap();\n\n assert!(output.0);\n\n assert_eq!(output.1.len(), 2);\n\n}\n\n\n", "file_path": "src/tests/file_reader.rs", "rank": 45, "score": 67955.41674098145 }, { "content": "#[test]\n\nfn compare_file_content() {\n\n let content = svec!(\"A\", \"B\", \"C\");\n\n let tempfile = tempfile_with_content(&content).expect(\"Failed to create tempfile with content\");\n\n assert!(check_file_content(&tempfile, &content).unwrap());\n\n}\n\n\n", "file_path": "src/tests/utils.rs", "rank": 46, "score": 67955.41674098145 }, { "content": "#[test]\n\nfn filter_message_opt_long() {\n\n let opt = \"--message\";\n\n let input = svec!(\"A\", \"B\", \"C\", \"D\");\n\n 
let output = run_rogcat_with_input_file(&svec!(opt, \"A\"), &input).unwrap();\n\n assert!(output.0);\n\n assert_eq!(output.1.len(), 1);\n\n\n\n let output = run_rogcat_with_input_file(&svec!(opt, \"A\", opt, \"B\"), &input).unwrap();\n\n assert!(output.0);\n\n assert_eq!(output.1.len(), 2);\n\n}\n\n\n", "file_path": "src/tests/filter.rs", "rank": 47, "score": 65886.85047067094 }, { "content": "#[test]\n\nfn testrun_rogcat_with_input_file() {\n\n let input = svec!(\"A\", \"B\", \"C\");\n\n let output = run_rogcat_with_input_file(&vec![], &input).unwrap();\n\n assert!(output.0);\n\n assert_eq!(output.1.len(), 3);\n\n}\n", "file_path": "src/tests/utils.rs", "rank": 48, "score": 65774.26339780804 }, { "content": "#[test]\n\nfn filter_message_opt_short_long() {\n\n let long = \"--message\";\n\n let short = \"-m\";\n\n let input = svec!(\"A\", \"B\", \"C\", \"D\");\n\n let output = run_rogcat_with_input_file(&svec!(short, \"A\", long, \"B\"), &input).unwrap();\n\n assert!(output.0);\n\n assert_eq!(output.1.len(), 2);\n\n\n\n let output = run_rogcat_with_input_file(&svec!(long, \"A\", short, \"B\"), &input).unwrap();\n\n assert!(output.0);\n\n assert_eq!(output.1.len(), 2);\n\n}\n\n\n", "file_path": "src/tests/filter.rs", "rank": 49, "score": 63877.08584389453 }, { "content": "fn run(cmd: &str, skip_until: &Option<String>) -> Result<(Child, OutStream), Error> {\n\n let cmd = cmd\n\n .split_whitespace()\n\n .map(|s| s.to_owned())\n\n .collect::<Vec<String>>();\n\n\n\n let mut child = Command::new(&cmd[0])\n\n .args(&cmd[1..])\n\n .stdout(Stdio::piped())\n\n .stderr(Stdio::piped())\n\n .spawn_async()?;\n\n\n\n let stdout = child\n\n .stdout()\n\n .take()\n\n .ok_or_else(|| err_msg(\"Failed get stdout\"))?;\n\n let stderr = child\n\n .stderr()\n\n .take()\n\n .ok_or_else(|| err_msg(\"Failed get stderr\"))?;\n", "file_path": "src/runner.rs", "rank": 50, "score": 63257.0124345229 }, { "content": "#[derive(Default)]\n\nstruct Html {\n\n filename: PathBuf,\n\n records: 
Vec<HtmlRecord>,\n\n}\n\n\n\nimpl Html {\n\n // TODO: ensure readability\n\n fn hash_color(value: &str) -> String {\n\n let mut digest = crc32::Digest::new(crc32::IEEE);\n\n digest.write(value.as_bytes());\n\n let h = digest.sum32();\n\n let r = h & 0xFF;\n\n let g = (h & 0xFF00) >> 8;\n\n let b = (h & 0xFF_0000) >> 16;\n\n format!(\"#{:02x}{:02x}{:02x}\", r, g, b)\n\n }\n\n fn color_helper(\n\n h: &Helper,\n\n _: &Handlebars,\n\n _: &Context,\n", "file_path": "src/filewriter.rs", "rank": 51, "score": 57847.53269962702 }, { "content": "/// Textfile with format\n\nstruct Textfile {\n\n file: File,\n\n format: Format,\n\n}\n\n\n\nimpl Writer for Textfile {\n\n fn new(filename: &PathBuf, format: &Format) -> Result<Box<Self>, Error> {\n\n let file = File::create(filename).map_err(|e| {\n\n format_err!(\"Failed to create output file {}: {}\", filename.display(), e)\n\n })?;\n\n let textfile = Textfile {\n\n file,\n\n format: format.clone(),\n\n };\n\n Ok(Box::new(textfile))\n\n }\n\n\n\n fn write(&mut self, record: &Record, _index: usize) -> Result<(), Error> {\n\n self.file\n\n .write(record.format(&self.format)?.as_bytes())\n\n .map_err(|e| format_err!(\"Failed to write: {}\", e))?;\n\n self.file\n\n .write(b\"\\n\")\n\n .map_err(|e| format_err!(\"Failed to write: {}\", e))?;\n\n Ok(())\n\n }\n\n}\n\n\n", "file_path": "src/filewriter.rs", "rank": 52, "score": 57843.76950756078 }, { "content": "struct Logger {\n\n handle: Handle,\n\n tag: String,\n\n level: Level,\n\n}\n\n\n\nimpl Logger {\n\n fn level(level: &Level) -> &str {\n\n match *level {\n\n Level::Trace | Level::Verbose => \"v\",\n\n Level::Debug | Level::None => \"d\",\n\n Level::Info => \"i\",\n\n Level::Warn => \"w\",\n\n Level::Error | Level::Fatal | Level::Assert => \"e\",\n\n }\n\n }\n\n}\n\n\n\nimpl Sink for Logger {\n\n type SinkItem = Option<Record>;\n", "file_path": "src/log.rs", "rank": 53, "score": 57843.76950756078 }, { "content": "#[derive(Serialize)]\n\nstruct HtmlRecord {\n\n index: usize,\n\n 
record: Record,\n\n}\n\n\n\n/// Simple static html file\n", "file_path": "src/filewriter.rs", "rank": 54, "score": 56361.38564870984 }, { "content": "/// Read a value from the configuration file\n\n/// `config_dir/config.toml`\n\nfn config_get<'a, T>(key: &'a str) -> Option<T>\n\nwhere\n\n T: serde::Deserialize<'a>,\n\n{\n\n CONFIG.read().ok().and_then(|c| c.get::<T>(key).ok())\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 55, "score": 55730.5698214282 }, { "content": "type StdResult<T, E> = ::std::result::Result<T, E>;\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub enum Format {\n\n Csv,\n\n Html,\n\n Human,\n\n Json,\n\n Raw,\n\n}\n\n\n\nimpl FromStr for Format {\n\n type Err = &'static str;\n\n fn from_str(s: &str) -> StdResult<Self, Self::Err> {\n\n match s {\n\n \"csv\" => Ok(Format::Csv),\n\n \"html\" => Ok(Format::Html),\n\n \"human\" => Ok(Format::Human),\n\n \"json\" => Ok(Format::Json),\n\n \"raw\" => Ok(Format::Raw),\n", "file_path": "src/record.rs", "rank": 56, "score": 55488.63609466703 }, { "content": "fn records<T: Read + Send + Sized + 'static>(reader: T, core: &Core) -> Result<RStream, Error> {\n\n let (tx, rx) = mpsc::channel(1);\n\n let mut reader = BufReader::new(reader);\n\n let remote = core.remote();\n\n\n\n thread::spawn(move || {\n\n let mut buffer = Vec::new();\n\n loop {\n\n let mut tx = tx.clone();\n\n buffer.clear();\n\n match reader.read_until(b'\\n', &mut buffer) {\n\n Ok(len) => {\n\n if len > 0 {\n\n while buffer.ends_with(&[b'\\r']) || buffer.ends_with(&[b'\\n']) {\n\n buffer.pop();\n\n }\n\n let record = Record {\n\n raw: String::from_utf8_lossy(&buffer).into(),\n\n ..Default::default()\n\n };\n", "file_path": "src/reader.rs", "rank": 57, "score": 55315.96188611053 }, { "content": "struct LossyLineCodec;\n\n\n\nimpl Decoder for LossyLineCodec {\n\n type Item = Record;\n\n type Error = ::std::io::Error;\n\n\n\n fn decode(\n\n &mut self,\n\n buf: &mut BytesMut,\n\n ) -> ::std::result::Result<Option<Record>, ::std::io::Error> 
{\n\n if let Some(n) = buf.as_ref().iter().position(|b| *b == b'\\n') {\n\n let line = buf.split_to(n);\n\n buf.split_to(1);\n\n return Ok(Some(Record {\n\n raw: String::from_utf8_lossy(&line).into_owned(),\n\n ..Default::default()\n\n }));\n\n }\n\n\n\n Ok(None)\n", "file_path": "src/reader.rs", "rank": 58, "score": 55007.77639531379 }, { "content": "type OutStream = Box<Stream<Item = String, Error = ::std::io::Error>>;\n\n\n\npub struct Runner {\n\n child: Child,\n\n cmd: String,\n\n skip_until: Option<String>,\n\n output: OutStream,\n\n restart: bool,\n\n skip: bool,\n\n}\n\n\n", "file_path": "src/runner.rs", "rank": 59, "score": 51966.49731839124 }, { "content": "fn main() {\n\n match run() {\n\n Err(e) => {\n\n let stderr = &mut stderr();\n\n let errmsg = \"Error writing to stderr\";\n\n writeln!(stderr, \"{}\", e).unwrap_or_else(|_| panic!(errmsg));\n\n exit(1)\n\n }\n\n Ok(r) => exit(r),\n\n }\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 60, "score": 45050.04202344398 }, { "content": "#[test]\n\nfn parse_printable() {\n\n let t = \"03-01 02:19:45.207 1 2 I EXT4-fs (mmcblk3p8): mounted filesystem with \\\n\n ordered data mode. Opts: (null)\";\n\n let r = Parser::parse_default(t).unwrap();\n\n assert_eq!(r.level, Level::Info);\n\n assert_eq!(r.tag, \"EXT4-fs (mmcblk3p8)\");\n\n assert_eq!(r.process, \"1\");\n\n assert_eq!(r.thread, \"2\");\n\n assert_eq!(\n\n r.message,\n\n \"mounted filesystem with ordered data mode. 
Opts: (null)\"\n\n );\n\n\n\n let t = \"03-01 02:19:42.868 0 0 D /soc/aips-bus@02100000/usdhc@0219c000: \\\n\n voltage-ranges unspecified\";\n\n let r = Parser::parse_default(t).unwrap();\n\n assert_eq!(r.level, Level::Debug);\n\n assert_eq!(r.tag, \"/soc/aips-bus@02100000/usdhc@0219c000\");\n\n assert_eq!(r.process, \"0\");\n\n assert_eq!(r.thread, \"0\");\n", "file_path": "src/parser.rs", "rank": 61, "score": 43655.67822411313 }, { "content": "#[test]\n\nfn filter_args() {\n\n assert!(Filter::init_filter(&vec![]).is_ok());\n\n assert!(Filter::init_filter(&vec![\"\".to_owned()]).is_ok());\n\n assert!(Filter::init_filter(&vec![\"a\".to_owned()]).is_ok());\n\n assert!(Filter::init_filter(&vec![\".*\".to_owned()]).is_ok());\n\n assert!(Filter::init_filter(&vec![\".*\".to_owned(), \".*\".to_owned()]).is_ok());\n\n assert!(Filter::init_filter(&vec![\"(\".to_owned()]).is_err());\n\n}\n", "file_path": "src/filter.rs", "rank": 62, "score": 43655.67822411313 }, { "content": "#[test]\n\nfn parse_i32() {\n\n assert_eq!(num_i32(\"123\".as_bytes()).unwrap().1, 123);\n\n assert_eq!(num_i32(\"0\".as_bytes()).unwrap().1, 0);\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 63, "score": 43655.67822411313 }, { "content": "#[test]\n\nfn parse_property() {\n\n let t = \"[ro.build.tags]: [release-keys]\";\n\n assert_eq!(\n\n property(t.as_bytes()).unwrap().1,\n\n (\"ro.build.tags\".to_owned(), \"release-keys\".to_owned())\n\n );\n\n}\n", "file_path": "src/parser.rs", "rank": 64, "score": 43655.67822411313 }, { "content": "#[test]\n\nfn parse_unparseable() {\n\n assert!(Parser::parse_default(\"\").is_err());\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 65, "score": 43655.67822411313 }, { "content": "#[test]\n\nfn parse_mindroid() {\n\n let t = \"D/ServiceManager(000000000000000C): foo bar\";\n\n let r = Parser::parse_mindroid(t).unwrap();\n\n assert_eq!(r.level, Level::Debug);\n\n assert_eq!(r.tag, \"ServiceManager\");\n\n assert_eq!(r.process, \"000000000000000C\");\n\n 
assert_eq!(r.thread, \"\");\n\n assert_eq!(r.message, \"foo bar\");\n\n\n\n let t = \"D/ServiceManager(0x123): Service MediaPlayer has been created in process main\";\n\n let r = Parser::parse_mindroid(t).unwrap();\n\n assert_eq!(r.level, Level::Debug);\n\n assert_eq!(r.tag, \"ServiceManager\");\n\n assert_eq!(r.process, \"123\");\n\n assert_eq!(r.thread, \"\");\n\n assert_eq!(\n\n r.message,\n\n \"Service MediaPlayer has been created in process main\"\n\n );\n\n\n", "file_path": "src/parser.rs", "rank": 66, "score": 43655.67822411313 }, { "content": "#[test]\n\nfn help() {\n\n let args = &[\n\n svec!(\"--help\"),\n\n svec!(\"bugreport\", \"--help\"),\n\n svec!(\"completions\", \"--help\"),\n\n svec!(\"configuration\", \"--help\"),\n\n svec!(\"devices\", \"--help\"),\n\n svec!(\"log\", \"--help\"),\n\n svec!(\"profiles\", \"--help\"),\n\n ];\n\n\n\n for a in args {\n\n let result = run_rogcat(a, None).expect(&a.join(\" \"));\n\n assert!(result.0);\n\n assert!(!result.1.is_empty());\n\n }\n\n}\n", "file_path": "src/tests/system.rs", "rank": 67, "score": 43655.67822411313 }, { "content": "#[test]\n\nfn parse_csv() {\n\n let t = \"07-01 14:13:14.446000000,Sensor:batt_therm:29000 mC,Info,ThermalEngine,225,295,07-01 14:13:14.446 225 295 I ThermalEngine: Sensor:batt_therm:29000 mC\";\n\n let r = Parser::parse_csv(t).unwrap();\n\n assert_eq!(r.level, Level::Info);\n\n assert_eq!(r.tag, \"ThermalEngine\");\n\n assert_eq!(r.process, \"225\");\n\n assert_eq!(r.thread, \"295\");\n\n assert_eq!(r.message, \"Sensor:batt_therm:29000 mC\");\n\n assert_eq!(\n\n r.raw,\n\n \"07-01 14:13:14.446 225 295 I ThermalEngine: Sensor:batt_therm:29000 mC\"\n\n );\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 68, "score": 43655.67822411313 }, { "content": "#[test]\n\nfn parse_level() {\n\n assert_eq!(level(\"V\".as_bytes()).unwrap().1, Level::Verbose);\n\n assert_eq!(level(\"D\".as_bytes()).unwrap().1, Level::Debug);\n\n assert_eq!(level(\"I\".as_bytes()).unwrap().1, Level::Info);\n\n 
assert_eq!(level(\"W\".as_bytes()).unwrap().1, Level::Warn);\n\n assert_eq!(level(\"E\".as_bytes()).unwrap().1, Level::Error);\n\n assert_eq!(level(\"F\".as_bytes()).unwrap().1, Level::Fatal);\n\n assert_eq!(level(\"A\".as_bytes()).unwrap().1, Level::Assert);\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 69, "score": 43655.67822411313 }, { "content": "#[test]\n\nfn tempdirs() {\n\n let dirs: Vec<PathBuf> = [..100].iter().map(|_| tempdir().unwrap()).collect();\n\n for d in dirs {\n\n assert!(d.exists());\n\n }\n\n}\n\n\n", "file_path": "src/tests/utils.rs", "rank": 70, "score": 43655.67822411313 }, { "content": "#[test]\n\nfn head() {\n\n let input = svec!(\"A\", \"B\", \"C\", \"D\");\n\n let output = run_rogcat_with_input_file(&svec!(\"--head\", \"2\"), &input).unwrap();\n\n assert!(output.0);\n\n assert_eq!(output.1.len(), 2);\n\n}\n", "file_path": "src/tests/misc.rs", "rank": 71, "score": 43655.67822411313 }, { "content": "#[test]\n\nfn stdin_stdout() {\n\n let input = Some(vec![]);\n\n let output = run_rogcat(&svec!(\"-\"), input).unwrap();\n\n assert!(output.0);\n\n assert_eq!(output.1.len(), 0);\n\n\n\n let input = Some(svec!(\"A\", \"B\", \"C\"));\n\n let output = run_rogcat(&svec!(\"-\"), input).unwrap();\n\n assert!(output.0);\n\n assert_eq!(output.1.len(), 3);\n\n\n\n let input = Some(svec!(\"A\", \"B\", \"C\", \"D\"));\n\n let output = run_rogcat(&svec!(\"-\"), input).unwrap();\n\n assert!(output.0);\n\n assert_eq!(output.1.len(), 4);\n\n}\n\n\n", "file_path": "src/tests/utils.rs", "rank": 72, "score": 42389.26238432409 }, { "content": "#[test]\n\nfn parse_csv_unparseable() {\n\n assert!(Parser::parse_csv(\"\").is_err());\n\n assert!(Parser::parse_csv(\",,,\").is_err());\n\n}\n\n\n", "file_path": "src/parser.rs", "rank": 73, "score": 42389.26238432409 }, { "content": "#[test]\n\nfn create_tempfile_with_content() {\n\n let content = svec!(\"A\", \"B\", \"C\");\n\n let tempfile = tempfile_with_content(&content).expect(\"Failed to create tempfile with 
content\");\n\n let file = File::open(tempfile).expect(\"Failed to open tempfile\");\n\n let reader: BufReader<File> = BufReader::new(file);\n\n assert_eq!(reader.lines().count(), content.len());\n\n}\n\n\n", "file_path": "src/tests/utils.rs", "rank": 74, "score": 41233.956141333794 }, { "content": "#[cfg(not(target_os = \"windows\"))]\n\nfn hashed_color(i: &str) -> Color {\n\n // Some colors are hard to read on (at least) dark terminals\n\n // and others seem just ugly to me...\n\n match i.bytes().fold(42u32, |c, x| (c ^ Color::from(x))) {\n\n c @ 0...1 => c + 2,\n\n c @ 16...21 => c + 6,\n\n c @ 52...55 | c @ 126...129 => c + 4,\n\n c @ 163...165 | c @ 200...201 => c + 3,\n\n c @ 207 => c + 1,\n\n c @ 232...240 => c + 9,\n\n c => c,\n\n }\n\n}\n\n\n\npub struct Terminal {\n\n beginning_of: Regex,\n\n color: bool,\n\n date_format: (String, usize),\n\n diff_width: usize,\n\n format: Format,\n", "file_path": "src/terminal.rs", "rank": 75, "score": 37724.008137991266 }, { "content": "// Copyright © 2017 Felix Obenhuber\n\n// This program is free software. It comes without any warranty, to the extent\n\n// permitted by applicable law. You can redistribute it and/or modify it under\n\n// the terms of the Do What The Fuck You Want To Public License, Version 2, as\n\n// published by Sam Hocevar. 
See the COPYING file for more details.\n\n\n\nuse failure::Error;\n\nuse tests::utils::*;\n\n\n\nconst CONFIG: &str = \"\n\n[profile.A]\n\nmessage = [\\\"A\\\"]\n\n\n\n[profile.AB]\n\nextends = [\\\"A\\\"]\n\nmessage = [\\\"B\\\"]\n\n\n\n[profile.ABC]\n\nextends = [\\\"AB\\\"]\n\nmessage = [\\\"C\\\"]\n", "file_path": "src/tests/profiles.rs", "rank": 90, "score": 30392.27130731155 }, { "content": "\n\n[profile.Highlight]\n\nextends = [\\\"AB\\\"]\n\nhighlight = [\\\"A\\\"]\n\n\n\n# CicleA extends CircleB and CircleB extends CircleA -> invalid\n\n[profile.CircleA]\n\nextends = [\\\"CircleB\\\"]\n\n\n\n[profile.CircleB]\n\nextends = [\\\"CircleA\\\"]\";\n\n\n", "file_path": "src/tests/profiles.rs", "rank": 91, "score": 30387.684527583955 }, { "content": "// Copyright © 2017 Felix Obenhuber\n\n// This program is free software. It comes without any warranty, to the extent\n\n// permitted by applicable law. You can redistribute it and/or modify it under\n\n// the terms of the Do What The Fuck You Want To Public License, Version 2, as\n\n// published by Sam Hocevar. 
See the COPYING file for more details.\n\n\n\nuse std::fs::File;\n\nuse std::io::prelude::*;\n\nuse tests::utils::*;\n\n\n\n#[test]\n", "file_path": "src/tests/file_reader.rs", "rank": 92, "score": 29297.07268430999 }, { "content": "impl<'a> Filter {\n\n pub fn new(args: &ArgMatches<'a>, profile: &Profile) -> Result<Self, Error> {\n\n let mut tag_filter = args\n\n .values_of(\"tag\")\n\n .map(|m| m.map(|f| f.to_owned()).collect::<Vec<String>>())\n\n .unwrap_or_else(|| vec![]);\n\n tag_filter.extend(profile.tag().clone());\n\n let mut message_filter = args\n\n .values_of(\"message\")\n\n .map(|m| m.map(|f| f.to_owned()).collect::<Vec<String>>())\n\n .unwrap_or_else(|| vec![]);\n\n message_filter.extend(profile.message().clone());\n\n\n\n let (tag, tag_negative) = Self::init_filter(&tag_filter)?;\n\n let (message, message_negative) = Self::init_filter(&message_filter)?;\n\n\n\n Ok(Filter {\n\n level: Level::from(args.value_of(\"level\").unwrap_or(\"\")),\n\n message,\n\n message_negative,\n", "file_path": "src/filter.rs", "rank": 93, "score": 29.073514057156107 }, { "content": " highlight: Vec<Regex>,\n\n no_dimm: bool,\n\n process_width: usize,\n\n shorten_tag: bool,\n\n tag_timestamps: HashMap<String, Tm>,\n\n tag_width: Option<usize>,\n\n thread_width: usize,\n\n time_diff: bool,\n\n vovels: Regex,\n\n}\n\n\n\nimpl<'a> Terminal {\n\n pub fn new(args: &ArgMatches<'a>, profile: &Profile) -> Result<Self, Error> {\n\n let mut hl = profile.highlight().clone();\n\n if args.is_present(\"highlight\") {\n\n hl.extend(values_t!(args.values_of(\"highlight\"), String)?);\n\n }\n\n let highlight = hl.iter().flat_map(|h| Regex::new(h)).collect();\n\n\n\n let format = args\n", "file_path": "src/terminal.rs", "rank": 94, "score": 23.968020653835225 }, { "content": " Output format. 
Defaults to human on stdout and raw on file output [values: csv, html, human, json, raw]\n\n\n\n -H, --head <head> Read n records and exit\n\n -h, --highlight <highlight>...\n\n Highlight messages that match this pattern in RE2. The prefix '!' inverts the match\n\n\n\n -i, --input <input>...\n\n Read from file instead of command. Use 'serial://COM0@115200,8N1 or similiar for reading a serial port\n\n\n\n -l, --level <level>\n\n Minimum level [values: trace, debug, info, warn, error, fatal, assert, T, D, I, W, E, F, A]\n\n\n\n -m, --message <message>... Message filters in RE2. The prefix '!' inverts the match\n\n -o, --output <output> Write output to file\n\n -p, --profile <profile> Select profile\n\n -P, --profiles-path <profiles_path> Manually specify profile file (overrules ROGCAT_PROFILES)\n\n -n, --records-per-file <records_per_file> Write n records per file. Use k, M, G suffixes or a plain number\n\n -t, --tag <tag>... Tag filters in RE2. The prefix '!' inverts the match\n\n -T, --tail <tail> Dump only the most recent <COUNT> lines (implies --dump)\n\n\n\nARGS:\n\n <COMMAND> Optional command to run and capture stdout from. Pass \"-\" to d capture stdin'. If omitted, rogcat\n\n will run \"adb logcat -b all\" and restarts this commmand if 'adb' terminates\n\n\n\nSUBCOMMANDS:\n\n bugreport Capture bugreport. 
This is only works for Android versions < 7.\n\n completions Generates completion scripts\n\n devices Show list of available devices\n\n help Prints this message or the help of the given subcommand(s)\n\n log Add log message(s) log buffer\n\n profiles Show and manage profiles\n\n```\n\n\n", "file_path": "README.md", "rank": 95, "score": 19.77913075010914 }, { "content": " fn expecting(&self, formatter: &mut Formatter) -> ::std::fmt::Result {\n\n formatter.write_str(\"string %m-%d %H:%M:%S.%f\")\n\n }\n\n }\n\n\n\n deserializer.deserialize_str(TimeVisitor)\n\n }\n\n}\n\n\n\n#[derive(Clone, Debug, Default, Deserialize, Serialize, PartialEq)]\n\npub struct Record {\n\n pub timestamp: Option<Timestamp>,\n\n pub message: String,\n\n pub level: Level,\n\n pub tag: String,\n\n pub process: String,\n\n pub thread: String,\n\n pub raw: String,\n\n}\n\n\n", "file_path": "src/record.rs", "rank": 96, "score": 19.687316237918278 }, { "content": "### Terminal settings\n\n\n\nSome parameters of the `human` format are adjustable via the config file:\n\n\n\n```\n\nterminal_tag_width = 20\n\nterminal_shorten_tag = true\n\nterminal_show_time_diff = true\n\nterminal_show_date = false\n\nterminal_time_diff_width = 10\n\nterminal_hide_timestamp = true\n\nterminal_color = never\n\nterminal_no_dimm = true\n\n```\n\n\n\n## Profiles\n\n\n\nOptionally `rogcat` reads a (`toml` formated) configuration file if present. This configuration may include tracing profiles\n\n('-p') and settings. The possible options in the configuration file are a subset of the command line options. The configuration\n\nfile is read from the location set in the environment variable `ROGCAT_PROFILES` or a fixed pathes depending on your OS:\n\n\n\n* MacOS: `$HOME/Library/Preferences/rogcat/profiles.toml`\n\n* Linux: `$HOME/.config/rogcat/profiles.toml`\n\n* Windows: `%HOME%/AppData/Roaming/rogcat/profiles.toml`\n\n\n\nThe environment variable overrules the default path. 
See `rogcat profiles --help` or `rogcat profiles --examples`.\n\n\n\nExample:\n\n\n\n```\n\n[profile.B]\n\ncomment = \"Messages starting with B\"\n\nmessage = [\"^B.*\"]\n\n\n\n[profile.ABC]\n\nextends = [\"A\", \"B\"]\n\ncomment = \"Profiles A, B plus the following filter (^C.*)\"\n\nmessage = [\"^C.*\"]\n\n\n\n[profile.\"Comments are optional\"]\n\ntag = [\"rogcat\"]\n\n\n\n[profile.complex]\n\ncomment = \"Profiles can be complex. This one is probably very useless.\"\n\nhighlight = [\"blah\"]\n\nmessage = [\"^R.*\", \"!^A.*\", \"!^A.*\"]\n\ntag = [\"b*\", \"!adb\"]\n\n\n\n[profile.\"W hitespace\"]\n\ncomment = \"Profile names can contain whitespaces. Quote on command line...\"\n\n\n\n[profile.A]\n\ncomment = \"Messages starting with A\"\n\nmessage = [\"^A.*\"]\n\n\n\n[profile.rogcat]\n\ncomment = \"Only tag \\\"rogcat\\\"\"\n\ntag = [\"^rogcat$\"]\n\n```\n\n\n\nTo check your setup, run `rogcat profiles --list` and select a profile for a run by passing the `-p/--profile` option.\n\n\n", "file_path": "README.md", "rank": 97, "score": 19.15503652043468 }, { "content": "\n\nconst LEVEL_VALUES: &[&str] = &[\n\n \"trace\", \"debug\", \"info\", \"warn\", \"error\", \"fatal\", \"assert\", \"T\", \"D\", \"I\", \"W\", \"E\", \"F\",\n\n \"A\",\n\n];\n\n\n\n#[derive(Clone, Debug, Deserialize, PartialOrd, PartialEq, Serialize)]\n\npub enum Level {\n\n None,\n\n Trace,\n\n Verbose,\n\n Debug,\n\n Info,\n\n Warn,\n\n Error,\n\n Fatal,\n\n Assert,\n\n}\n\n\n\nimpl Display for Level {\n", "file_path": "src/record.rs", "rank": 98, "score": 18.281983205235083 }, { "content": "impl Timestamp {\n\n pub fn new(t: Tm) -> Timestamp {\n\n Timestamp { tm: t }\n\n }\n\n}\n\n\n\nimpl Serialize for Timestamp {\n\n fn serialize<S>(&self, serializer: S) -> StdResult<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n strftime(\"%m-%d %H:%M:%S.%f\", &self.tm)\n\n .map_err(|e| ::serde::ser::Error::custom(e.to_string()))?\n\n .serialize(serializer)\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> 
for Timestamp {\n\n fn deserialize<D>(deserializer: D) -> StdResult<Self, D::Error>\n\n where\n", "file_path": "src/record.rs", "rank": 99, "score": 18.18336108752094 } ]
Rust
src/yuv444i/mod.rs
dunkelstern/grapho-bitplane
4db3789284fa89c85e6b1b1c972aa4d42eda9bbf
use crate::*; pub use grapho_color::DigitalYCbCrColor; pub use crate::yuv422i::YUVComponent; #[derive(Debug, PartialEq)] pub struct YUV444iPixelBuffer<'a> { width: usize, height: usize, stride: usize, fourcc: &'a str, component_order: Vec<YUVComponent>, data: Vec<u8> } impl<'a> YUV444iPixelBuffer<'a> { fn decode_component_order(fourcc:&'a str) -> Vec<YUVComponent> { match fourcc { "VUY" => vec![YUVComponent::V, YUVComponent::U, YUVComponent::Y], "YVU" => vec![YUVComponent::Y, YUVComponent::V, YUVComponent::U], "UVY" => vec![YUVComponent::U, YUVComponent::V, YUVComponent::Y], "YUV" | "YUV444" | "YUV 4:4:4" | _ => vec![YUVComponent::Y, YUVComponent::U, YUVComponent::V], } } } impl<'a> PixelBuffer<'a> for YUV444iPixelBuffer<'a> { type ColorType = DigitalYCbCrColor; fn new(width: usize, height: usize, stride: Option<usize>, fourcc: Option<&'a str>) -> Self { let f = fourcc.unwrap_or("YUV444"); let component_order = YUV444iPixelBuffer::decode_component_order(f); let line_width = stride.unwrap_or(width * 3); YUV444iPixelBuffer { width, height, data: vec![0; line_width * height], stride: line_width, fourcc: f, component_order } } fn new_with_data(width: usize, height: usize, data: Vec<u8>, stride: Option<usize>, fourcc: Option<&'a str>) -> Result<Self, PixelBufferError> { let f = fourcc.unwrap_or("YUV444"); let component_order = YUV444iPixelBuffer::decode_component_order(f); if data.len() < stride.unwrap_or(width * 3) * height { return Err(PixelBufferError::BufferTooSmall); } Ok( YUV444iPixelBuffer { width, height, data, stride: stride.unwrap_or(width * 3), fourcc: f, component_order } ) } fn new_with_background(width: usize, height: usize, color: Self::ColorType, stride: Option<usize>, fourcc: Option<&'a str>) -> Self { let f = fourcc.unwrap_or("YUV444"); let component_order = YUV444iPixelBuffer::decode_component_order(f); let rep: [u8; 3] = color.into(); let line_width = stride.unwrap_or(width * component_order.len()); let data:Vec<u8>; let representation = vec![ 
rep[component_order[0] as usize], rep[component_order[1] as usize], rep[component_order[2] as usize], ]; if line_width > width * 3 { let mut line = representation.repeat(width); line.extend([0].repeat(line_width - width * 3)); data = line.repeat(height); } else { data = representation.repeat(width * height); } YUV444iPixelBuffer { width, height, data, stride: line_width, fourcc: f, component_order } } fn get_width(&self) -> usize { self.width } fn get_height(&self) -> usize { self.height } fn get_stride(&self) -> usize { self.stride } fn get_fourcc(&self) -> &'a str { self.fourcc } fn set_pixel(&mut self, x: usize, y: usize, color: Self::ColorType) -> Result<(), PixelBufferError> { if (x >= self.width) || (y >= self.width) { return Err(PixelBufferError::RequestOutOfBounds); } let start = x * self.component_order.len() + y * self.stride; let repr: [u8; 3] = color.into(); for i in 0..self.component_order.len() { self.data[start + i] = repr[self.component_order[i] as usize]; } Ok(()) } fn get_pixel(&self, x: usize, y: usize) -> Result<Self::ColorType, PixelBufferError> { if (x >= self.width) || (y >= self.width) { return Err(PixelBufferError::RequestOutOfBounds); } let start = x * 3 + y * self.stride; let mut color: [u8; 3] = [0, 0, 0]; for i in 0..self.component_order.len() { color[self.component_order[i] as usize] = self.data[start + i]; } Ok(DigitalYCbCrColor::from(color)) } } pub mod iter; mod tests;
use crate::*; pub use grapho_color::DigitalYCbCrColor; pub use crate::yuv422i::YUVComponent; #[derive(Debug, PartialEq)] pub struct YUV444iPixelBuffer<'a> { width: usize, height: usize, stride: usize, fourcc: &'a str, component_order: Vec<YUVComponent>, data: Vec<u8> } impl<'a> YUV444iPixelBuffer<'a> { fn decode_component_order(fourcc:&'a str) -> Vec<YUVComponent> { match fourcc { "VUY" => vec![YUVComponent::V, YUVComponent::U, YUVComponent::Y], "YVU" => vec![YUVComponent::Y, YUVComponent::V, YUVComponent::U], "UVY" => vec![YUVComponent::U, YUVComponent::V, YUVComponent::Y], "YUV" | "YUV444" | "YUV 4:4:4" | _ => vec![YUVComponent::Y, YUVComponent::U, YUVComponent::V], } } } impl<'a> PixelBuffer<'a> for YUV444iPixelBuffer<'a> { type ColorType = DigitalYCbCrColor;
fn new_with_data(width: usize, height: usize, data: Vec<u8>, stride: Option<usize>, fourcc: Option<&'a str>) -> Result<Self, PixelBufferError> { let f = fourcc.unwrap_or("YUV444"); let component_order = YUV444iPixelBuffer::decode_component_order(f); if data.len() < stride.unwrap_or(width * 3) * height { return Err(PixelBufferError::BufferTooSmall); } Ok( YUV444iPixelBuffer { width, height, data, stride: stride.unwrap_or(width * 3), fourcc: f, component_order } ) } fn new_with_background(width: usize, height: usize, color: Self::ColorType, stride: Option<usize>, fourcc: Option<&'a str>) -> Self { let f = fourcc.unwrap_or("YUV444"); let component_order = YUV444iPixelBuffer::decode_component_order(f); let rep: [u8; 3] = color.into(); let line_width = stride.unwrap_or(width * component_order.len()); let data:Vec<u8>; let representation = vec![ rep[component_order[0] as usize], rep[component_order[1] as usize], rep[component_order[2] as usize], ]; if line_width > width * 3 { let mut line = representation.repeat(width); line.extend([0].repeat(line_width - width * 3)); data = line.repeat(height); } else { data = representation.repeat(width * height); } YUV444iPixelBuffer { width, height, data, stride: line_width, fourcc: f, component_order } } fn get_width(&self) -> usize { self.width } fn get_height(&self) -> usize { self.height } fn get_stride(&self) -> usize { self.stride } fn get_fourcc(&self) -> &'a str { self.fourcc } fn set_pixel(&mut self, x: usize, y: usize, color: Self::ColorType) -> Result<(), PixelBufferError> { if (x >= self.width) || (y >= self.width) { return Err(PixelBufferError::RequestOutOfBounds); } let start = x * self.component_order.len() + y * self.stride; let repr: [u8; 3] = color.into(); for i in 0..self.component_order.len() { self.data[start + i] = repr[self.component_order[i] as usize]; } Ok(()) } fn get_pixel(&self, x: usize, y: usize) -> Result<Self::ColorType, PixelBufferError> { if (x >= self.width) || (y >= self.width) { return 
Err(PixelBufferError::RequestOutOfBounds); } let start = x * 3 + y * self.stride; let mut color: [u8; 3] = [0, 0, 0]; for i in 0..self.component_order.len() { color[self.component_order[i] as usize] = self.data[start + i]; } Ok(DigitalYCbCrColor::from(color)) } } pub mod iter; mod tests;
fn new(width: usize, height: usize, stride: Option<usize>, fourcc: Option<&'a str>) -> Self { let f = fourcc.unwrap_or("YUV444"); let component_order = YUV444iPixelBuffer::decode_component_order(f); let line_width = stride.unwrap_or(width * 3); YUV444iPixelBuffer { width, height, data: vec![0; line_width * height], stride: line_width, fourcc: f, component_order } }
function_block-full_function
[ { "content": "/// Pixel buffer trait, all Pixel buffers will implement this\n\npub trait PixelBuffer<'a>: Sized + IntoIterator\n\n // + Sub + Mul + Add + Div + SubAssign + MulAssign + AddAssign + DivAssign\n\n{\n\n /// The color type this pixel buffer contains\n\n type ColorType;\n\n\n\n /// Create a new pixel buffer with given dimensions\n\n /// \n\n /// # Arguments\n\n /// \n\n /// * `width` - The width of the buffer\n\n /// * `height` - The height of the buffer\n\n /// * `stride` - optional, the line-width of the buffer if it differs from the\n\n /// default: `<length of color type representation> * width`\n\n /// * `fourcc` - optional, data representation format\n\n /// \n\n /// # Returns\n\n /// \n\n /// This returns a new instance of `Self` with it's contents set to zero\n\n fn new(width: usize, height: usize, stride: Option<usize>, fourcc: Option<&'a str>) -> Self;\n", "file_path": "src/lib.rs", "rank": 0, "score": 25291.655606568027 }, { "content": "//! This module describes a planar YUV pixel buffer with color subsampling (half horizontal resolution)\n\n\n\nuse crate::*;\n\npub use grapho_color::DigitalYCbCrColor;\n\npub use crate::yuv422i::YUVComponent;\n\n\n\n/// YUV Pixel buffer without alpha channel, half resolution color subsampling\n\n#[derive(Debug, PartialEq)]\n\npub struct YUV422pPixelBuffer<'a> {\n\n width: usize,\n\n height: usize,\n\n stride: usize,\n\n fourcc: &'a str,\n\n component_order: Vec<YUVComponent>,\n\n data: Vec<u8>\n\n}\n\n\n\nimpl<'a> YUV422pPixelBuffer<'a> {\n\n fn decode_component_order(fourcc:&'a str) -> Vec<YUVComponent> {\n\n match fourcc {\n", "file_path": "src/yuv422p/mod.rs", "rank": 4, "score": 27.181138099295396 }, { "content": " /// * `fourcc` - optional, data representation format\n\n /// \n\n /// # Defined fourcc codes\n\n /// \n\n /// * `YV12`, `YUV` (default)\n\n /// * `YV21`, `YVU`\n\n /// \n\n /// # Returns\n\n /// \n\n /// This returns a new instance of `YUV422pPixelBuffer` with it's contents set to zero\n\n fn 
new(width: usize, height: usize, stride: Option<usize>, fourcc: Option<&'a str>) -> Self {\n\n let f = fourcc.unwrap_or(\"YV12\");\n\n let component_order = YUV422pPixelBuffer::decode_component_order(f);\n\n let line_width = stride.unwrap_or(width * 2);\n\n\n\n YUV422pPixelBuffer {\n\n width,\n\n height,\n\n stride: stride.unwrap_or(width),\n\n data: vec![0; line_width * height],\n", "file_path": "src/yuv422p/mod.rs", "rank": 6, "score": 23.108922016824952 }, { "content": "//! This module describes an interleaved YUV pixel buffer with color subsampling (half horizontal resolution)\n\n\n\nuse crate::*;\n\npub use grapho_color::DigitalYCbCrColor;\n\n\n\n#[derive(Debug, PartialEq, Clone, Copy)]\n\npub enum YUVComponent {\n\n Y = 0,\n\n U = 1,\n\n V = 2,\n\n Ignore = -1\n\n}\n\n\n\n/// YUV Pixel buffer without alpha channel, half resolution color subsampling\n\n#[derive(Debug, PartialEq)]\n\npub struct YUV422iPixelBuffer<'a> {\n\n width: usize,\n\n height: usize,\n\n stride: usize,\n\n fourcc: &'a str,\n", "file_path": "src/yuv422i/mod.rs", "rank": 7, "score": 22.473574882082502 }, { "content": " stride: line_width,\n\n data,\n\n fourcc: f,\n\n component_order\n\n }\n\n }\n\n\n\n fn get_width(&self) -> usize {\n\n self.width\n\n }\n\n\n\n fn get_height(&self) -> usize {\n\n self.height\n\n }\n\n\n\n fn get_stride(&self) -> usize {\n\n self.stride\n\n }\n\n\n\n fn get_fourcc(&self) -> &'a str {\n", "file_path": "src/yuv422i/mod.rs", "rank": 8, "score": 22.445838067609152 }, { "content": " /// \n\n /// # Arguments\n\n /// \n\n /// * `width` - The width of the buffer\n\n /// * `height` - The height of the buffer\n\n /// * `stride` - optional, the line-width of the buffer if it differs from the\n\n /// default: `<length of color type representation> * width`\n\n /// * `fourcc` - optional, data representation format\n\n /// * `color` - fill color to use\n\n /// \n\n /// # Returns\n\n /// \n\n /// This returns a new instance of `Self` with it's contents set to the\n\n /// 
defined color. If stride is bigger than needed width the padding is filled with\n\n /// zeroes.\n\n fn new_with_background(width: usize, height: usize, color: Self::ColorType, stride: Option<usize>, fourcc: Option<&'a str>) -> Self;\n\n \n\n /// width of the buffer\n\n fn get_width(&self) -> usize;\n\n\n", "file_path": "src/lib.rs", "rank": 9, "score": 21.846072344542804 }, { "content": "//! This module describes an interleaved single component grayscale buffer\n\n\n\nuse crate::*;\n\npub use grapho_color::DigitalGrayscaleColor;\n\n\n\n#[derive(Debug, PartialEq, Clone, Copy)]\n\npub enum GrayscaleComponent {\n\n Value = 0,\n\n Ignore = 1\n\n}\n\n\n\n/// Grayscale Pixel buffer without alpha channel\n\n#[derive(Debug, PartialEq)]\n\npub struct GrayscalePixelBuffer<'a> {\n\n width: usize,\n\n height: usize,\n\n stride: usize,\n\n fourcc: &'a str,\n\n component_order: Vec<GrayscaleComponent>,\n\n data: Vec<u8>\n", "file_path": "src/grayscale/mod.rs", "rank": 10, "score": 21.320754965269504 }, { "content": "//! This module describes an interleaved RGB pixel buffer\n\n\n\nuse crate::*;\n\npub use grapho_color::DigitalRGBAColor;\n\n\n\n#[derive(Debug, PartialEq, Clone, Copy)]\n\npub enum RGBComponent {\n\n Red = 0,\n\n Green = 1,\n\n Blue = 2,\n\n Alpha = 3\n\n}\n\n\n\n/// RGB Pixel buffer without alpha channel\n\n#[derive(Debug, PartialEq)]\n\npub struct RGBPixelBuffer<'a> {\n\n width: usize,\n\n height: usize,\n\n stride: usize,\n\n fourcc: &'a str,\n", "file_path": "src/rgb/mod.rs", "rank": 11, "score": 20.980331352892996 }, { "content": " /// * `YV12`, `YUV` (default)\n\n /// * `YV21`, `YVU`\n\n /// \n\n /// # Returns\n\n /// \n\n /// This returns a new instance of `YUV422iPixelBuffer` with it's contents set to the\n\n /// defined color. 
If stride is bigger than needed width the padding is filled with\n\n /// zeroes.\n\n fn new_with_background(width: usize, height: usize, color: Self::ColorType, stride: Option<usize>, fourcc: Option<&'a str>) -> Self {\n\n let f = fourcc.unwrap_or(\"YV12\");\n\n let component_order = YUV422pPixelBuffer::decode_component_order(f);\n\n let line_width = stride.unwrap_or(width);\n\n \n\n let mut data: Vec<u8> = Vec::with_capacity(line_width * height);\n\n\n\n let mut y = [color.y].repeat(width);\n\n let mut u = [color.cb].repeat(width / 2);\n\n let mut v = [color.cr].repeat(width / 2);\n\n\n\n if line_width > width {\n", "file_path": "src/yuv422p/mod.rs", "rank": 12, "score": 20.929196737836158 }, { "content": " fourcc: f,\n\n component_order\n\n }\n\n }\n\n\n\n fn get_width(&self) -> usize {\n\n self.width\n\n }\n\n\n\n fn get_height(&self) -> usize {\n\n self.height\n\n }\n\n\n\n fn get_stride(&self) -> usize {\n\n self.stride\n\n }\n\n\n\n fn get_fourcc(&self) -> &'a str {\n\n self.fourcc\n\n }\n", "file_path": "src/yuv422p/mod.rs", "rank": 14, "score": 20.54230521356276 }, { "content": " /// * `width` - The width of the buffer\n\n /// * `height` - The height of the buffer\n\n /// * `stride` - optional, the line-width of the buffer if it differs from the\n\n /// default: `<length of color type representation> * width`\n\n /// * `fourcc` - optional, data representation format\n\n /// * `color` - fill color to use\n\n /// \n\n /// # Defined fourcc codes\n\n /// \n\n /// * `Y`, tightly packed grayscale only image (default)\n\n /// * `Yxx`, grayscale image with 2 bytes of padding (to interpret a YUV444 image as grayscale)\n\n /// * `Yx`, grayscale image with 1 byte of padding (to interpret a YUV422 interleaved image as grayscale)\n\n /// * `xY`, like `Yx` but inverted order\n\n /// \n\n /// # Returns\n\n /// \n\n /// This returns a new instance of `GrayscalePixelBuffer` with it's contents set to the\n\n /// defined color. 
If stride is bigger than needed width the padding is filled with\n\n /// zeroes.\n\n fn new_with_background(width: usize, height: usize, color: Self::ColorType, stride: Option<usize>, fourcc: Option<&'a str>) -> Self {\n", "file_path": "src/grayscale/mod.rs", "rank": 16, "score": 20.167320501527655 }, { "content": " RGBPixelBuffer {\n\n width,\n\n height,\n\n data,\n\n stride: line_width,\n\n fourcc: f,\n\n component_order\n\n }\n\n }\n\n\n\n fn get_width(&self) -> usize {\n\n self.width\n\n }\n\n\n\n fn get_height(&self) -> usize {\n\n self.height\n\n }\n\n\n\n fn get_stride(&self) -> usize {\n\n self.stride\n", "file_path": "src/rgb/mod.rs", "rank": 17, "score": 19.21068998904772 }, { "content": " \n\n GrayscalePixelBuffer {\n\n width,\n\n height,\n\n stride: line_width,\n\n data,\n\n fourcc: f,\n\n component_order\n\n }\n\n }\n\n\n\n fn get_width(&self) -> usize {\n\n self.width\n\n }\n\n\n\n fn get_height(&self) -> usize {\n\n self.height\n\n }\n\n\n\n fn get_stride(&self) -> usize {\n", "file_path": "src/grayscale/mod.rs", "rank": 18, "score": 19.007728719432514 }, { "content": " }\n\n\n\n fn get_pixel(&self, x: usize, y: usize) -> Result<Self::ColorType, PixelBufferError> {\n\n if (x >= self.width) || (y >= self.width) {\n\n return Err(PixelBufferError::RequestOutOfBounds);\n\n }\n\n\n\n let start = x + y * self.stride;\n\n let plane1 = (self.stride * self.height) + x / 2 + y * self.stride / 2;\n\n let plane2 = plane1 + (self.stride * self.height / 2);\n\n\n\n let u: u8;\n\n let v: u8;\n\n\n\n if self.component_order[0] == YUVComponent::U {\n\n u = self.data[plane1];\n\n v = self.data[plane2];\n\n } else {\n\n v = self.data[plane1];\n\n u = self.data[plane2];\n", "file_path": "src/yuv422p/mod.rs", "rank": 19, "score": 18.86297428239003 }, { "content": " self.stride\n\n }\n\n\n\n fn get_fourcc(&self) -> &'a str {\n\n self.fourcc\n\n }\n\n\n\n fn set_pixel(&mut self, x: usize, y: usize, color: Self::ColorType) -> Result<(), PixelBufferError> {\n\n if (x >= 
self.width) || (y >= self.width) {\n\n return Err(PixelBufferError::RequestOutOfBounds);\n\n }\n\n\n\n let start = x * self.component_order.len() + y * self.stride;\n\n for i in 0..self.component_order.len() {\n\n match self.component_order[i] {\n\n GrayscaleComponent::Value => self.data[start + i] = color.v,\n\n GrayscaleComponent::Ignore => ()\n\n }\n\n }\n\n\n", "file_path": "src/grayscale/mod.rs", "rank": 20, "score": 18.785009839470437 }, { "content": " self.fourcc\n\n }\n\n\n\n fn set_pixel(&mut self, x: usize, y: usize, color: Self::ColorType) -> Result<(), PixelBufferError> {\n\n if (x >= self.width) || (y >= self.width) {\n\n return Err(PixelBufferError::RequestOutOfBounds);\n\n }\n\n\n\n let start = x * 2 + y * self.stride - ((x % 2) * 2);\n\n let repr: [u8; 3] = color.into();\n\n\n\n let order = &self.component_order[x % 2];\n\n for i in 0..4 {\n\n match order[i] {\n\n YUVComponent::Y => self.data[start + i] = repr[0],\n\n YUVComponent::U => self.data[start + i] = (self.data[start + i] + repr[1]) >> 1,\n\n YUVComponent::V => self.data[start + i] = (self.data[start + i] + repr[2]) >> 1,\n\n YUVComponent::Ignore => ()\n\n }\n\n }\n", "file_path": "src/yuv422i/mod.rs", "rank": 21, "score": 18.739891132802963 }, { "content": " /// \n\n /// This returns a new instance of `RGBPixelBuffer` with it's contents set to zero\n\n\n\n fn new(width: usize, height: usize, stride: Option<usize>, fourcc: Option<&'a str>) -> Self {\n\n let f = fourcc.unwrap_or(\"RGB\");\n\n let component_order = RGBPixelBuffer::decode_component_order(f);\n\n let line_width = stride.unwrap_or(width * component_order.len());\n\n\n\n RGBPixelBuffer {\n\n width,\n\n height,\n\n data: vec![0; line_width * height],\n\n stride: line_width,\n\n fourcc: f,\n\n component_order\n\n }\n\n }\n\n\n\n /// Create a new pixel buffer with given dimensions from a `Vec<u8>`\n\n /// \n", "file_path": "src/rgb/mod.rs", "rank": 22, "score": 18.38806251011755 }, { "content": " fourcc: f,\n\n component_order\n\n 
}\n\n }\n\n\n\n /// Create a new pixel buffer with given dimensions from a `Vec<u8>`\n\n /// \n\n /// # Arguments\n\n /// \n\n /// * `width` - The width of the buffer\n\n /// * `height` - The height of the buffer\n\n /// * `stride` - optional, the line-width of the buffer if it differs from the\n\n /// default: `<length of color type representation> * width`\n\n /// * `fourcc` - optional, data representation format\n\n /// * `data` - the data to consume\n\n /// \n\n /// # Defined fourcc codes\n\n /// \n\n /// * `YV12`, `YUV` (default)\n\n /// * `YV21`, `YVU`\n", "file_path": "src/yuv422p/mod.rs", "rank": 23, "score": 18.242030586699425 }, { "content": " /// \n\n /// # Defined fourcc codes\n\n /// \n\n /// * `YVYU`\n\n /// * `UYVY`\n\n /// * `VYUY`\n\n /// * `YUYV`, `YUV4:2:2`, `YUV422`, `YUY2`\n\n /// \n\n /// # Returns\n\n /// \n\n /// This returns a new instance of `YUV422iPixelBuffer` with it's contents set to the\n\n /// defined color. If stride is bigger than needed width the padding is filled with\n\n /// zeroes.\n\n fn new_with_background(width: usize, height: usize, color: Self::ColorType, stride: Option<usize>, fourcc: Option<&'a str>) -> Self {\n\n let f = fourcc.unwrap_or(\"YUV422\");\n\n let component_order = YUV422iPixelBuffer::decode_component_order(f);\n\n let line_width = stride.unwrap_or(width * 2);\n\n let data:Vec<u8>;\n\n\n\n let mut representation = vec![0; 4];\n", "file_path": "src/yuv422i/mod.rs", "rank": 24, "score": 18.229478824342856 }, { "content": " /// \n\n /// # Returns\n\n /// \n\n /// This returns a `Result` with either a new instance of `YUV422pPixelBuffer`\n\n /// or `PixelBufferError::BufferTooSmall` if the buffer is too small for\n\n /// the requested dimensions\n\n fn new_with_data(width: usize, height: usize, data: Vec<u8>, stride: Option<usize>, fourcc: Option<&'a str>) -> Result<Self, PixelBufferError> {\n\n\n\n if data.len() < stride.unwrap_or(width) * height {\n\n return Err(PixelBufferError::BufferTooSmall);\n\n }\n\n\n\n 
let f = fourcc.unwrap_or(\"YV12\");\n\n let component_order = YUV422pPixelBuffer::decode_component_order(f);\n\n\n\n Ok(\n\n YUV422pPixelBuffer {\n\n width,\n\n height,\n\n stride: stride.unwrap_or(width),\n", "file_path": "src/yuv422p/mod.rs", "rank": 25, "score": 18.19453534475677 }, { "content": " /// This returns a `Result` with either a new instance of `RGBPixelBuffer`\n\n /// or `PixelBufferError::BufferTooSmall` if the buffer is too small for\n\n /// the requested dimensions\n\n fn new_with_data(width: usize, height: usize, data: Vec<u8>, stride: Option<usize>, fourcc: Option<&'a str>) -> Result<Self, PixelBufferError> {\n\n let f = fourcc.unwrap_or(\"RGB\");\n\n let component_order = RGBPixelBuffer::decode_component_order(f);\n\n\n\n if data.len() < stride.unwrap_or(width * component_order.len()) * height {\n\n return Err(PixelBufferError::BufferTooSmall);\n\n }\n\n\n\n \n\n Ok(\n\n RGBPixelBuffer {\n\n width,\n\n height,\n\n data,\n\n stride: stride.unwrap_or(width * component_order.len()),\n\n fourcc: f,\n\n component_order\n", "file_path": "src/rgb/mod.rs", "rank": 26, "score": 18.160652378493268 }, { "content": " /// \n\n /// * `width` - The width of the buffer\n\n /// * `height` - The height of the buffer\n\n /// * `stride` - optional, the line-width of the buffer if it differs from the\n\n /// default: `<length of color type representation> * width`\n\n /// * `fourcc` - optional, data representation format\n\n /// \n\n /// # Defined fourcc codes\n\n /// \n\n /// * `Y`, tightly packed grayscale only image\n\n /// * `Yxx`, grayscale image with 2 bytes of padding (to interpret a YUV444 image as grayscale)\n\n /// * `Yx`, grayscale image with 1 byte of padding (to interpret a YUV422 interleaved image as grayscale)\n\n /// * `xY`, like `Yx` but inverted order\n\n /// \n\n /// # Returns\n\n /// \n\n /// This returns a new instance of `GrayscalePixelBuffer` with it's contents set to zero\n\n fn new(width: usize, height: usize, stride: Option<usize>, fourcc: 
Option<&'a str>) -> Self {\n\n let f = fourcc.unwrap_or(\"Y\");\n\n let component_order = GrayscalePixelBuffer::decode_component_order(f);\n", "file_path": "src/grayscale/mod.rs", "rank": 27, "score": 17.64413883404324 }, { "content": " /// * `ARGB`\n\n /// * `BGRA`\n\n /// * `ABGR`\n\n /// \n\n /// # Returns\n\n /// \n\n /// This returns a new instance of `RGBPixelBuffer` with it's contents set to the\n\n /// defined color. If stride is bigger than needed width the padding is filled with\n\n /// zeroes.\n\n fn new_with_background(width: usize, height: usize, color: Self::ColorType, stride: Option<usize>, fourcc: Option<&'a str>) -> Self {\n\n let f = fourcc.unwrap_or(\"RGB\");\n\n let component_order = RGBPixelBuffer::decode_component_order(f);\n\n let rep: [u8; 4] = color.into();\n\n let line_width = stride.unwrap_or(width * component_order.len());\n\n let data:Vec<u8>;\n\n\n\n let representation =\n\n if component_order.len() == 3 {\n\n vec![\n\n rep[component_order[0] as usize],\n", "file_path": "src/rgb/mod.rs", "rank": 28, "score": 17.53752879315329 }, { "content": "\n\n /// Create a new pixel buffer with given dimensions from a `Vec<u8>`\n\n /// \n\n /// # Arguments\n\n /// \n\n /// * `width` - The width of the buffer\n\n /// * `height` - The height of the buffer\n\n /// * `stride` - optional, the line-width of the buffer if it differs from the\n\n /// default: `<length of color type representation> * width`\n\n /// * `fourcc` - optional, data representation format\n\n /// * `data` - the data to consume\n\n /// \n\n /// # Returns\n\n /// \n\n /// This returns a `Result` with either a new instance of `Self`\n\n /// or `PixelBufferError::BufferTooSmall` if the buffer is too small for\n\n /// the requested dimensions\n\n fn new_with_data(width: usize, height: usize, data: Vec<u8>, stride: Option<usize>, fourcc: Option<&'a str>) -> Result<Self, PixelBufferError>;\n\n \n\n /// Create a new pixel buffer with given dimensions and fill color\n", "file_path": 
"src/lib.rs", "rank": 29, "score": 17.491307220742854 }, { "content": " width,\n\n height,\n\n stride: stride.unwrap_or(width * 3),\n\n data,\n\n fourcc: f,\n\n component_order\n\n }\n\n )\n\n }\n\n\n\n /// Create a new pixel buffer with given dimensions and fill color\n\n /// \n\n /// # Arguments\n\n /// \n\n /// * `width` - The width of the buffer\n\n /// * `height` - The height of the buffer\n\n /// * `stride` - optional, the line-width of the buffer if it differs from the\n\n /// default: `<length of color type representation> * width`\n\n /// * `fourcc` - optional, data representation format\n\n /// * `color` - fill color to use\n", "file_path": "src/yuv422i/mod.rs", "rank": 30, "score": 17.190617320598825 }, { "content": " /// * `fourcc` - optional, data representation format\n\n /// * `data` - the data to consume\n\n /// \n\n /// # Defined fourcc codes\n\n /// \n\n /// * `Y`, tightly packed grayscale only image\n\n /// * `Yxx`, grayscale image with 2 bytes of padding (to interpret a YUV444 image as grayscale)\n\n /// * `Yx`, grayscale image with 1 byte of padding (to interpret a YUV422 interleaved image as grayscale)\n\n /// * `xY`, like `Yx` but inverted order\n\n /// \n\n /// # Returns\n\n /// \n\n /// This returns a `Result` with either a new instance of `GrayscalePixelBuffer`\n\n /// or `PixelBufferError::BufferTooSmall` if the buffer is too small for\n\n /// the requested dimensions\n\n fn new_with_data(width: usize, height: usize, data: Vec<u8>, stride: Option<usize>, fourcc: Option<&'a str>) -> Result<Self, PixelBufferError> {\n\n\n\n if data.len() < stride.unwrap_or(width) * height {\n\n return Err(PixelBufferError::BufferTooSmall);\n\n }\n", "file_path": "src/grayscale/mod.rs", "rank": 31, "score": 17.17243060688327 }, { "content": "\n\n fn set_pixel(&mut self, x: usize, y: usize, color: Self::ColorType) -> Result<(), PixelBufferError> {\n\n if (x >= self.width) || (y >= self.width) {\n\n return Err(PixelBufferError::RequestOutOfBounds);\n\n 
}\n\n\n\n let start = x + y * self.stride;\n\n let plane1 = (self.stride * self.height) + x / 2 + y * self.stride / 2;\n\n let plane2 = plane1 + (self.stride * self.height / 2);\n\n\n\n self.data[start] = color.y;\n\n if self.component_order[0] == YUVComponent::U {\n\n self.data[plane1] = (self.data[plane1] + color.cb) >> 1;\n\n self.data[plane2] = (self.data[plane2] + color.cr) >> 1;\n\n } else {\n\n self.data[plane2] = (self.data[plane2] + color.cb) >> 1;\n\n self.data[plane1] = (self.data[plane1] + color.cr) >> 1;\n\n }\n\n \n\n Ok(())\n", "file_path": "src/yuv422p/mod.rs", "rank": 32, "score": 17.13306657618141 }, { "content": "\n\n Ok(())\n\n }\n\n\n\n fn get_pixel(&self, x: usize, y: usize) -> Result<Self::ColorType, PixelBufferError> {\n\n if (x >= self.width) || (y >= self.width) {\n\n return Err(PixelBufferError::RequestOutOfBounds);\n\n }\n\n\n\n let start = x * 2 + y * self.stride - ((x % 2) * 2);\n\n\n\n let mut y: u8 = 0;\n\n let mut u: u8 = 0;\n\n let mut v: u8 = 0;\n\n\n\n let order = &self.component_order[x % 2];\n\n for i in 0..4 {\n\n match order[i] {\n\n YUVComponent::Y => y = self.data[start + i],\n\n YUVComponent::U => u = self.data[start + i],\n", "file_path": "src/yuv422i/mod.rs", "rank": 33, "score": 17.113300671166275 }, { "content": " /// \n\n /// # Defined fourcc codes\n\n /// \n\n /// * `YVYU`\n\n /// * `UYVY`\n\n /// * `VYUY`\n\n /// * `YUYV`, `YUV4:2:2`, `YUV422`, `YUY2` (default)\n\n /// \n\n /// # Returns\n\n /// \n\n /// This returns a new instance of `YUV422iPixelBuffer` with it's contents set to zero\n\n fn new(width: usize, height: usize, stride: Option<usize>, fourcc: Option<&'a str>) -> Self {\n\n let f = fourcc.unwrap_or(\"YUV422\");\n\n let component_order = YUV422iPixelBuffer::decode_component_order(f);\n\n let line_width = stride.unwrap_or(width * 2);\n\n\n\n YUV422iPixelBuffer {\n\n width,\n\n height,\n\n stride: line_width,\n", "file_path": "src/yuv422i/mod.rs", "rank": 34, "score": 17.042900186858947 }, { "content": " 
}\n\n\n\n fn get_fourcc(&self) -> &'a str {\n\n self.fourcc\n\n }\n\n\n\n fn set_pixel(&mut self, x: usize, y: usize, color: Self::ColorType) -> Result<(), PixelBufferError> {\n\n if (x >= self.width) || (y >= self.width) {\n\n return Err(PixelBufferError::RequestOutOfBounds);\n\n }\n\n\n\n let start = x * self.component_order.len() + y * self.stride;\n\n let repr: [u8; 4] = color.into();\n\n\n\n for i in 0..self.component_order.len() {\n\n self.data[start + i] = repr[self.component_order[i] as usize];\n\n }\n\n \n\n Ok(())\n\n }\n", "file_path": "src/rgb/mod.rs", "rank": 35, "score": 16.864125161348078 }, { "content": "//\n\n// Tests\n\n//\n\n\n\npub use crate::{ PixelBuffer, PixelBufferError };\n\npub use crate::grayscale::GrayscalePixelBuffer;\n\npub use grapho_color::DigitalGrayscaleColor;\n\n\n\n#[cfg(test)]\n\nmod grayscale {\n\n use super::*;\n\n\n\n #[test]\n\n fn empty_buffer() {\n\n let buffer = GrayscalePixelBuffer::new(2, 2, None, Some(\"Y\"));\n\n assert_eq!(buffer.data.len(), 4);\n\n assert_eq!(buffer.data[0], 0);\n\n assert_eq!(buffer.width, 2);\n\n assert_eq!(buffer.height, 2);\n\n assert_eq!(buffer.stride, 2);\n", "file_path": "src/grayscale/tests.rs", "rank": 36, "score": 16.357852926208103 }, { "content": " /// * `UYVY`\n\n /// * `VYUY`\n\n /// * `YUYV`, `YUV4:2:2`, `YUV422`, `YUY2`\n\n /// \n\n /// # Returns\n\n /// \n\n /// This returns a `Result` with either a new instance of `YUV422iPixelBuffer`\n\n /// or `PixelBufferError::BufferTooSmall` if the buffer is too small for\n\n /// the requested dimensions\n\n fn new_with_data(width: usize, height: usize, data: Vec<u8>, stride: Option<usize>, fourcc: Option<&'a str>) -> Result<Self, PixelBufferError> {\n\n\n\n if data.len() < stride.unwrap_or(width * 2) * height {\n\n return Err(PixelBufferError::BufferTooSmall);\n\n }\n\n\n\n let f = fourcc.unwrap_or(\"YUV422\");\n\n let component_order = YUV422iPixelBuffer::decode_component_order(f);\n\n\n\n Ok(\n\n YUV422iPixelBuffer {\n", "file_path": 
"src/yuv422i/mod.rs", "rank": 37, "score": 16.2162150742622 }, { "content": " data,\n\n fourcc: f,\n\n component_order\n\n }\n\n )\n\n }\n\n\n\n /// Create a new pixel buffer with given dimensions and fill color\n\n /// \n\n /// # Arguments\n\n /// \n\n /// * `width` - The width of the buffer\n\n /// * `height` - The height of the buffer\n\n /// * `stride` - optional, the line-width of the buffer if it differs from the\n\n /// default: `<length of color type representation> * width`\n\n /// * `fourcc` - optional, data representation format\n\n /// * `color` - fill color to use\n\n /// \n\n /// # Defined fourcc codes\n\n /// \n", "file_path": "src/yuv422p/mod.rs", "rank": 38, "score": 16.032918863982793 }, { "content": " vec![YUVComponent::Y, YUVComponent::U, YUVComponent::Ignore, YUVComponent::V],\n\n vec![YUVComponent::Ignore, YUVComponent::U, YUVComponent::Y, YUVComponent::V]\n\n ],\n\n } \n\n }\n\n}\n\n\n\n\n\nimpl<'a> PixelBuffer<'a> for YUV422iPixelBuffer<'a> {\n\n type ColorType = DigitalYCbCrColor;\n\n\n\n /// Create a new pixel buffer with given dimensions\n\n /// \n\n /// # Arguments\n\n /// \n\n /// * `width` - The width of the buffer\n\n /// * `height` - The height of the buffer\n\n /// * `stride` - optional, the line-width of the buffer if it differs from the\n\n /// default: `<length of color type representation> * width`\n\n /// * `fourcc` - optional, data representation format\n", "file_path": "src/yuv422i/mod.rs", "rank": 39, "score": 15.708923842753677 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod yuv {\n\n use super::*;\n\n\n\n #[test]\n\n fn empty_buffer() {\n\n let buffer = YUV444iPixelBuffer::new(2, 2, None, Some(\"YUV444\"));\n\n assert_eq!(buffer.data.len(), 12);\n\n assert_eq!(buffer.data[0], 0);\n\n assert_eq!(buffer.data[1], 0);\n\n assert_eq!(buffer.data[2], 0);\n\n assert_eq!(buffer.width, 2);\n\n assert_eq!(buffer.height, 2);\n\n assert_eq!(buffer.stride, 2 * 3);\n\n assert_eq!(buffer.get_width(), 2);\n\n 
assert_eq!(buffer.get_height(), 2);\n\n assert_eq!(buffer.get_stride(), 2 * 3);\n", "file_path": "src/yuv444i/tests.rs", "rank": 41, "score": 15.477340776909466 }, { "content": " \"YV21\" | \"YVU\" =>\n\n vec![YUVComponent::V, YUVComponent::U],\n\n \"YV12\" | \"YUV\" | _ =>\n\n vec![YUVComponent::U, YUVComponent::V],\n\n } \n\n }\n\n}\n\n\n\n\n\nimpl<'a> PixelBuffer<'a> for YUV422pPixelBuffer<'a> {\n\n type ColorType = DigitalYCbCrColor;\n\n\n\n /// Create a new pixel buffer with given dimensions\n\n /// \n\n /// # Arguments\n\n /// \n\n /// * `width` - The width of the buffer\n\n /// * `height` - The height of the buffer\n\n /// * `stride` - optional, the line-width of the buffer if it differs from the\n\n /// default: `<length of color type representation> * width`\n", "file_path": "src/yuv422p/mod.rs", "rank": 43, "score": 15.203627574296728 }, { "content": " }\n\n )\n\n }\n\n\n\n /// Create a new pixel buffer with given dimensions and fill color\n\n /// \n\n /// # Arguments\n\n /// \n\n /// * `width` - The width of the buffer\n\n /// * `height` - The height of the buffer\n\n /// * `stride` - optional, the line-width of the buffer if it differs from the\n\n /// default: `<length of color type representation> * width`\n\n /// * `fourcc` - optional, data representation format\n\n /// * `color` - fill color to use\n\n /// \n\n /// # Defined fourcc codes\n\n /// \n\n /// * `RGB` (default)\n\n /// * `BGR`\n\n /// * `RGBA`\n", "file_path": "src/rgb/mod.rs", "rank": 44, "score": 14.930516468626113 }, { "content": " data: vec![0; line_width * height],\n\n fourcc: f,\n\n component_order\n\n }\n\n }\n\n\n\n /// Create a new pixel buffer with given dimensions from a `Vec<u8>`\n\n /// \n\n /// # Arguments\n\n /// \n\n /// * `width` - The width of the buffer\n\n /// * `height` - The height of the buffer\n\n /// * `stride` - optional, the line-width of the buffer if it differs from the\n\n /// default: `<length of color type representation> * width`\n\n /// * `fourcc` - 
optional, data representation format\n\n /// * `data` - the data to consume\n\n /// \n\n /// # Defined fourcc codes\n\n /// \n\n /// * `YVYU`\n", "file_path": "src/yuv422i/mod.rs", "rank": 46, "score": 14.760663821195957 }, { "content": " for i in 0..4 {\n\n representation[i] = \n\n match component_order[0][i] {\n\n YUVComponent::Y | YUVComponent::Ignore => color.y,\n\n YUVComponent::U => color.cb,\n\n YUVComponent::V => color.cr\n\n };\n\n }\n\n\n\n if line_width > width * 2 {\n\n let mut line = representation.repeat(width / 2);\n\n line.extend([0].repeat(line_width - width * 2));\n\n data = line.repeat(height);\n\n } else {\n\n data = representation.repeat(width / 2 * height);\n\n } \n\n \n\n YUV422iPixelBuffer {\n\n width,\n\n height,\n", "file_path": "src/yuv422i/mod.rs", "rank": 47, "score": 14.728821973622829 }, { "content": " y.extend([0].repeat(line_width - width));\n\n u.extend([0].repeat((line_width - width) / 2));\n\n v.extend([0].repeat((line_width - width) / 2));\n\n }\n\n\n\n data.extend(y.repeat(height));\n\n\n\n if component_order[0] == YUVComponent::U {\n\n data.extend(u.repeat(height));\n\n data.extend(v.repeat(height));\n\n } else {\n\n data.extend(v.repeat(height));\n\n data.extend(u.repeat(height));\n\n }\n\n \n\n YUV422pPixelBuffer {\n\n width,\n\n height,\n\n stride: line_width,\n\n data,\n", "file_path": "src/yuv422p/mod.rs", "rank": 48, "score": 14.521640581101199 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod yvu {\n\n use super::*;\n\n\n\n #[test]\n\n fn empty_buffer() {\n\n let buffer = YUV444iPixelBuffer::new(2, 2, None, Some(\"YVU\"));\n\n assert_eq!(buffer.data.len(), 12);\n\n assert_eq!(buffer.data[0], 0);\n\n assert_eq!(buffer.data[1], 0);\n\n assert_eq!(buffer.data[2], 0);\n\n assert_eq!(buffer.width, 2);\n\n assert_eq!(buffer.height, 2);\n\n assert_eq!(buffer.stride, 2 * 3);\n\n assert_eq!(buffer.get_width(), 2);\n\n assert_eq!(buffer.get_height(), 2);\n\n assert_eq!(buffer.get_stride(), 2 * 3);\n", "file_path": 
"src/yuv444i/tests.rs", "rank": 49, "score": 14.516176534267831 }, { "content": " Ok(())\n\n }\n\n\n\n fn get_pixel(&self, x: usize, y: usize) -> Result<Self::ColorType, PixelBufferError> {\n\n if (x >= self.width) || (y >= self.width) {\n\n return Err(PixelBufferError::RequestOutOfBounds);\n\n }\n\n\n\n let mut color = DigitalGrayscaleColor { v: 0 };\n\n let start = x + y * self.stride;\n\n for i in 0..self.component_order.len() {\n\n match self.component_order[i] {\n\n GrayscaleComponent::Value => {\n\n color = DigitalGrayscaleColor::from(self.data[start + i]);\n\n }\n\n GrayscaleComponent::Ignore => ()\n\n }\n\n }\n\n \n\n Ok(color)\n\n }\n\n}\n\n\n\npub mod iter;\n\nmod tests;\n", "file_path": "src/grayscale/mod.rs", "rank": 50, "score": 14.411351032049112 }, { "content": " /// # Arguments\n\n /// \n\n /// * `width` - The width of the buffer\n\n /// * `height` - The height of the buffer\n\n /// * `stride` - optional, the line-width of the buffer if it differs from the\n\n /// default: `<length of color type representation> * width`\n\n /// * `fourcc` - optional, data representation format\n\n /// * `data` - the data to consume\n\n /// \n\n /// # Defined fourcc codes\n\n /// \n\n /// * `RGB`\n\n /// * `BGR`\n\n /// * `RGBA`\n\n /// * `ARGB`\n\n /// * `BGRA`\n\n /// * `ABGR`\n\n /// \n\n /// # Returns\n\n /// \n", "file_path": "src/rgb/mod.rs", "rank": 51, "score": 14.397087211513092 }, { "content": " /// height of the buffer\n\n fn get_height(&self) -> usize;\n\n\n\n /// stride of the buffer\n\n fn get_stride(&self) -> usize;\n\n\n\n /// fourcc code of the buffer\n\n fn get_fourcc(&self) -> &'a str;\n\n\n\n /// Set a pixel to a color\n\n /// \n\n /// # Arguments\n\n /// \n\n /// * `x` - x coordinate (from top left)\n\n /// * `y` - y coordinate (from top left)\n\n /// * `color` - color to set the pixel to\n\n /// \n\n /// # Returns\n\n /// \n\n /// A `Result`, either a `()` if everything went ok, or\n", "file_path": "src/lib.rs", "rank": 52, "score": 
14.205413236171573 }, { "content": " let line_width = stride.unwrap_or(width * component_order.len());\n\n\n\n GrayscalePixelBuffer {\n\n width,\n\n height,\n\n stride: line_width,\n\n fourcc: f,\n\n component_order,\n\n data: vec![0; line_width * height]\n\n }\n\n }\n\n\n\n /// Create a new pixel buffer with given dimensions from a `Vec<u8>`\n\n /// \n\n /// # Arguments\n\n /// \n\n /// * `width` - The width of the buffer\n\n /// * `height` - The height of the buffer\n\n /// * `stride` - optional, the line-width of the buffer if it differs from the\n\n /// default: `<length of color type representation> * width`\n", "file_path": "src/grayscale/mod.rs", "rank": 53, "score": 13.784810991305918 }, { "content": "\n\n fn get_pixel(&self, x: usize, y: usize) -> Result<Self::ColorType, PixelBufferError> {\n\n if (x >= self.width) || (y >= self.width) {\n\n return Err(PixelBufferError::RequestOutOfBounds);\n\n }\n\n\n\n let start = x * self.component_order.len() + y * self.stride;\n\n let mut color: [u8; 4] = [0, 0, 0, 255];\n\n for i in 0..self.component_order.len() {\n\n color[self.component_order[i] as usize] = self.data[start + i];\n\n }\n\n \n\n Ok(DigitalRGBAColor::from(color))\n\n }\n\n}\n\n\n\npub mod iter;\n\nmod tests;\n", "file_path": "src/rgb/mod.rs", "rank": 54, "score": 13.62334497628218 }, { "content": " /// Create a new pixel buffer with given dimensions\n\n /// \n\n /// # Arguments\n\n /// \n\n /// * `width` - The width of the buffer\n\n /// * `height` - The height of the buffer\n\n /// * `stride` - optional, the line-width of the buffer if it differs from the\n\n /// default: `<length of color type representation> * width`\n\n /// * `fourcc` - optional, data representation format\n\n /// \n\n /// # Defined fourcc codes\n\n /// \n\n /// * `RGB`\n\n /// * `BGR`\n\n /// * `RGBA`\n\n /// * `ARGB`\n\n /// * `BGRA`\n\n /// * `ABGR`\n\n /// \n\n /// # Returns\n", "file_path": "src/rgb/mod.rs", "rank": 55, "score": 13.100391284032037 }, { "content": " let f = 
fourcc.unwrap_or(\"Y\");\n\n let component_order = GrayscalePixelBuffer::decode_component_order(f);\n\n let line_width = stride.unwrap_or(width * component_order.len());\n\n let data:Vec<u8>;\n\n\n\n let mut representation:Vec<u8> = vec![0; component_order.len()];\n\n for i in 0..component_order.len() {\n\n representation[i] =\n\n match component_order[i] {\n\n GrayscaleComponent::Value => color.v,\n\n GrayscaleComponent::Ignore => 0u8\n\n };\n\n }\n\n if line_width > width {\n\n let mut line = representation.repeat(width);\n\n line.extend([0].repeat(line_width - width * component_order.len()));\n\n data = line.repeat(height);\n\n } else {\n\n data = representation.repeat(width * height);\n\n } \n", "file_path": "src/grayscale/mod.rs", "rank": 56, "score": 13.002789295151029 }, { "content": " \n\n let f = fourcc.unwrap_or(\"Y\");\n\n let component_order = GrayscalePixelBuffer::decode_component_order(f);\n\n\n\n Ok(\n\n GrayscalePixelBuffer {\n\n width,\n\n height,\n\n stride: stride.unwrap_or(width),\n\n data,\n\n fourcc: f,\n\n component_order\n\n }\n\n )\n\n }\n\n\n\n /// Create a new pixel buffer with given dimensions and fill color\n\n /// \n\n /// # Arguments\n\n /// \n", "file_path": "src/grayscale/mod.rs", "rank": 57, "score": 12.950786490757386 }, { "content": " rep[component_order[1] as usize],\n\n rep[component_order[2] as usize],\n\n ]\n\n } else {\n\n vec![\n\n rep[component_order[0] as usize],\n\n rep[component_order[1] as usize],\n\n rep[component_order[2] as usize],\n\n rep[component_order[3] as usize],\n\n ]\n\n };\n\n\n\n if line_width > width * component_order.len() {\n\n let mut line = representation.repeat(width);\n\n line.extend([0].repeat(line_width - width * component_order.len()));\n\n data = line.repeat(height);\n\n } else {\n\n data = representation.repeat(width * height);\n\n } \n\n \n", "file_path": "src/rgb/mod.rs", "rank": 59, "score": 12.148788389614076 }, { "content": " use super::*;\n\n \n\n #[test]\n\n fn empty_buffer() {\n\n let 
buffer = RGBPixelBuffer::new(2, 2, None, Some(\"BGRA\"));\n\n assert_eq!(buffer.data.len(), 16);\n\n assert_eq!(buffer.data[0], 0);\n\n assert_eq!(buffer.data[1], 0);\n\n assert_eq!(buffer.data[2], 0);\n\n assert_eq!(buffer.width, 2);\n\n assert_eq!(buffer.height, 2);\n\n assert_eq!(buffer.stride, 2 * 4);\n\n assert_eq!(buffer.get_width(), 2);\n\n assert_eq!(buffer.get_height(), 2);\n\n assert_eq!(buffer.get_stride(), 2 * 4);\n\n }\n\n\n\n #[test]\n\n fn prefilled_buffer() {\n\n let data = vec![0, 255, 64, 255, 0, 255, 64, 255, 0, 255, 64, 255, 0, 255, 64, 255];\n", "file_path": "src/rgb/tests.rs", "rank": 60, "score": 11.895491183407353 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod yv12 {\n\n use super::*;\n\n\n\n #[test]\n\n fn empty_buffer() {\n\n let buffer = YUV422pPixelBuffer::new(2, 2, None, Some(\"YV12\"));\n\n assert_eq!(buffer.data.len(), 8);\n\n assert_eq!(buffer.data[0], 0);\n\n assert_eq!(buffer.data[1], 0);\n\n assert_eq!(buffer.width, 2);\n\n assert_eq!(buffer.height, 2);\n\n assert_eq!(buffer.stride, 2);\n\n assert_eq!(buffer.get_width(), 2);\n\n assert_eq!(buffer.get_height(), 2);\n\n assert_eq!(buffer.get_stride(), 2);\n\n }\n\n\n", "file_path": "src/yuv422p/tests.rs", "rank": 61, "score": 11.859558965157257 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod uyvy {\n\n use super::*;\n\n\n\n #[test]\n\n fn empty_buffer() {\n\n let buffer = YUV422iPixelBuffer::new(2, 2, None, Some(\"UYVY\"));\n\n assert_eq!(buffer.data.len(), 8);\n\n assert_eq!(buffer.data[0], 0);\n\n assert_eq!(buffer.data[1], 0);\n\n assert_eq!(buffer.width, 2);\n\n assert_eq!(buffer.height, 2);\n\n assert_eq!(buffer.stride, 2 * 2);\n\n assert_eq!(buffer.get_width(), 2);\n\n assert_eq!(buffer.get_height(), 2);\n\n assert_eq!(buffer.get_stride(), 2 * 2);\n\n }\n\n\n", "file_path": "src/yuv422i/tests.rs", "rank": 62, "score": 11.859558965157259 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod rgb {\n\n use super::*;\n\n\n\n #[test]\n\n fn empty_buffer() {\n\n let buffer = 
RGBPixelBuffer::new(2, 2, None, Some(\"RGB\".into()));\n\n assert_eq!(buffer.data.len(), 12);\n\n assert_eq!(buffer.data[0], 0);\n\n assert_eq!(buffer.data[1], 0);\n\n assert_eq!(buffer.data[2], 0);\n\n assert_eq!(buffer.width, 2);\n\n assert_eq!(buffer.height, 2);\n\n assert_eq!(buffer.stride, 2 * 3);\n\n assert_eq!(buffer.get_width(), 2);\n\n assert_eq!(buffer.get_height(), 2);\n\n assert_eq!(buffer.get_stride(), 2 * 3);\n", "file_path": "src/rgb/tests.rs", "rank": 63, "score": 11.795636109152644 }, { "content": "#[cfg(test)]\n\nmod vyuy {\n\n use super::*;\n\n\n\n #[test]\n\n fn empty_buffer() {\n\n let buffer = YUV422iPixelBuffer::new(2, 2, None, Some(\"VYUY\"));\n\n assert_eq!(buffer.data.len(), 8);\n\n assert_eq!(buffer.data[0], 0);\n\n assert_eq!(buffer.data[1], 0);\n\n assert_eq!(buffer.width, 2);\n\n assert_eq!(buffer.height, 2);\n\n assert_eq!(buffer.stride, 2 * 2);\n\n assert_eq!(buffer.get_width(), 2);\n\n assert_eq!(buffer.get_height(), 2);\n\n assert_eq!(buffer.get_stride(), 2 * 2);\n\n }\n\n\n\n #[test]\n\n fn prefilled_buffer() {\n", "file_path": "src/yuv422i/tests.rs", "rank": 64, "score": 11.62624242295146 }, { "content": " assert_eq!(buffer.data.len(), 12);\n\n assert_eq!(buffer.data[0], 0);\n\n assert_eq!(buffer.data[1], 0);\n\n assert_eq!(buffer.data[2], 0);\n\n assert_eq!(buffer.width, 2);\n\n assert_eq!(buffer.height, 2);\n\n assert_eq!(buffer.stride, 2 * 3);\n\n assert_eq!(buffer.get_width(), 2);\n\n assert_eq!(buffer.get_height(), 2);\n\n assert_eq!(buffer.get_stride(), 2 * 3);\n\n }\n\n\n\n #[test]\n\n fn prefilled_buffer() {\n\n let data = vec![0, 255, 64, 0, 255, 64, 0, 255, 64, 0, 255, 64];\n\n let copy = data.clone();\n\n let buffer = YUV444iPixelBuffer::new_with_data(2, 2, data, None, Some(\"VUY\")).unwrap();\n\n assert_eq!(buffer.data.len(), 12);\n\n for x in 0..buffer.data.len() {\n\n assert_eq!(copy[x], buffer.data[x]);\n", "file_path": "src/yuv444i/tests.rs", "rank": 65, "score": 11.556107444768555 }, { "content": " 
assert_eq!(buffer.data.len(), 12);\n\n assert_eq!(buffer.data[0], 0);\n\n assert_eq!(buffer.data[1], 0);\n\n assert_eq!(buffer.data[2], 0);\n\n assert_eq!(buffer.width, 2);\n\n assert_eq!(buffer.height, 2);\n\n assert_eq!(buffer.stride, 2 * 3);\n\n assert_eq!(buffer.get_width(), 2);\n\n assert_eq!(buffer.get_height(), 2);\n\n assert_eq!(buffer.get_stride(), 2 * 3);\n\n }\n\n\n\n #[test]\n\n fn prefilled_buffer() {\n\n let data = vec![0, 255, 64, 0, 255, 64, 0, 255, 64, 0, 255, 64];\n\n let copy = data.clone();\n\n let buffer = YUV444iPixelBuffer::new_with_data(2, 2, data, None, Some(\"UVY\")).unwrap();\n\n assert_eq!(buffer.data.len(), 12);\n\n for x in 0..buffer.data.len() {\n\n assert_eq!(copy[x], buffer.data[x]);\n", "file_path": "src/yuv444i/tests.rs", "rank": 66, "score": 11.556107444768557 }, { "content": "//\n\n// Tests\n\n//\n\n\n\npub use crate::{ PixelBuffer, PixelBufferError };\n\npub use crate::yuv444i::YUV444iPixelBuffer;\n\npub use grapho_color::DigitalYCbCrColor;\n\n\n\n#[cfg(test)]\n\nmod iter {\n\n use super::*;\n\n\n\n #[test]\n\n fn iter_buffer() {\n\n let color = DigitalYCbCrColor{ y: 255, cb: 64, cr: 0 };\n\n let buffer = YUV444iPixelBuffer::new_with_background(2, 2, color, None, Some(\"YUV444\"));\n\n\n\n for pixel in buffer {\n\n assert_eq!(pixel.2, color);\n\n }\n", "file_path": "src/yuv444i/tests.rs", "rank": 67, "score": 11.351482265880769 }, { "content": " assert_eq!(buffer.data[1], 0);\n\n assert_eq!(buffer.width, 2);\n\n assert_eq!(buffer.height, 2);\n\n assert_eq!(buffer.stride, 2 * 2);\n\n assert_eq!(buffer.get_width(), 2);\n\n assert_eq!(buffer.get_height(), 2);\n\n assert_eq!(buffer.get_stride(), 2 * 2);\n\n }\n\n\n\n #[test]\n\n fn prefilled_buffer() {\n\n let data = vec![255, 64, 128, 64, 255, 64, 128, 64];\n\n let copy = data.clone();\n\n let buffer = YUV422iPixelBuffer::new_with_data(2, 2, data, None, Some(\"YVYU\")).unwrap();\n\n assert_eq!(buffer.data.len(), 8);\n\n for x in 0..buffer.data.len() {\n\n assert_eq!(copy[x], 
buffer.data[x]);\n\n }\n\n }\n\n\n", "file_path": "src/yuv422i/tests.rs", "rank": 68, "score": 10.377134613334865 }, { "content": " component_order: [Vec<YUVComponent>; 2],\n\n data: Vec<u8>\n\n}\n\n\n\nimpl<'a> YUV422iPixelBuffer<'a> {\n\n fn decode_component_order(fourcc:&'a str) -> [Vec<YUVComponent>; 2] {\n\n match fourcc {\n\n \"YVYU\" => [\n\n vec![YUVComponent::Y, YUVComponent::V, YUVComponent::Ignore, YUVComponent::U],\n\n vec![YUVComponent::Ignore, YUVComponent::V, YUVComponent::Y, YUVComponent::U]\n\n ],\n\n \"UYVY\" => [\n\n vec![YUVComponent::U, YUVComponent::Y, YUVComponent::V, YUVComponent::Ignore],\n\n vec![YUVComponent::U, YUVComponent::Ignore, YUVComponent::V, YUVComponent::Y]\n\n ],\n\n \"VYUY\" => [\n\n vec![YUVComponent::V, YUVComponent::Y, YUVComponent::U, YUVComponent::Ignore],\n\n vec![YUVComponent::V, YUVComponent::Ignore, YUVComponent::U, YUVComponent::Y]\n\n ],\n\n \"YUYV\" | \"YUV4:2:2\" | \"YUV422\" | \"YUY2\" | _ => [\n", "file_path": "src/yuv422i/mod.rs", "rank": 69, "score": 10.039553601531775 }, { "content": " assert_eq!(buffer.data.len(), 16);\n\n assert_eq!(buffer.data[0], 0);\n\n assert_eq!(buffer.data[1], 0);\n\n assert_eq!(buffer.data[2], 0);\n\n assert_eq!(buffer.width, 2);\n\n assert_eq!(buffer.height, 2);\n\n assert_eq!(buffer.stride, 2 * 4);\n\n assert_eq!(buffer.get_width(), 2);\n\n assert_eq!(buffer.get_height(), 2);\n\n assert_eq!(buffer.get_stride(), 2 * 4);\n\n }\n\n\n\n #[test]\n\n fn prefilled_buffer() {\n\n let data = vec![0, 255, 64, 255, 0, 255, 64, 255, 0, 255, 64, 255, 0, 255, 64, 255];\n\n let copy = data.clone();\n\n let buffer = RGBPixelBuffer::new_with_data(2, 2, data, None, Some(\"RGBA\")).unwrap();\n\n assert_eq!(buffer.data.len(), 16);\n\n for x in 0..buffer.data.len() {\n\n assert_eq!(copy[x], buffer.data[x]);\n", "file_path": "src/rgb/tests.rs", "rank": 70, "score": 9.910190946741622 }, { "content": " /// `PixelBufferError::RequestOutOfBounds` if the request was out of bounds\n\n fn set_pixel(&mut self, 
x: usize, y: usize, color: Self::ColorType) -> Result<(), PixelBufferError>;\n\n\n\n /// Get color of pixel at position\n\n /// \n\n /// # Arguments\n\n /// \n\n /// * `x` - x coordinate (from top left)\n\n /// * `y` - y coordinate (from top left)\n\n /// \n\n /// # Returns\n\n /// \n\n /// A `Result`, either a `ColorType` if everything went ok, or\n\n /// `PixelBufferError::RequestOutOfBounds` if the request was out of bounds\n\n fn get_pixel(&self, x: usize, y: usize) -> Result<Self::ColorType, PixelBufferError>;\n\n\n\n}\n\n\n\npub mod rgb;\n\npub mod grayscale;\n\npub mod yuv422i;\n\npub mod yuv444i;\n\npub mod yuv422p;\n\n\n\npub mod conversion;\n", "file_path": "src/lib.rs", "rank": 71, "score": 9.854750137588024 }, { "content": " #[test]\n\n fn empty_buffer() {\n\n let buffer = YUV422pPixelBuffer::new(2, 2, None, Some(\"YV21\"));\n\n assert_eq!(buffer.data.len(), 8);\n\n assert_eq!(buffer.data[0], 0);\n\n assert_eq!(buffer.data[1], 0);\n\n assert_eq!(buffer.width, 2);\n\n assert_eq!(buffer.height, 2);\n\n assert_eq!(buffer.stride, 2);\n\n assert_eq!(buffer.get_width(), 2);\n\n assert_eq!(buffer.get_height(), 2);\n\n assert_eq!(buffer.get_stride(), 2);\n\n }\n\n\n\n #[test]\n\n fn prefilled_buffer() {\n\n let data = vec![255, 64, 128, 64, 255, 64, 128, 64];\n\n let copy = data.clone();\n\n let buffer = YUV422pPixelBuffer::new_with_data(2, 2, data, None, Some(\"YV21\")).unwrap();\n\n assert_eq!(buffer.data.len(), 8);\n", "file_path": "src/yuv422p/tests.rs", "rank": 72, "score": 9.821547106846761 }, { "content": " assert_eq!(buffer.height, 2);\n\n assert_eq!(buffer.stride, 2 * 3);\n\n assert_eq!(buffer.get_width(), 2);\n\n assert_eq!(buffer.get_height(), 2);\n\n assert_eq!(buffer.get_stride(), 2 * 3);\n\n }\n\n\n\n #[test]\n\n fn prefilled_buffer() {\n\n let data = vec![0, 255, 64, 0, 255, 64, 0, 255, 64, 0, 255, 64];\n\n let copy = data.clone();\n\n let buffer = RGBPixelBuffer::new_with_data(2, 2, data, None, Some(\"BGR\")).unwrap();\n\n 
assert_eq!(buffer.data.len(), 12);\n\n for x in 0..buffer.data.len() {\n\n assert_eq!(copy[x], buffer.data[x]);\n\n }\n\n }\n\n\n\n #[test]\n\n fn bg_buffer() {\n", "file_path": "src/rgb/tests.rs", "rank": 73, "score": 9.796074712558152 }, { "content": " assert_eq!(buffer.height, 2);\n\n assert_eq!(buffer.stride, 2 * 2);\n\n assert_eq!(buffer.get_width(), 2);\n\n assert_eq!(buffer.get_height(), 2);\n\n assert_eq!(buffer.get_stride(), 2 * 2);\n\n }\n\n\n\n #[test]\n\n fn prefilled_buffer() {\n\n let data = vec![255, 64, 128, 64, 255, 64, 128, 64];\n\n let copy = data.clone();\n\n let buffer = YUV422iPixelBuffer::new_with_data(2, 2, data, None, Some(\"YUV422\")).unwrap();\n\n assert_eq!(buffer.data.len(), 8);\n\n for x in 0..buffer.data.len() {\n\n assert_eq!(copy[x], buffer.data[x]);\n\n }\n\n }\n\n\n\n #[test]\n\n fn bg_buffer() {\n", "file_path": "src/yuv422i/tests.rs", "rank": 74, "score": 9.796074712558152 }, { "content": "//\n\n// Tests\n\n//\n\n\n\npub use crate::{ PixelBuffer, PixelBufferError };\n\npub use crate::rgb::RGBPixelBuffer;\n\npub use grapho_color::{ DigitalRGBAColor, DigitalRGBColor };\n\n\n\n#[cfg(test)]\n\nmod iter {\n\n use super::*;\n\n\n\n #[test]\n\n fn iter_buffer() {\n\n let color = DigitalRGBAColor{ r: 255, g: 64, b: 0, a: 255 };\n\n let buffer = RGBPixelBuffer::new_with_background(2, 2, color, None, Some(\"RGB\"));\n\n\n\n for pixel in buffer {\n\n assert_eq!(pixel.2, color);\n\n }\n", "file_path": "src/rgb/tests.rs", "rank": 75, "score": 9.60082272736947 }, { "content": "### YUV 4:4:4 interleaved `YUV444iPixelBuffer`\n\n\n\n- `YUV` / `YUV444` YUV 4:4:4, 3 bytes per pixel, full resolution U and V planes.\n\n- `VUY` like `YUV` but with different component ordering\n\n- `UVY` like `YUV` but with different component ordering\n\n- `YVU` like `YUV` but with different component ordering\n\n\n\n### YUV 4:2:2 planar `YUV422pPixelBuffer`\n\n\n\n- `YV21` / `I420` 8 bit Y plane followed by 8 bit 2x2 subsampled U and V planes.\n\n- `YV12` 8 bit Y 
plane followed by 8 bit 2x2 subsampled V and U planes.\n\n\n\n## TODO\n\n\n\n### YUV 4:2:2 planar/interleaved `YUV422piPixelBuffer`\n\n\n\n- `NV12` 8-bit Y plane followed by an interleaved U/V plane with 2x2 subsampling\n\n- `NV21` As NV12 with U and V reversed in the interleaved plane\n\n\n\n### YCoCg\n\n\n\n- `YCoCg444i` interleaved with stride\n\n- `YCoCg444p` 3 planes no subsampling\n\n- `YCoCg422p` 3 planes, Co and Cg half size subsampled\n\n- `YCoCg422i` interleaved, Co and Cg half size subsampled\n", "file_path": "README.md", "rank": 76, "score": 9.577231552555109 }, { "content": "//! Create iterator for GrayscalePixelBuffer\n\n\n\nuse crate::{ Pixel, PixelBuffer };\n\nuse super::GrayscalePixelBuffer;\n\nuse grapho_color::DigitalGrayscaleColor;\n\n\n\n\n\n/// Pixel iterator for `GrayscalePixelBuffer`\n\n/// \n\n/// Items it will generate are of type `Pixel<DigitalGrayscaleColor>`\n\n#[derive(Debug, PartialEq)]\n\npub struct GrayscalePixelIterator<'a> {\n\n base: GrayscalePixelBuffer<'a>,\n\n x: usize,\n\n y: usize\n\n}\n\n\n\nimpl<'a> Iterator for GrayscalePixelIterator<'a> {\n\n type Item = Pixel<DigitalGrayscaleColor>;\n\n\n", "file_path": "src/grayscale/iter.rs", "rank": 77, "score": 9.557959087876329 }, { "content": "//! 
Create iterator for RGBPixelBuffer\n\n\n\nuse crate::{ Pixel, PixelBuffer };\n\nuse super::RGBPixelBuffer;\n\nuse grapho_color::DigitalRGBAColor;\n\n\n\n\n\n/// Pixel iterator for `RGBPixelBuffer`\n\n/// \n\n/// Items it will generate are of type `Pixel<DigitalRGBColor>`\n\n#[derive(Debug, PartialEq)]\n\npub struct RGBPixelIterator<'a> {\n\n base: RGBPixelBuffer<'a>,\n\n x: usize,\n\n y: usize\n\n}\n\n\n\nimpl<'a> Iterator for RGBPixelIterator<'a> {\n\n type Item = Pixel<DigitalRGBAColor>;\n\n\n", "file_path": "src/rgb/iter.rs", "rank": 78, "score": 9.557959087876329 }, { "content": "//\n\n// Tests\n\n//\n\npub use crate::{ PixelBuffer, PixelBufferError };\n\npub use crate::yuv422i::YUV422iPixelBuffer;\n\npub use grapho_color::DigitalYCbCrColor;\n\n\n\n#[cfg(test)]\n\nmod iter {\n\n use super::*;\n\n\n\n #[test]\n\n fn iter_buffer() {\n\n let color = DigitalYCbCrColor{ y: 255, cb: 64, cr: 0 };\n\n let buffer = YUV422iPixelBuffer::new_with_background(2, 2, color, None, Some(\"YUV422\"));\n\n\n\n for pixel in buffer {\n\n assert_eq!(pixel.2, color);\n\n }\n\n }\n", "file_path": "src/yuv422i/tests.rs", "rank": 79, "score": 9.450097987453626 }, { "content": "//\n\n// Tests\n\n//\n\npub use crate::{ PixelBuffer, PixelBufferError };\n\npub use crate::yuv422p::YUV422pPixelBuffer;\n\npub use grapho_color::DigitalYCbCrColor;\n\n\n\n#[cfg(test)]\n\nmod iter {\n\n use super::*;\n\n\n\n #[test]\n\n fn iter_buffer() {\n\n let color = DigitalYCbCrColor{ y: 255, cb: 64, cr: 0 };\n\n let buffer = YUV422pPixelBuffer::new_with_background(2, 2, color, None, Some(\"YV12\"));\n\n\n\n for pixel in buffer {\n\n assert_eq!(pixel.2, color);\n\n }\n\n }\n", "file_path": "src/yuv422p/tests.rs", "rank": 80, "score": 9.450097987453626 }, { "content": "//! This module contains bitplane and color conversions\n\nuse crate::PixelBuffer;\n\n\n\nmacro_rules! 
conversion {\n\n ($t:ty, $u:ty) => (\n\n\n\n impl From<$t> for $u {\n\n\n\n fn from(f: $t) -> Self {\n\n let mut buffer:$u = <$u>::new(f.get_width(), f.get_height(), None, None);\n\n for (x, y, color) in f {\n\n buffer.set_pixel(x, y, color.into()).unwrap();\n\n }\n\n\n\n buffer\n\n }\n\n }\n\n )\n\n}\n\n\n", "file_path": "src/conversion.rs", "rank": 82, "score": 9.390063943076223 }, { "content": "//! Create iterator for RGBPixelBuffer\n\n\n\nuse crate::{ Pixel, PixelBuffer };\n\nuse super::YUV444iPixelBuffer;\n\nuse grapho_color::DigitalYCbCrColor;\n\n\n\n\n\n/// Pixel iterator for `RGBPixelBuffer`\n\n/// \n\n/// Items it will generate are of type `Pixel<DigitalRGBColor>`\n\n#[derive(Debug, PartialEq)]\n\npub struct YUV444iPixelIterator<'a> {\n\n base: YUV444iPixelBuffer<'a>,\n\n x: usize,\n\n y: usize\n\n}\n\n\n\nimpl<'a> Iterator for YUV444iPixelIterator<'a> {\n\n type Item = Pixel<DigitalYCbCrColor>;\n\n\n", "file_path": "src/yuv444i/iter.rs", "rank": 83, "score": 9.295685337690331 }, { "content": "//! Create iterator for YUV422iPixelBuffer\n\n\n\nuse crate::{ Pixel, PixelBuffer };\n\nuse super::YUV422pPixelBuffer;\n\nuse grapho_color::DigitalYCbCrColor;\n\n\n\n\n\n/// Pixel iterator for `YUV422iPixelBuffer`\n\n/// \n\n/// Items it will generate are of type `Pixel<DigitalCrCbColor>`\n\n#[derive(Debug, PartialEq)]\n\npub struct YUV422pPixelIterator<'a> {\n\n base: YUV422pPixelBuffer<'a>,\n\n x: usize,\n\n y: usize\n\n}\n\n\n\nimpl<'a> Iterator for YUV422pPixelIterator<'a> {\n\n type Item = Pixel<DigitalYCbCrColor>;\n\n\n", "file_path": "src/yuv422p/iter.rs", "rank": 84, "score": 9.232466458980127 }, { "content": "//! 
Create iterator for YUV422iPixelBuffer\n\n\n\nuse crate::{ Pixel, PixelBuffer };\n\nuse super::YUV422iPixelBuffer;\n\nuse grapho_color::DigitalYCbCrColor;\n\n\n\n\n\n/// Pixel iterator for `YUV422iPixelBuffer`\n\n/// \n\n/// Items it will generate are of type `Pixel<DigitalCrCbColor>`\n\n#[derive(Debug, PartialEq)]\n\npub struct YUV422iPixelIterator<'a> {\n\n base: YUV422iPixelBuffer<'a>,\n\n x: usize,\n\n y: usize\n\n}\n\n\n\nimpl<'a> Iterator for YUV422iPixelIterator<'a> {\n\n type Item = Pixel<DigitalYCbCrColor>;\n\n\n", "file_path": "src/yuv422i/iter.rs", "rank": 85, "score": 9.232466458980127 }, { "content": " fn next(&mut self) -> Option<Self::Item> {\n\n if self.y >= self.base.height {\n\n return None;\n\n }\n\n\n\n let color = self.base.get_pixel(self.x, self.y).unwrap();\n\n self.x += 1;\n\n if self.x >= self.base.width {\n\n self.x = 0;\n\n self.y += 1;\n\n }\n\n\n\n Some((self.x, self.y, color))\n\n }\n\n\n\n fn count(self) -> usize {\n\n self.base.width * self.base.height\n\n }\n\n}\n\n\n", "file_path": "src/rgb/iter.rs", "rank": 86, "score": 9.175411121083622 }, { "content": " fn next(&mut self) -> Option<Self::Item> {\n\n if self.y >= self.base.height {\n\n return None;\n\n }\n\n\n\n let color = self.base.get_pixel(self.x, self.y).unwrap();\n\n self.x += 1;\n\n if self.x >= self.base.width {\n\n self.x = 0;\n\n self.y += 1;\n\n }\n\n\n\n Some((self.x, self.y, color))\n\n }\n\n\n\n fn count(self) -> usize {\n\n self.base.width * self.base.height\n\n }\n\n}\n\n\n", "file_path": "src/grayscale/iter.rs", "rank": 87, "score": 9.175411121083622 }, { "content": " fn next(&mut self) -> Option<Self::Item> {\n\n if self.y >= self.base.height {\n\n return None;\n\n }\n\n\n\n let color = self.base.get_pixel(self.x, self.y).unwrap();\n\n self.x += 1;\n\n if self.x >= self.base.width {\n\n self.x = 0;\n\n self.y += 1;\n\n }\n\n\n\n Some((self.x, self.y, color))\n\n }\n\n\n\n fn count(self) -> usize {\n\n self.base.width * self.base.height\n\n }\n\n}\n\n\n", 
"file_path": "src/yuv444i/iter.rs", "rank": 88, "score": 9.175411121083622 }, { "content": " fn next(&mut self) -> Option<Self::Item> {\n\n if self.y >= self.base.height {\n\n return None;\n\n }\n\n\n\n let color = self.base.get_pixel(self.x, self.y).unwrap();\n\n\n\n self.x += 1;\n\n if self.x >= self.base.width {\n\n self.x = 0;\n\n self.y += 1;\n\n }\n\n\n\n Some((self.x, self.y, color))\n\n }\n\n\n\n fn count(self) -> usize {\n\n self.base.width * self.base.height\n\n }\n\n}\n", "file_path": "src/yuv422p/iter.rs", "rank": 89, "score": 9.175411121083622 }, { "content": " fn next(&mut self) -> Option<Self::Item> {\n\n if self.y >= self.base.height {\n\n return None;\n\n }\n\n\n\n let color = self.base.get_pixel(self.x, self.y).unwrap();\n\n\n\n self.x += 1;\n\n if self.x >= self.base.width {\n\n self.x = 0;\n\n self.y += 1;\n\n }\n\n\n\n Some((self.x, self.y, color))\n\n }\n\n\n\n fn count(self) -> usize {\n\n self.base.width * self.base.height\n\n }\n\n}\n", "file_path": "src/yuv422i/iter.rs", "rank": 90, "score": 9.175411121083622 }, { "content": "# grapho-bitplane\n\n\n\nThis is the pixel buffer library of the `grapho`-set of crates.\n\n\n\nThere is also:\n\n\n\n- `grapho-color`: describes the color primitives used by the `grapho` crates and does color conversion\n\n- `grapho-2d`: describes all kinds of 2D vector graphics and math (like polygon clipping)\n\n- `grapho-rasterize-2d`: 2D rasterizer for the vectors described in `grapho-2d`\n\n- `grapho-filters`: pixel based effects and filters for pixel buffers\n\n- `grapho-cv`: computer vision library for grapho stack\n\n- `grapho-3d`: 3D vector math\n\n\n\n## What does it do\n\n\n\n`grapho-bitplane` describes the pixel buffer primitives that are used for creating and modifying images.\n\nIt will contain color conversion functionality and can work on arbitrary interleaved or non-interleaved\n\ngraphics data.\n\n\n\nConversion from all color-plane types into all others will be implemented.\n\nCurrently the 
following bitplane types are implemented:\n\n\n\n### RGB interleaved `RGBPixelBuffer`\n\n\n\n- `RGB`, `BGR` 24 bit without alpha\n\n- `RGBA`, `ARGB`, `BGRA`, `ABGR` 32 bit with alpha\n\n\n\n### Grayscale `GrayscalePixelBuffer`\n\n\n\n- `Y` Simple, single Y plane for monochrome images.\n\n- `Yxx` 3 bytes, ignore the last two (interpret a YUV444 image as grayscale)\n\n- `Yx` and `xY`, 2 bytes, ignore the x (interpret a YUV422 image as grayscale)\n\n\n\n### YUV 4:2:2 interleaved `YUV422iPixelBuffer`\n\n\n\n- `UYVY` YUV 4:2:2 (Y sample at every pixel, U and V sampled at every second pixel horizontally on each line). A macropixel contains 2 pixels in 1 `u32`.\n\n- `YUY2`/`YUV422` YUV 4:2:2 as for `UYVY` but with different component ordering within the `u32` macropixel.\n\n- `YVYU` YUV 4:2:2 as for `UYVY` but with different component ordering within the `u32` macropixel.\n\n- `VYUY` YUV 4:2:2 as for `UYVY` but with different component ordering within the `u32` macropixel.\n\n\n", "file_path": "README.md", "rank": 91, "score": 8.568162936874062 }, { "content": "pub enum PixelBufferError {\n\n /// Buffer too small for requested operation\n\n BufferTooSmall,\n\n /// Request out of buffer bounds\n\n RequestOutOfBounds\n\n}\n\n\n\n/// Pixel type used by iterators, contains\n\n/// x, y and color of pixel\n\npub type Pixel<T> = (usize, usize, T);\n\n\n\n#[doc(spotlight)]\n\n/// Pixel buffer trait, all Pixel buffers will implement this\n", "file_path": "src/lib.rs", "rank": 92, "score": 8.567348537659667 }, { "content": " YUVComponent::V => v = self.data[start + i],\n\n YUVComponent::Ignore => ()\n\n }\n\n }\n\n\n\n Ok(DigitalYCbCrColor {\n\n y, cb: u, cr: v\n\n })\n\n }\n\n}\n\n\n\npub mod iter;\n\nmod tests;\n", "file_path": "src/yuv422i/mod.rs", "rank": 93, "score": 8.491249842833504 }, { "content": " assert_eq!(buffer.get_width(), 2);\n\n assert_eq!(buffer.get_height(), 2);\n\n assert_eq!(buffer.get_stride(), 2);\n\n }\n\n\n\n #[test]\n\n fn prefilled_buffer() {\n\n let data 
= vec![0, 255, 64, 0];\n\n let copy = data.clone();\n\n let buffer = GrayscalePixelBuffer::new_with_data(2, 2, data, None, Some(\"Y\")).unwrap();\n\n assert_eq!(buffer.data.len(), 4);\n\n for x in 0..buffer.data.len() {\n\n assert_eq!(copy[x], buffer.data[x]);\n\n }\n\n }\n\n\n\n #[test]\n\n fn bg_buffer() {\n\n let buffer = GrayscalePixelBuffer::new_with_background(\n\n 2, 2,\n", "file_path": "src/grayscale/tests.rs", "rank": 94, "score": 8.346238638726824 }, { "content": " }\n\n\n\n match buffer.get_pixel(0, 2) {\n\n Err(error) => assert_eq!(error, PixelBufferError::RequestOutOfBounds),\n\n Ok(_color) => assert!(false)\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod yuv422 {\n\n use super::*;\n\n\n\n #[test]\n\n fn empty_buffer() {\n\n let buffer = YUV422iPixelBuffer::new(2, 2, None, Some(\"YUV422\"));\n\n assert_eq!(buffer.data.len(), 8);\n\n assert_eq!(buffer.data[0], 0);\n\n assert_eq!(buffer.data[1], 0);\n\n assert_eq!(buffer.width, 2);\n", "file_path": "src/yuv422i/tests.rs", "rank": 95, "score": 8.206791760781348 }, { "content": " match buffer.get_pixel(0, 2) {\n\n Err(error) => assert_eq!(error, PixelBufferError::RequestOutOfBounds),\n\n Ok(_color) => assert!(false)\n\n }\n\n }\n\n}\n\n\n\n\n\n#[cfg(test)]\n\nmod bgr {\n\n use super::*;\n\n\n\n #[test]\n\n fn empty_buffer() {\n\n let buffer = RGBPixelBuffer::new(2, 2, None, Some(\"BGR\"));\n\n assert_eq!(buffer.data.len(), 12);\n\n assert_eq!(buffer.data[0], 0);\n\n assert_eq!(buffer.data[1], 0);\n\n assert_eq!(buffer.data[2], 0);\n\n assert_eq!(buffer.width, 2);\n", "file_path": "src/rgb/tests.rs", "rank": 96, "score": 8.184971713583522 }, { "content": "//! - `UYVY` YUV 4:2:2 (Y sample at every pixel, U and V sampled at every second pixel horizontally on each line). A macropixel contains 2 pixels in 1 `u32`.\n\n//! - `YUY2`/`YUV422` YUV 4:2:2 as for `UYVY` but with different component ordering within the `u32` macropixel.\n\n//! 
- `YVYU` YUV 4:2:2 as for `UYVY` but with different component ordering within the `u32` macropixel.\n\n//! - `VYUY` YUV 4:2:2 as for `UYVY` but with different component ordering within the `u32` macropixel.\n\n//!\n\n//! ### YUV 4:2:2 planar `YUV422pPixelBuffer`\n\n//! \n\n//! - `YV21` / `I420` 8 bit Y plane followed by 8 bit 2x2 subsampled U and V planes.\n\n//! - `YV12` 8 bit Y plane followed by 8 bit 2x2 subsampled V and U planes.\n\n\n\n#![feature(repeat_generic_slice)]\n\n#![feature(doc_spotlight)]\n\n\n\nextern crate grapho_color;\n\n\n\npub use std::ops::{Sub, Mul, Add, Div, SubAssign, MulAssign, AddAssign, DivAssign};\n\nuse std::marker::{Sized};\n\n\n\n/// Error type for pixel buffer operations\n\n#[derive(Debug, PartialEq)]\n", "file_path": "src/lib.rs", "rank": 97, "score": 7.6313780340554676 }, { "content": "}\n\n\n\nimpl<'a> GrayscalePixelBuffer<'a> {\n\n fn decode_component_order(fourcc:&'a str) -> Vec<GrayscaleComponent> {\n\n match fourcc {\n\n \"Y\" => vec![GrayscaleComponent::Value],\n\n \"Yxx\" => vec![GrayscaleComponent::Value, GrayscaleComponent::Ignore, GrayscaleComponent::Ignore],\n\n \"Yx\" => vec![GrayscaleComponent::Value, GrayscaleComponent::Ignore],\n\n \"xY\" => vec![GrayscaleComponent::Ignore, GrayscaleComponent::Value],\n\n _ => vec![GrayscaleComponent::Value]\n\n } \n\n }\n\n}\n\n\n\nimpl<'a> PixelBuffer<'a> for GrayscalePixelBuffer<'a> {\n\n type ColorType = DigitalGrayscaleColor;\n\n\n\n /// Create a new pixel buffer with given dimensions\n\n /// \n\n /// # Arguments\n", "file_path": "src/grayscale/mod.rs", "rank": 98, "score": 7.531269923715561 }, { "content": "\n\n match buffer.get_pixel(1, 1) {\n\n Err(_error) => assert!(false),\n\n Ok(color) => assert_eq!(color, DigitalYCbCrColor{ y: 9, cb: 11, cr: 10 })\n\n }\n\n\n\n match buffer.get_pixel(0, 2) {\n\n Err(error) => assert_eq!(error, PixelBufferError::RequestOutOfBounds),\n\n Ok(_color) => assert!(false)\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod uvy {\n\n use 
super::*;\n\n\n\n #[test]\n\n fn empty_buffer() {\n\n let buffer = YUV444iPixelBuffer::new(2, 2, None, Some(\"UVY\"));\n", "file_path": "src/yuv444i/tests.rs", "rank": 99, "score": 7.417318234715179 } ]
Rust
src/main.rs
kurtbuilds/checkexec
22af898b48c2c10432762406c4ec4b714995f5f2
use std::borrow::Cow; use std::fmt::{Display}; use std::path::{Path}; use std::process::{exit, Command}; use clap::{App, AppSettings, Arg}; use std::fs; use shell_escape::escape; const VERSION: &str = env!("CARGO_PKG_VERSION"); struct Error { message: String, } impl std::fmt::Debug for Error { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.message) } } impl std::fmt::Display for Error { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.message) } } impl std::error::Error for Error {} macro_rules! err { ($($arg:tt)*) => { Error { message: format!($($arg)*), } } } fn infer_dependencies<'a>(command: &[&'a str]) -> Result<Vec<&'a str>, Error> { let inferred_deps = command.iter() .filter_map(|s| fs::metadata(s).ok().map(|_| *s)) .collect::<Vec<&str>>(); if inferred_deps.is_empty() { Err(err!("--infer must find at least one accessible file in command arguments. Command arguments are: {}", command.iter().map(|s| format!("\"{}\"", s)).collect::<Vec<String>>().join(" ") )) } else { Ok(inferred_deps) } } fn should_execute<T: AsRef<Path> + Display>(target: &str, dependencies: &[T]) -> Result<bool, Error> { match fs::metadata(target) { Ok(meta) => { let modified = meta.modified().unwrap(); for dependency in dependencies { let dep_meta = fs::metadata(&dependency) .map_err(|_| err!("{}: Could not read file metadata", &dependency))?; if dep_meta.modified().unwrap() > modified { return Ok(true); } } Ok(false) } Err(_) => Ok(true) } } fn main() -> std::result::Result<(), Error> { let args = App::new("checkexec") .version(VERSION) .about("Conditionally run a command (like `make`)") .setting(AppSettings::ArgRequiredElseHelp) .setting(AppSettings::TrailingVarArg) .arg(Arg::new("target") .help("The file created by this checkexec execution.") .required(true) ) .arg(Arg::new("verbose") .long("verbose") .short('v') .takes_value(false) ) .arg(Arg::new("infer") .long("infer") .takes_value(false) 
.conflicts_with("dependencies") .help("Infer the dependency list. The inference takes all arguments to the command, filters it for files, and uses that list. \ --infer causes checkexec to fail if it creates an empty dependency list.") ) .arg(Arg::new("dependencies").min_values(0) .help("The list of files") ) .arg(Arg::new("command").min_values(1) .last(true) .required(true) .help("The command to execute if the check passes.") ) .get_matches(); let verbose = args.is_present("verbose"); let target = args.value_of("target").unwrap(); let command_args = args.values_of("command").unwrap().into_iter().skip(1).collect::<Vec<&str>>(); let dependencies = if args.is_present("infer") { infer_dependencies(&command_args)? } else { args.values_of("dependencies").map(|d| d.collect::<Vec<&str>>()).unwrap_or_default() } .iter() .flat_map(|s| s.split('\n')) .collect::<Vec<&str>>(); if verbose { eprintln!("Found {} dependencies:\n{}", dependencies.len(), dependencies.iter().map(|d| escape(Cow::Borrowed(d))).collect::<Vec<_>>().join("\n")); } if should_execute(target, &dependencies)? 
{ let command = args.values_of("command").unwrap().collect::<Vec<&str>>(); if verbose { eprintln!("{} {}", command[0], command.iter().skip(1).map(|s| format!("\"{}\"", s)).collect::<Vec<String>>().join(" ")); } let output = Command::new(command[0]) .args(command[1..].iter()) .status() .map_err(|_| err!("{}: command not found", command[0]))?; exit(output.code().unwrap()); } Ok(()) } #[cfg(test)] mod test { use std::io::Write; use super::*; use tempfile::{TempDir, tempdir}; struct TempFiles { #[allow(dead_code)] dir: TempDir, pub files: Vec<String>, } fn touch(path: &str) -> std::io::Result<()> { let mut file = fs::File::create(path).unwrap(); file.write_all(b"") } fn touch_and_untouch(touched: usize, untouched: usize) -> TempFiles { let tempdir = tempdir().unwrap(); let dir = tempdir.path(); let mut files: Vec<String> = Vec::new(); files.extend((0..touched).map(|i| dir.join(i.to_string()).to_str().unwrap().to_string())); files.extend((touched..(touched + untouched)).map(|i| dir.join(i.to_string()).to_str().unwrap().to_string())); for file in files.iter().take(touched) { touch(file).unwrap(); std::thread::sleep(std::time::Duration::from_millis(10)); } TempFiles { dir: tempdir, files, } } #[test] fn test_infer_dependencies() { let TempFiles { dir: _dir, files } = touch_and_untouch(3, 0); let dependencies = infer_dependencies(&["cc", &files[0], &files[1]]).unwrap(); assert_eq!(dependencies, vec![&files[0], &files[1]]); } #[test] fn test_no_inferred_dependencies_errors() { let TempFiles { dir: _dir, files } = touch_and_untouch(0, 1); assert!(infer_dependencies(&["cc", &files[0]]).is_err()) } #[test] fn test_should_execute_errors_on_failed_dependency_access() { let TempFiles { dir: _dir, files } = touch_and_untouch(1, 1); assert!(should_execute(&files[0], &files[1..]).is_err(), "Should have failed to access file"); } #[test] fn test_should_execute_target_doesnt_exist() { let TempFiles { dir: _dir, files } = touch_and_untouch(1, 1); assert!(should_execute(&files[1], 
&files[0..1]).unwrap(), "Should execute because target doesn't exist"); } #[test] fn test_should_not_execute_newer_target() { let TempFiles { dir: _dir, files } = touch_and_untouch(2, 0); assert!(!should_execute(&files[1], &files[0..1]).unwrap(), "Should not execute because target is newer"); } #[test] fn test_should_execute_newer_dependencies() { let TempFiles { dir: _dir, files } = touch_and_untouch(2, 0); assert!(should_execute(&files[0], &files[1..]).unwrap()) } }
use std::borrow::Cow; use std::fmt::{Display}; use std::path::{Path}; use std::process::{exit, Command}; use clap::{App, AppSettings, Arg}; use std::fs; use shell_escape::escape; const VERSION: &str = env!("CARGO_PKG_VERSION"); struct Error { message: String, } impl std::fmt::Debug for Error { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.message) } } impl std::fmt::Display for Error { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(f, "{}", self.message) } } impl std::error::Error for Error {} macro_rules! err { ($($arg:tt)*) => { Error { message: format!($($arg)*), } } } fn infer_dependencies<'a>(command: &[&'a str]) -> Result<Vec<&'a str>, Error> { let inferred_deps = command.iter() .filter_map(|s| fs::metadata(s).ok().map(|_| *s)) .collect::<Vec<&str>>(); if inferred_deps.is_empty() { Err(err!("--infer must find at least one accessible file in command arguments. Command arguments are: {}", command.iter().map(|s| format!("\"{}\"", s)).collect::<Vec<String>>().join(" ") )) } else { Ok(inferred_deps) } } fn should_execute<T: AsRef<Path> + Display>(target: &str, dependencies: &[T]) -> Result<bool, Error> { match fs::metadata(target) { Ok(meta) => { let modified = meta.modified().unwrap(); for dependency in dependencies { let dep_meta = fs::metadata(&dependency) .map_err(|_| err!("{}: Could not read file metadata", &dependency))?; if dep_meta.modified().unwrap() > modified { return Ok(true); } } Ok(false) } Err(_) => Ok(true) } } fn main() -> std::result::Result<(), Error> { let args = App::new("checkexec") .version(VERSION) .about("Conditionally run a command (like `make`)") .setting(AppSettings::ArgRequiredElseHelp) .setting(AppSettings::TrailingVarArg) .arg(Arg::new("target") .help("The file created by this checkexec execution.") .required(true) ) .arg(Arg::new("verbose") .long("verbose") .short('v') .takes_value(false) ) .arg(Arg::new("infer") .long("infer") .takes_value(false) 
.conflicts_with("dependencies") .help("Infer the dependency list. The inference takes all arguments to the command, filters it for files, and uses that list. \ --infer causes checkexec to fail if it creates an empty dependency list.") ) .arg(Arg::new("dependencies").min_values(0) .help("The list of files") ) .arg(Arg::new("command").min_values(1) .last(true) .required(true) .help("The command to execute if the check passes.") ) .
#[cfg(test)] mod test { use std::io::Write; use super::*; use tempfile::{TempDir, tempdir}; struct TempFiles { #[allow(dead_code)] dir: TempDir, pub files: Vec<String>, } fn touch(path: &str) -> std::io::Result<()> { let mut file = fs::File::create(path).unwrap(); file.write_all(b"") } fn touch_and_untouch(touched: usize, untouched: usize) -> TempFiles { let tempdir = tempdir().unwrap(); let dir = tempdir.path(); let mut files: Vec<String> = Vec::new(); files.extend((0..touched).map(|i| dir.join(i.to_string()).to_str().unwrap().to_string())); files.extend((touched..(touched + untouched)).map(|i| dir.join(i.to_string()).to_str().unwrap().to_string())); for file in files.iter().take(touched) { touch(file).unwrap(); std::thread::sleep(std::time::Duration::from_millis(10)); } TempFiles { dir: tempdir, files, } } #[test] fn test_infer_dependencies() { let TempFiles { dir: _dir, files } = touch_and_untouch(3, 0); let dependencies = infer_dependencies(&["cc", &files[0], &files[1]]).unwrap(); assert_eq!(dependencies, vec![&files[0], &files[1]]); } #[test] fn test_no_inferred_dependencies_errors() { let TempFiles { dir: _dir, files } = touch_and_untouch(0, 1); assert!(infer_dependencies(&["cc", &files[0]]).is_err()) } #[test] fn test_should_execute_errors_on_failed_dependency_access() { let TempFiles { dir: _dir, files } = touch_and_untouch(1, 1); assert!(should_execute(&files[0], &files[1..]).is_err(), "Should have failed to access file"); } #[test] fn test_should_execute_target_doesnt_exist() { let TempFiles { dir: _dir, files } = touch_and_untouch(1, 1); assert!(should_execute(&files[1], &files[0..1]).unwrap(), "Should execute because target doesn't exist"); } #[test] fn test_should_not_execute_newer_target() { let TempFiles { dir: _dir, files } = touch_and_untouch(2, 0); assert!(!should_execute(&files[1], &files[0..1]).unwrap(), "Should not execute because target is newer"); } #[test] fn test_should_execute_newer_dependencies() { let TempFiles { dir: _dir, files } = 
touch_and_untouch(2, 0); assert!(should_execute(&files[0], &files[1..]).unwrap()) } }
get_matches(); let verbose = args.is_present("verbose"); let target = args.value_of("target").unwrap(); let command_args = args.values_of("command").unwrap().into_iter().skip(1).collect::<Vec<&str>>(); let dependencies = if args.is_present("infer") { infer_dependencies(&command_args)? } else { args.values_of("dependencies").map(|d| d.collect::<Vec<&str>>()).unwrap_or_default() } .iter() .flat_map(|s| s.split('\n')) .collect::<Vec<&str>>(); if verbose { eprintln!("Found {} dependencies:\n{}", dependencies.len(), dependencies.iter().map(|d| escape(Cow::Borrowed(d))).collect::<Vec<_>>().join("\n")); } if should_execute(target, &dependencies)? { let command = args.values_of("command").unwrap().collect::<Vec<&str>>(); if verbose { eprintln!("{} {}", command[0], command.iter().skip(1).map(|s| format!("\"{}\"", s)).collect::<Vec<String>>().join(" ")); } let output = Command::new(command[0]) .args(command[1..].iter()) .status() .map_err(|_| err!("{}: command not found", command[0]))?; exit(output.code().unwrap()); } Ok(()) }
function_block-function_prefix_line
[ { "content": "<div id=\"top\"></div>\n\n\n\n<p align=\"center\">\n\n<a href=\"https://github.com/kurtbuilds/checkexec/graphs/contributors\">\n\n <img src=\"https://img.shields.io/github/contributors/kurtbuilds/checkexec.svg?style=flat-square\" alt=\"GitHub Contributors\" />\n\n</a>\n\n<a href=\"https://github.com/kurtbuilds/checkexec/stargazers\">\n\n <img src=\"https://img.shields.io/github/stars/kurtbuilds/checkexec.svg?style=flat-square\" alt=\"Stars\" />\n\n</a>\n\n<a href=\"https://github.com/kurtbuilds/checkexec/actions\">\n\n <img src=\"https://img.shields.io/github/workflow/status/kurtbuilds/checkexec/Run%20Tests?style=flat-square\" alt=\"Build Status\" />\n\n</a>\n\n<a href=\"https://crates.io/crates/checkexec\">\n\n <img src=\"https://img.shields.io/crates/d/checkexec?style=flat-square\" alt=\"Downloads\" />\n\n</a>\n\n<a href=\"https://crates.io/crates/checkexec\">\n\n <img src=\"https://img.shields.io/crates/v/checkexec?style=flat-square\" alt=\"Crates.io\" />\n\n</a>\n\n\n\n</p>\n\n\n\n# Checkexec\n\n\n\n`checkexec` is a tool to conditionally execute commands only when files in a dependency list have been updated.\n\n\n\nThis tool provides the behavior of `make` as a standalone executable, where a command is only run if any of its \n\ndependencies have been updated. Like `make`, `checkexec` runs a command only if the modified time of any dependency \n\nis newer than the modified time of the target. \n\n\n\n# Usage\n\n\n\nThe arguments are: `<target> <dependencies...> -- <command>`. The `--` is a required separator.\n\n\n\n checkexec build/my-bin src/my-program.c -- cc -o build/my-bin src/my-program.c\n\n\n\n`checkexec` executes the command directly, so shell constructs like '&&' and '||' are not supported.\n\nYou can use `/bin/bash -c` as the command, but escaping is tricky. 
You're likely better off using two invocations of\n\n`checkexec`.\n\n\n\nYou can infer the dependency list with `--infer`, where checkexec will inspect the arguments of `<command>` for \n\naccessible paths. `--infer` will fail if no files are found.\n\n\n\n checkexec build/my-bin --infer -- cc -o build/my-bin src/my-program.c\n\n\n\n# Installation\n\n\n\n cargo install checkexec\n\n\n", "file_path": "README.md", "rank": 11, "score": 9093.384664963998 }, { "content": "# Usage Notes\n\n\n\n`checkexec` is great for when you build files from other files. Instead of relying on\n\necosystem-specific tools, you can use `checkexec` as part of any build tool. Here are some examples:\n\n\n\n- You build, resize, or sample images as part of your build command, but don't want to rebuild them unless needed.\n\n- You build C libaries as part of your Python, Rust, Node (or any other) build process.\n\n- You build Sass/Less/SCSS files and don't want to re-build them unnecessarily.\n\n\n\n`checkexec` pairs well with these tools:\n\n\n\n- [`just`](https://github.com/casey/just) fixes numerous problems with `make`, and `checkexec` adds back the \n\n conditional rebuild functionality of `make`. Together, they create a modular and modern build process and \n\n command runner.\n\n- [`watchexec`](https://github.com/watchexec/watchexec) provides live relaading/re-building, while `checkexec` \n\n has callable behavior, useful as a build step or on CI. The naming similarity is intentional.\n\n- [`fd`](https://github.com/sharkdp/fd) makes it easy to specify a dependency file list. Example here:\n\n\n\n```bash\n\n# Only run your command if a rust file has changed. Note cargo does approximately the \n\n# same thing natively, but you can easily tailor this structure to a custom case.\n\ncheckexec target/debug/hello $(fd -e rs . 
src) -- cargo build\n\n```\n\n\n\n### Exit codes\n\n\n\n`checkexec` exit codes behave as you would expect, specifically:\n\n\n\n- 0 (success) if the command is not run (i.e. target is up to date)\n\n- 1 if a provided dependency or the command is not found\n\n- Otherwise, when the command is run, checkexec will pass through the command's exit code.\n\n\n\n# Contributing\n\n\n\nContributions are what make the open source community such an amazing place to learn, inspire, and create. \n\nAny contributions you make are **greatly appreciated**.\n\n\n\nIf you have a suggestion that would make this better, please fork the repo and create a pull request. \n\nYou can also simply open an issue with the tag \"enhancement\".\n\nDon't forget to give the project a star!\n\n\n\n<p align=\"right\">(<a href=\"#top\">back to top</a>)</p>\n", "file_path": "README.md", "rank": 12, "score": 9089.824415068668 }, { "content": "The MIT License (MIT)\n\n=====================\n\n\n\nCopyright © `<year>` `<copyright holders>`\n\n\n\nPermission is hereby granted, free of charge, to any person\n\nobtaining a copy of this software and associated documentation\n\nfiles (the “Software”), to deal in the Software without\n\nrestriction, including without limitation the rights to use,\n\ncopy, modify, merge, publish, distribute, sublicense, and/or sell\n\ncopies of the Software, and to permit persons to whom the\n\nSoftware is furnished to do so, subject to the following\n\nconditions:\n\n\n\nThe above copyright notice and this permission notice shall be\n\nincluded in all copies or substantial portions of the Software.\n\n\n\nTHE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND,\n\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES\n\nOF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND\n\nNONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT\n\nHOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,\n\nWHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\n\nFROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR\n", "file_path": "LICENSE.md", "rank": 13, "score": 9083.200739905638 } ]
Rust
shapes/src/plymesh.rs
hackmad/pbr_rust
b7ae75564bf71c4dfea8b20f49d05ac1b89e6734
#![allow(dead_code)] use super::TriangleMesh; use core::geometry::*; use core::paramset::*; use core::texture::FloatTextureMap; use ply_rs::parser::Parser; use ply_rs::ply::*; use std::fs::File; use std::io::BufReader; use std::sync::Arc; use textures::ConstantTexture; pub struct PLYMesh; impl PLYMesh { pub fn from_props( p: ( &ParamSet, ArcTransform, ArcTransform, bool, &FloatTextureMap, ), ) -> Vec<ArcShape> { let (params, o2w, w2o, reverse_orientation, float_textures) = p; let path = params.find_one_filename("filename", String::from("")); assert!(path.len() > 0, "PLY filename not provied"); let alpha_tex_name = params.find_one_texture("alpha", String::from("")); let alpha_tex = if alpha_tex_name.len() > 0 { if let Some(tex) = float_textures.get(&alpha_tex_name) { Arc::clone(&tex) } else { warn!( "Couldn't find float texture '{}' for 'alpha' parameter", alpha_tex_name ); let alpha = params.find_one_float("alpha", 1.0); Arc::new(ConstantTexture::new(alpha)) } } else { let alpha = params.find_one_float("alpha", 1.0); Arc::new(ConstantTexture::new(alpha)) }; let shadow_alpha_tex_name = params.find_one_texture("shadowalpha", String::from("")); let shadow_alpha_tex = if shadow_alpha_tex_name.len() > 0 { if let Some(tex) = float_textures.get(&shadow_alpha_tex_name) { Arc::clone(tex) } else { warn!( "Couldn't find float texture '{}' for 'shadowalpha' parameter. Using float 'shadowalpha' parameterer instead.", alpha_tex_name ); let alpha = params.find_one_float("shadowalpha", 1.0); Arc::new(ConstantTexture::new(alpha)) } } else { let alpha = params.find_one_float("shadowalpha", 1.0); Arc::new(ConstantTexture::new(alpha)) }; let file = File::open(&path).expect(format!("Unable to open PLY file '{}'", path).as_ref()); let mut reader = BufReader::new(file); let parser = Parser::<DefaultElement>::new(); let ply = match parser.read_ply(&mut reader) { Ok(p) => p, Err(e) => panic!("Unable to parse PLY file '{}'. 
{}.", path, e), }; let mut points: Vec<Point3f> = vec![]; let mut normals: Vec<Normal3f> = vec![]; let mut uvs: Vec<Point2f> = vec![]; let mut has_normals = true; let mut has_uvs = true; let mut vertex_indices: Vec<usize> = vec![]; let mut face_count = 0; for (name, list) in ply.payload.iter() { match name.as_ref() { "vertex" => { for elem in list.iter() { let vertex = Self::parse_vertex(elem); points.push(vertex.point); has_normals = has_normals && vertex.has_normal; if has_normals { normals.push(vertex.normal); } has_uvs = has_uvs && vertex.has_uv; if has_uvs { uvs.push(vertex.uv); } } } "face" => { for elem in list.iter() { Self::parse_face(elem, &mut vertex_indices); face_count += 1; } } s => warn!("Ignoring unexpected element '{}' in '{}'", s, path), } } if points.len() == 0 || face_count == 0 { error!( "PLY file '{}' is invalid! No face/vertex elements found!", path ); return vec![]; } if !has_normals { normals = vec![]; } if !has_uvs { uvs = vec![]; } TriangleMesh::create( Arc::clone(&o2w), Arc::clone(&w2o), reverse_orientation, vertex_indices.len() / 3, vertex_indices, points, normals, vec![], uvs, Some(alpha_tex), Some(shadow_alpha_tex), vec![], ) } fn parse_vertex(elem: &KeyMap<Property>) -> Vertex { let mut p = Point3f::default(); let mut n = Normal3f::default(); let mut uv = Point2f::default(); let mut nc = 0; let mut uvc = 0; for (name, value) in elem.iter() { if let Property::Float(v) = value { match name.as_ref() { "x" => { p.x = *v; } "y" => { p.y = *v; } "z" => { p.z = *v; } "nx" => { n.x = *v; nc += 1; } "ny" => { n.y = *v; nc += 1; } "nz" => { n.z = *v; nc += 1; } "u" | "s" | "texture_u" | "texture_s" => { uv.x = *v; uvc += 1; } "v" | "t" | "texture_v" | "texture_t" => { uv.y = *v; uvc += 1; } s => debug!("Ignoring unexpected vertex element '{}'", s), } } else { debug!("Ignoring unexpected vertex property type"); } } Vertex::new(p, n, uv, nc == 3, uvc == 2) } fn parse_face(elem: &KeyMap<Property>, vertex_indices: &mut Vec<usize>) { for (name, 
value) in elem.iter() { match name.as_ref() { "vertex_indices" => { if let Property::ListInt(vi) = value { if vi.len() != 3 && vi.len() != 4 { panic!("Only triangles and quads are supported!"); } if vi.len() >= 3 { vertex_indices.push(vi[0] as usize); vertex_indices.push(vi[1] as usize); vertex_indices.push(vi[2] as usize); } if vi.len() == 4 { vertex_indices.push(vi[3] as usize); vertex_indices.push(vi[0] as usize); vertex_indices.push(vi[2] as usize); } } else if let Property::ListUInt(vi) = value { if vi.len() != 3 && vi.len() != 4 { panic!("Only triangles and quads are supported!"); } if vi.len() >= 3 { vertex_indices.push(vi[0] as usize); vertex_indices.push(vi[1] as usize); vertex_indices.push(vi[2] as usize); } if vi.len() == 4 { vertex_indices.push(vi[3] as usize); vertex_indices.push(vi[0] as usize); vertex_indices.push(vi[2] as usize); } } else { debug!("Ignoring unexpected face property type"); } } s => debug!("Ignoring unexpected face element '{}'", s), } } } } struct Vertex { point: Point3f, normal: Normal3f, uv: Point2f, has_normal: bool, has_uv: bool, } impl Vertex { fn new(point: Point3f, normal: Normal3f, uv: Point2f, has_normal: bool, has_uv: bool) -> Self { Self { point, normal, uv, has_normal, has_uv, } } }
#![allow(dead_code)] use super::TriangleMesh; use core::geometry::*; use core::paramset::*; use core::texture::FloatTextureMap; use ply_rs::parser::Parser; use ply_rs::ply::*; use std::fs::File; use std::io::BufReader; use std::sync::Arc; use textures::ConstantTexture; pub struct PLYMesh; impl PLYMesh { pub fn from_props( p: ( &ParamSet, ArcTransform, ArcTransform, bool, &FloatTextureMap, ), ) -> Vec<ArcShape> { let (params, o2w, w2o, reverse_orientation, float_textures) = p; let path = params.find_one_filename("filename", String::from("")); assert!(path.len() > 0, "PLY filename not provied"); let alpha_tex_name = params.find_one_texture("alpha", String::from("")); let alpha_tex = if alpha_tex_name.len() > 0 { if let Some(tex) = float_textures.get(&alpha_tex_name) { Arc::clone(&tex) } else { warn!( "Couldn't find float texture '{}' for 'alpha' parameter", alpha_tex_name ); let alpha = params.find_one_float("alpha", 1.0); Arc::new(ConstantTexture::new(alpha)) } } else { let alpha = params.find_one_float("alpha", 1.0); Arc::new(ConstantTexture::new(alpha)) }; let shadow_alpha_tex_name = params.find_one_texture("shadowalpha", String::from("")); let shadow_alpha_tex = if shadow_alpha_tex_name.len() > 0 { if let Some(tex) = float_textures.get(&shadow_alpha_tex_name) { Arc::clone(tex) } else { warn!( "Couldn't find float texture '{}' for 'shadowalpha' parameter. Using float 'shadowalpha' parameterer instead.", alpha_tex_name ); let alpha = params.find_one_float("shadowalpha", 1.0); Arc::new(ConstantTexture::new(alpha)) } } else { let alpha = params.find_one_float("shadowalpha", 1.0); Arc::new(ConstantTexture::new(alpha)) }; let file = File::open(&path).expect(format!("Unable to open PLY file '{}'", path).as_ref()); let mut reader = BufReader::new(file); let parser = Parser::<DefaultElement>::new(); let ply = match parser.read_ply(&mut reader) { Ok(p) => p, Err(e) => panic!("Unable to parse PLY file '{}'. 
{}.", path, e), }; let mut points: Vec<Point3f> = vec![]; let mut normals: Vec<Normal3f> = vec![]; let mut uvs: Vec<Point2f> = vec![]; let mut has_normals = true; let mut has_uvs = true; let mut vertex_indices: Vec<usize> = vec![]; let mut face_count = 0; for (name, list) in ply.payload.iter() { match name.as_ref() { "vertex" => { for elem in list.iter() { let vertex = Self::parse_vertex(elem); points.push(vertex.point); has_normals = has_normals && vertex.has_normal; if has_normals { normals.push(vertex.normal); } has_uvs = has_uvs && vertex.has_uv; if has_uvs { uvs.push(vertex.uv); } } } "face" => { for elem in list.iter() { Self::parse_face(elem, &mut vertex_indices); face_count += 1; } } s => warn!("Ignoring unexpected element '{}' in '{}'", s, path), } } if points.len() == 0 || face_count == 0 { error!( "PLY file '{}' is invalid! No face/vertex elements found!", path ); return vec![]; } if !has_normals { normals = vec![]; } if !has_uvs { uvs = vec![]; } TriangleMesh::create( Arc::clone(&o2w), Arc::clone(&w2o), reverse_orientation, vertex_indices.len() / 3, vertex_indices, points, normals, vec![], uvs, Some(alpha_tex), Some(shadow_alpha_tex), vec![], ) } fn parse_vertex(elem: &KeyMap<Property>) -> Vertex { let mut p = Point3f::default(); let mut n = Normal3f::default(); let mut uv = Point2f::default(); let mut nc = 0; let mut uvc = 0; for (name, value) in elem.iter() { if let Property::Float(v) = value { match name.as_ref() { "x" => { p.x = *v; } "y" => { p.y = *v; } "z" => { p.z = *v; } "nx" => { n.x = *v; nc += 1; } "ny" => { n.y = *v; nc += 1; } "nz" => { n.z = *v; nc += 1; } "u" | "s" | "texture_u" | "texture_s" => { uv.x = *v; uvc += 1; } "v" | "t" | "texture_v" | "texture_t" => { uv.y = *v; uvc += 1; } s => debug!("Ignoring unexpected vertex element '{}'", s), } } else { debug!("Ignoring unexpected vertex property type"); } } Vertex::new(p, n, uv, nc == 3, uvc == 2) } fn parse_face(elem: &KeyMap<Property>, vertex_indices: &mut Vec<usize>) { for (name, 
value) in elem.iter() { match name.as_ref() { "vertex_indices" => { if let Property::ListInt(vi) = value { if vi.len() != 3 && vi.len() != 4 { panic!("Only triangles and quads are supported!"); } if vi.len() >= 3 { vertex_indices.push(vi[0] as usize); vertex_indices.push(vi[1] as usize); vertex_indices.push(vi[2] as usize); } if vi.len() == 4 { vertex_indices.push(vi[3] as usize); vertex_indices.push(vi[0] as usize); vertex_indices.push(vi[2] as usize); } }
} struct Vertex { point: Point3f, normal: Normal3f, uv: Point2f, has_normal: bool, has_uv: bool, } impl Vertex { fn new(point: Point3f, normal: Normal3f, uv: Point2f, has_normal: bool, has_uv: bool) -> Self { Self { point, normal, uv, has_normal, has_uv, } } }
else if let Property::ListUInt(vi) = value { if vi.len() != 3 && vi.len() != 4 { panic!("Only triangles and quads are supported!"); } if vi.len() >= 3 { vertex_indices.push(vi[0] as usize); vertex_indices.push(vi[1] as usize); vertex_indices.push(vi[2] as usize); } if vi.len() == 4 { vertex_indices.push(vi[3] as usize); vertex_indices.push(vi[0] as usize); vertex_indices.push(vi[2] as usize); } } else { debug!("Ignoring unexpected face property type"); } } s => debug!("Ignoring unexpected face element '{}'", s), } } }
function_block-function_prefix_line
[]
Rust
examples/custom_router/src/router/mod.rs
arn-the-long-beard/old_seed_archive
9aed8e64ab6ee5a2a6e9fd650eefb752fcb9144c
mod model; mod path; mod url; mod view; use seed::Url; use std::fmt::Debug; pub use {model::*, path::*, path::*, url::*, url::*, view::*}; use seed::{*, *}; struct_urls!(); impl<'a> Urls<'a> { pub fn build_url(self, segments: Vec<&str>) -> Url { self.base_url().set_path(segments) } } pub enum Move { IsNavigating, IsMovingBack, IsMovingForward, IsReady, } pub struct Router<Routes: Debug + PartialEq + ParsePath + Clone + Default + Navigation> { pub current_route: Option<Routes>, pub current_history_index: usize, pub default_route: Routes, base_url: Url, pub current_move: Move, history: Vec<Routes>, } impl<Routes: Debug + PartialEq + Default + ParsePath + Clone + Navigation> Default for Router<Routes> { fn default() -> Self { Router { current_history_index: 0, default_route: Routes::default(), history: Vec::new(), current_route: None, base_url: Url::new(), current_move: Move::IsReady, } } } impl<Routes: Debug + PartialEq + ParsePath + Default + Clone + Navigation> Router<Routes> { pub fn new() -> Router<Routes> { Router::default() } pub fn set_base_url(&mut self, url: Url) -> &mut Self { self.base_url = url; self } pub fn init_url_and_navigation(&mut self, url: Url) -> &mut Self { self.set_base_url(url.to_base_url()); self.navigate_to_url(url); self } fn push_to_history(&mut self, route: Routes) { self.history.push(route); self.current_history_index = self.history.len() - 1; } fn back(&mut self) -> bool { if let Some(next_route) = self.can_back_with_route() { self.current_route = Routes::parse_path(next_route.as_path().as_str()).ok(); self.current_history_index -= 1; true } else { false } } pub fn can_back_with_route(&self) -> Option<Routes> { if self.history.is_empty() { return None; } if self.current_history_index == 0 { return None; } let next_index = &self.current_history_index - 1; let route = self.history.get(next_index).unwrap(); Some(route.clone()) } pub fn can_back(&self) -> bool { self.can_back_with_route().is_some() } pub fn can_forward(&self) -> bool { 
self.can_forward_with_route().is_some() } pub fn can_forward_with_route(&self) -> Option<Routes> { if self.history.is_empty() { return None; } if self.current_history_index == self.history.len() - 1 { return None; } let next_index = &self.current_history_index + 1; let route = self.history.get(next_index).unwrap_or_else(|| { panic!( "We should have get route but index is failed {}", next_index ) }); Some(route.clone()) } fn forward(&mut self) -> bool { if let Some(next_route) = &self.can_forward_with_route() { let path: String = next_route.clone().as_path().to_string(); self.current_route = Routes::parse_path(&path).ok(); self.current_history_index += 1; true } else { false } } pub fn is_current_route(&self, route: &Routes) -> bool { if let Some(current_route) = &self.current_route { route.eq(&current_route) } else { false } } fn reload_without_cache() {} pub fn navigate_to_new(&mut self, route: &Routes) { self.current_route = Some(route.clone()); self.push_to_history(route.clone()); } fn navigate_to_url(&mut self, url: Url) { let path = &mut url.to_string(); path.remove(0); if let Ok(route_match) = Routes::parse_path(path) { self.navigate_to_new(&route_match); } else { self.navigate_to_new(&self.default_route.clone()); } } pub fn request_moving_back<F: FnOnce(Url) -> R, R>(&mut self, func: F) { self.current_move = Move::IsMovingBack; if let Some(next_route) = &self.can_back_with_route() { func(next_route.to_url()); } } pub fn request_moving_forward<F: FnOnce(Url) -> R, R>(&mut self, func: F) { self.current_move = Move::IsMovingForward; if let Some(next_route) = &self.can_forward_with_route() { func(next_route.to_url()); } } pub fn base_url(&self) -> &Url { &self.base_url } pub fn confirm_navigation(&mut self, url: Url) { match self.current_move { Move::IsNavigating => { self.navigate_to_url(url); } Move::IsMovingBack => { self.back(); } Move::IsMovingForward => { self.forward(); } Move::IsReady => { self.navigate_to_url(url); } } self.current_move = Move::IsReady; 
} } #[cfg(test)] mod test { use seed::{prelude::IndexMap, Url}; extern crate router_macro_derive; use super::*; use crate::router; use router::*; use router_macro_derive::*; use wasm_bindgen_test::*; wasm_bindgen_test_configure!(run_in_browser); #[derive(Debug, PartialEq, Copy, Clone, AsUrl)] pub enum DashboardAdminRoutes { Other, #[as_path = ""] Root, } #[derive(Debug, PartialEq, Clone, AsUrl)] pub enum DashboardRoutes { Admin(DashboardAdminRoutes), Profile(u32), #[as_path = ""] Root, } #[derive(Debug, PartialEq, Clone, AsUrl, Root)] enum ExampleRoutes { Login, Register, Stuff, Dashboard(DashboardRoutes), #[default_route] NotFound, #[as_path = ""] Home, } #[wasm_bindgen_test] fn test_router_default_route() { let mut router = Router::<ExampleRoutes>::new(); let url = Url::new().add_path_part("example"); router.navigate_to_url(url); assert_eq!(router.current_route.unwrap(), router.default_route); } #[wasm_bindgen_test] fn test_build_url() { let mut router: Router<ExampleRoutes> = Router::new(); let url = router.base_url().clone().add_path_part(""); router.navigate_to_url(url); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::parse_path("").unwrap() ); let admin_url = router .base_url() .clone() .set_path("dashboard/admin/other".split("/")); router.navigate_to_url(admin_url); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::parse_path("/dashboard/admin/other").unwrap() ); let admin_url = router .base_url() .clone() .set_path("dashboard/profile/1".split("/")); router.navigate_to_url(admin_url); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::parse_path("/dashboard/profile/1").unwrap() ); } #[wasm_bindgen_test] fn test_navigation_to_route() { let mut router: Router<ExampleRoutes> = Router::new(); router.navigate_to_new(&ExampleRoutes::parse_path("/dashboard/profile/1").unwrap()); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::Dashboard(DashboardRoutes::Profile(1)) ); 
assert_eq!(router.current_history_index, 0); router.navigate_to_new(&ExampleRoutes::parse_path("/dashboard/profile/55").unwrap()); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::Dashboard(DashboardRoutes::Profile(55)) ); assert_eq!(router.current_history_index, 1); router.navigate_to_new(&ExampleRoutes::Home); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::parse_path("").unwrap() ); assert_eq!(router.current_history_index, 2); } #[wasm_bindgen_test] fn test_backward() { let mut router: Router<ExampleRoutes> = Router::new(); let back = router.back(); assert_eq!(back, false, "We should Not have gone backwards"); assert_eq!( router.current_history_index, 0, "We should have current index 0" ); assert_eq!( router.current_route.is_none(), true, "We should not have current rou" ); router.navigate_to_new(&ExampleRoutes::parse_path("").unwrap()); router.navigate_to_new(&ExampleRoutes::parse_path("register").unwrap()); router.navigate_to_new(&ExampleRoutes::parse_path("dashboard/admin/other").unwrap()); assert_eq!(router.current_history_index, 2); let back = router.back(); assert_eq!(back, true, "We should have gone backwards"); assert_eq!(router.current_history_index, 1); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::Register ); assert_eq!(router.is_current_route(&ExampleRoutes::Register), true); let back = router.back(); assert_eq!(back, true, "We should have gone backwards"); assert_eq!(router.current_history_index, 0); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::parse_path("").unwrap() ); router.navigate_to_new(&ExampleRoutes::Dashboard(DashboardRoutes::Root)); assert_eq!( router.is_current_route(&ExampleRoutes::parse_path("dashboard/").unwrap()), true ); let back = router.back(); assert_eq!(back, true); assert_eq!(router.current_history_index, 2); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::parse_path("dashboard/admin/other").unwrap() ); } #[wasm_bindgen_test] fn 
test_forward() { let mut router: Router<ExampleRoutes> = Router::new(); let back = router.forward(); assert_eq!(back, false, "We should Not have gone backwards"); assert_eq!( router.current_history_index, 0, "We should have current index 0" ); assert_eq!( router.current_route.is_none(), true, "We should not have current rou" ); router.navigate_to_new(&ExampleRoutes::parse_path("").unwrap()); router.navigate_to_new(&ExampleRoutes::parse_path("register").unwrap()); router.navigate_to_new(&ExampleRoutes::parse_path("/dashboard/profile/55").unwrap()); assert_eq!(router.current_history_index, 2); let back = router.back(); let back = router.back(); let forward = router.forward(); assert_eq!(forward, true, "We should have gone forward"); assert_eq!(router.current_history_index, 1); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::Register ); let forward = router.forward(); assert_eq!(forward, true, "We should have gone forward"); assert_eq!(router.current_history_index, 2); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::Dashboard(DashboardRoutes::Profile(55)) ); let forward = router.forward(); assert_eq!(forward, false, "We should Not have gone forward"); } }
mod model; mod path; mod url; mod view; use seed::Url; use std::fmt::Debug; pub use {model::*, path::*, path::*, url::*, url::*, view::*}; use seed::{*, *}; struct_urls!(); impl<'a> Urls<'a> { pub fn build_url(self, segments: Vec<&str>) -> Url { self.base_url().set_path(segments) } } pub enum Move { IsNavigating, IsMovingBack, IsMovingForward, IsReady, } pub struct Router<Routes: Debug + PartialEq + ParsePath + Clone + Default + Navigation> { pub current_route: Option<Routes>, pub current_history_index: usize, pub default_route: Routes, base_url: Url, pub current_move: Move, history: Vec<Routes>, } impl<Routes: Debug + PartialEq + Default + ParsePath + Clone + Navigation> Default for Router<Routes> { fn default() -> Self { Router { current_history_index: 0, default_route: Routes::default(), history: Vec::new(), current_route: None, base_url: Url::new(), current_move: Move::IsReady, } } } impl<Routes: Debug + PartialEq + ParsePath + Default + Clone + Navigation> Router<Routes> { pub fn new() -> Router<Routes> { Router::default() } pub fn set_base_url(&mut self, url: Url) -> &mut Self { self.base_url = url; self } pub fn init_url_and_navigation(&mut self, url: Url) -> &mut Self { self.set_base_url(url.to_base_url()); self.navigate_to_url(url); self } fn push_to_history(&mut self, route: Routes) { self.history.push(route); self.current_history_index = self.history.len() - 1; } fn back(&mut self) -> bool { if let Some(next_route) = self.can_back_with_route() { self.current_route = Routes::parse_path(next_route.as_path().as_str()).ok(); self.current_history_index -= 1; true } else { false } } pub fn can_back_with_route(&self) -> Option<Routes> { if self.history.is_empty() { return None; } if self.current_history_index == 0 { return None; } let next_index = &self.current_history_index - 1; let route = self.history.get(next_index).unwrap(); Some(route.clone()) } pub fn can_back(&self) -> bool { self.can_back_with_route().is_some() } pub fn can_forward(&self) -> bool { 
self.can_forward_with_route().is_some() } pub fn can_forward_with_route(&self) -> Option<Routes> { if self.history.is_empty() { return None; } if self.current_history_index == self.history.len() - 1 { return None; } let next_index = &self.current_history_index + 1; let route = self.history.get(next_index).unwrap_or_else(|| { panic!( "We should have get route but index is failed {}", next_index ) }); Some(route.clone()) } fn forward(&mut self) -> bool { if let Some(next_route) = &self.can_forward_with_route() { let path: String = next_route.clone().as_path().to_string(); self.current_route = Routes::parse_path(&path).ok(); self.current_history_index += 1; true } else { false } }
fn reload_without_cache() {} pub fn navigate_to_new(&mut self, route: &Routes) { self.current_route = Some(route.clone()); self.push_to_history(route.clone()); } fn navigate_to_url(&mut self, url: Url) { let path = &mut url.to_string(); path.remove(0); if let Ok(route_match) = Routes::parse_path(path) { self.navigate_to_new(&route_match); } else { self.navigate_to_new(&self.default_route.clone()); } } pub fn request_moving_back<F: FnOnce(Url) -> R, R>(&mut self, func: F) { self.current_move = Move::IsMovingBack; if let Some(next_route) = &self.can_back_with_route() { func(next_route.to_url()); } } pub fn request_moving_forward<F: FnOnce(Url) -> R, R>(&mut self, func: F) { self.current_move = Move::IsMovingForward; if let Some(next_route) = &self.can_forward_with_route() { func(next_route.to_url()); } } pub fn base_url(&self) -> &Url { &self.base_url } pub fn confirm_navigation(&mut self, url: Url) { match self.current_move { Move::IsNavigating => { self.navigate_to_url(url); } Move::IsMovingBack => { self.back(); } Move::IsMovingForward => { self.forward(); } Move::IsReady => { self.navigate_to_url(url); } } self.current_move = Move::IsReady; } } #[cfg(test)] mod test { use seed::{prelude::IndexMap, Url}; extern crate router_macro_derive; use super::*; use crate::router; use router::*; use router_macro_derive::*; use wasm_bindgen_test::*; wasm_bindgen_test_configure!(run_in_browser); #[derive(Debug, PartialEq, Copy, Clone, AsUrl)] pub enum DashboardAdminRoutes { Other, #[as_path = ""] Root, } #[derive(Debug, PartialEq, Clone, AsUrl)] pub enum DashboardRoutes { Admin(DashboardAdminRoutes), Profile(u32), #[as_path = ""] Root, } #[derive(Debug, PartialEq, Clone, AsUrl, Root)] enum ExampleRoutes { Login, Register, Stuff, Dashboard(DashboardRoutes), #[default_route] NotFound, #[as_path = ""] Home, } #[wasm_bindgen_test] fn test_router_default_route() { let mut router = Router::<ExampleRoutes>::new(); let url = Url::new().add_path_part("example"); 
router.navigate_to_url(url); assert_eq!(router.current_route.unwrap(), router.default_route); } #[wasm_bindgen_test] fn test_build_url() { let mut router: Router<ExampleRoutes> = Router::new(); let url = router.base_url().clone().add_path_part(""); router.navigate_to_url(url); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::parse_path("").unwrap() ); let admin_url = router .base_url() .clone() .set_path("dashboard/admin/other".split("/")); router.navigate_to_url(admin_url); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::parse_path("/dashboard/admin/other").unwrap() ); let admin_url = router .base_url() .clone() .set_path("dashboard/profile/1".split("/")); router.navigate_to_url(admin_url); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::parse_path("/dashboard/profile/1").unwrap() ); } #[wasm_bindgen_test] fn test_navigation_to_route() { let mut router: Router<ExampleRoutes> = Router::new(); router.navigate_to_new(&ExampleRoutes::parse_path("/dashboard/profile/1").unwrap()); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::Dashboard(DashboardRoutes::Profile(1)) ); assert_eq!(router.current_history_index, 0); router.navigate_to_new(&ExampleRoutes::parse_path("/dashboard/profile/55").unwrap()); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::Dashboard(DashboardRoutes::Profile(55)) ); assert_eq!(router.current_history_index, 1); router.navigate_to_new(&ExampleRoutes::Home); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::parse_path("").unwrap() ); assert_eq!(router.current_history_index, 2); } #[wasm_bindgen_test] fn test_backward() { let mut router: Router<ExampleRoutes> = Router::new(); let back = router.back(); assert_eq!(back, false, "We should Not have gone backwards"); assert_eq!( router.current_history_index, 0, "We should have current index 0" ); assert_eq!( router.current_route.is_none(), true, "We should not have current rou" ); 
router.navigate_to_new(&ExampleRoutes::parse_path("").unwrap()); router.navigate_to_new(&ExampleRoutes::parse_path("register").unwrap()); router.navigate_to_new(&ExampleRoutes::parse_path("dashboard/admin/other").unwrap()); assert_eq!(router.current_history_index, 2); let back = router.back(); assert_eq!(back, true, "We should have gone backwards"); assert_eq!(router.current_history_index, 1); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::Register ); assert_eq!(router.is_current_route(&ExampleRoutes::Register), true); let back = router.back(); assert_eq!(back, true, "We should have gone backwards"); assert_eq!(router.current_history_index, 0); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::parse_path("").unwrap() ); router.navigate_to_new(&ExampleRoutes::Dashboard(DashboardRoutes::Root)); assert_eq!( router.is_current_route(&ExampleRoutes::parse_path("dashboard/").unwrap()), true ); let back = router.back(); assert_eq!(back, true); assert_eq!(router.current_history_index, 2); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::parse_path("dashboard/admin/other").unwrap() ); } #[wasm_bindgen_test] fn test_forward() { let mut router: Router<ExampleRoutes> = Router::new(); let back = router.forward(); assert_eq!(back, false, "We should Not have gone backwards"); assert_eq!( router.current_history_index, 0, "We should have current index 0" ); assert_eq!( router.current_route.is_none(), true, "We should not have current rou" ); router.navigate_to_new(&ExampleRoutes::parse_path("").unwrap()); router.navigate_to_new(&ExampleRoutes::parse_path("register").unwrap()); router.navigate_to_new(&ExampleRoutes::parse_path("/dashboard/profile/55").unwrap()); assert_eq!(router.current_history_index, 2); let back = router.back(); let back = router.back(); let forward = router.forward(); assert_eq!(forward, true, "We should have gone forward"); assert_eq!(router.current_history_index, 1); assert_eq!( 
router.current_route.clone().unwrap(), ExampleRoutes::Register ); let forward = router.forward(); assert_eq!(forward, true, "We should have gone forward"); assert_eq!(router.current_history_index, 2); assert_eq!( router.current_route.clone().unwrap(), ExampleRoutes::Dashboard(DashboardRoutes::Profile(55)) ); let forward = router.forward(); assert_eq!(forward, false, "We should Not have gone forward"); } }
pub fn is_current_route(&self, route: &Routes) -> bool { if let Some(current_route) = &self.current_route { route.eq(&current_route) } else { false } }
function_block-full_function
[ { "content": "pub fn init(url: Url, model: &mut Model, id: &String, orders: &mut impl Orders<Msg>) -> Model {\n\n Model {}\n\n}\n\n\n\npub struct Model {}\n\npub enum Msg {}\n", "file_path": "examples/custom_router/tests/routing_module/pages/profile.rs", "rank": 0, "score": 480232.6109669417 }, { "content": "pub fn init(url: Url, model: &mut Model, orders: &mut impl Orders<Msg>) -> Model {\n\n Model::default()\n\n}\n\n\n\nimpl Default for DashboardRoutes {\n\n fn default() -> DashboardRoutes {\n\n DashboardRoutes::Root\n\n }\n\n}\n\n#[derive(Default)]\n\npub struct Model {\n\n pub name: String,\n\n pub state: State,\n\n}\n\n#[derive(Default)]\n\npub struct State {\n\n message: message::Model,\n\n statistics: statistics::Model,\n\n tasks: task_list::Model,\n\n}\n\n\n\npub enum Msg {\n\n ChangeName,\n\n Message(message::Msg),\n\n Statistic(statistics::Msg),\n\n Tasks(task_list::Msg),\n\n}\n\n\n", "file_path": "examples/custom_router/src/pages/dashboard/mod.rs", "rank": 1, "score": 442277.7442201057 }, { "content": "pub fn init(url: Url, model: &mut Model, orders: &mut impl Orders<Msg>) -> Model {\n\n Model::default()\n\n}\n\n\n\n#[derive(Default)]\n\npub struct Model {\n\n user: User,\n\n request_state: RequestState<User>,\n\n}\n\n\n\n/// Action on register page\n\npub enum Msg {\n\n Register,\n\n RegisterFailed { message: String, code: String },\n\n RegisterSucceed(User),\n\n PasswordChanged(String),\n\n UsernameChanged(String),\n\n EmailChanged(String),\n\n FirstNameChanged(String),\n\n LastNameChanged(String),\n\n //todo implement a clear for the form ?\n\n Clear,\n\n}\n", "file_path": "examples/custom_router/src/pages/register/mod.rs", "rank": 2, "score": 442277.7442201057 }, { "content": "pub fn init(url: Url, model: &mut Model, orders: &mut impl Orders<Msg>) -> Model {\n\n Model {\n\n stuff: \"\".to_string(),\n\n }\n\n}\n\n\n\npub struct Model {\n\n stuff: String,\n\n}\n\n\n", "file_path": "examples/custom_router/tests/routing_module/pages/dashboard.rs", 
"rank": 3, "score": 438230.4160406352 }, { "content": "pub fn init(url: Url, model: &Model, orders: &mut impl Orders<Msg>) -> Model {\n\n Model::default()\n\n}\n\n\n\npub struct Model {\n\n pub tasks: Vec<task::Model>,\n\n pub selected_task_no: Option<u32>,\n\n}\n\n\n\nimpl Default for Model {\n\n fn default() -> Self {\n\n Model {\n\n selected_task_no: None,\n\n tasks: get_dummy_data(),\n\n }\n\n }\n\n}\n\n#[derive(Debug, PartialEq, Clone, AsUrl)]\n\npub enum TasksRoutes {\n\n Task {\n", "file_path": "examples/custom_router/src/pages/dashboard/task_list/mod.rs", "rank": 4, "score": 419295.78977207455 }, { "content": "pub fn view(routes: &AdminRoutes, model: &Model) -> Node<Msg> {\n\n routes.view(model)\n\n}\n", "file_path": "examples/custom_router/src/pages/admin/mod.rs", "rank": 5, "score": 407077.96269839373 }, { "content": "pub fn init(mut url: Url) -> Option<Model> {\n\n Some(Model {\n\n report_page: match url.next_hash_path_part() {\n\n Some(REPORT) => page::report::init(url)?,\n\n _ => None?,\n\n },\n\n })\n\n}\n\n\n\n// ------ ------\n\n// Model\n\n// ------ ------\n\n\n\npub struct Model {\n\n report_page: page::report::Model,\n\n}\n\n\n\n// ------ ------\n\n// Urls\n\n// ------ ------\n", "file_path": "examples/pages_hash_routing/src/page/admin.rs", "rank": 6, "score": 405681.18342516106 }, { "content": "pub fn view(dashboard_routes: &DashboardRoutes, model: &Model) -> Node<Msg> {\n\n match dashboard_routes {\n\n DashboardRoutes::Root => root(dashboard_routes, model),\n\n DashboardRoutes::Message => message::view(&model.state.message).map_msg(Msg::Message),\n\n DashboardRoutes::Statistics => {\n\n statistics::view(&model.state.statistics).map_msg(Msg::Statistic)\n\n }\n\n DashboardRoutes::Tasks(task_routes) => {\n\n task_list::view(task_routes, &model.state.tasks).map_msg(Msg::Tasks)\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples/custom_router/src/pages/dashboard/mod.rs", "rank": 7, "score": 402435.53157503996 }, { "content": "pub fn init(mut url: Url) 
-> Option<Model> {\n\n let base_url = url.to_hash_base_url();\n\n\n\n let frequency = match url.remaining_hash_path_parts().as_slice() {\n\n [] => {\n\n Urls::new(&base_url).default().go_and_replace();\n\n Frequency::default()\n\n }\n\n [DAILY] => Frequency::Daily,\n\n [WEEKLY] => Frequency::Weekly,\n\n _ => None?,\n\n };\n\n\n\n Some(Model {\n\n base_url,\n\n frequency,\n\n })\n\n}\n\n\n\n// ------ ------\n\n// Model\n\n// ------ ------\n\n\n\npub struct Model {\n\n base_url: Url,\n\n frequency: Frequency,\n\n}\n\n\n\n// ------ Frequency ------\n\n\n", "file_path": "examples/pages_hash_routing/src/page/admin/page/report.rs", "rank": 8, "score": 397158.1502103682 }, { "content": "pub fn view(task_routes: &TasksRoutes, model: &Model) -> Node<Msg> {\n\n div![vec![\n\n render_tasks(model),\n\n match task_routes {\n\n TasksRoutes::Task { id } => {\n\n let task = model.tasks.iter().find(|t| t.task_no.to_string() == *id);\n\n task::view(task.unwrap()).map_msg(Msg::Task)\n\n }\n\n TasksRoutes::Root => div![\"no task selected\"],\n\n },\n\n ]]\n\n}\n", "file_path": "examples/custom_router/src/pages/dashboard/task_list/mod.rs", "rank": 9, "score": 393616.64669585484 }, { "content": "pub fn init(mut url: Url, model: &mut Option<Model>) -> Option<()> {\n\n let model = model.get_or_insert_with(Model::default);\n\n model.page_id.replace(match url.next_path_part() {\n\n Some(REPORT) => page::report::init(url, &mut model.report_model).map(|_| PageId::Report)?,\n\n _ => None?,\n\n });\n\n Some(())\n\n}\n\n\n\n// ------ ------\n\n// Model\n\n// ------ ------\n\n\n\n#[derive(Default)]\n\npub struct Model {\n\n page_id: Option<PageId>,\n\n report_model: Option<page::report::Model>,\n\n}\n\n\n\n// ------ PageId ------\n\n\n", "file_path": "examples/pages_keep_state/src/page/admin.rs", "rank": 10, "score": 385813.18933078717 }, { "content": "/// view of register page\n\npub fn view(model: &Model) -> Node<Msg> {\n\n match &model.request_state {\n\n RequestState::Success(user) => 
div![\n\n C![\"centred\"],\n\n p![format!(\n\n \"Thank you for your registration {} {}. :)\",\n\n user.first_name, user.last_name\n\n )],\n\n br![],\n\n p![\n\n span![\"You can now \"],\n\n a![attrs! { At::Href => \"./login\" }, \"login\",],\n\n span![\" as \",],\n\n span![\n\n style! {St::Color => \"darkblue\"},\n\n user.credentials.username(),\n\n \".\"\n\n ]\n\n ]\n\n ],\n\n RequestState::IsPending(status) => form(model, status),\n\n RequestState::Failed { message, code } => p![\n\n C![\"centred\"],\n\n format!(\"An error happened {} with the code {}\", message, code)\n\n ],\n\n }\n\n}\n\n\n", "file_path": "examples/custom_router/src/pages/register/mod.rs", "rank": 11, "score": 381677.66006067523 }, { "content": "pub fn view(model: &Model) -> Node<Msg> {\n\n match &model.request_state {\n\n RequestState::Success(user) => div![p![\n\n C![\"centred\"],\n\n \"Welcome \",\n\n style! {St::Color => \"darkblue\"},\n\n user.username(),\n\n \". :)\"\n\n ]],\n\n RequestState::IsPending(status) => form(model, status),\n\n RequestState::Failed { message, code } => p![\n\n C![\"centred\"],\n\n format!(\"An error happened {} with the code {}\", message, code)\n\n ],\n\n }\n\n}\n\n\n", "file_path": "examples/custom_router/src/pages/login/mod.rs", "rank": 12, "score": 381672.53554687917 }, { "content": "pub fn init(mut url: Url, model: &mut Option<Model>) -> Option<()> {\n\n let model = model.get_or_insert_with(|| Model {\n\n base_url: url.to_base_url(),\n\n frequency: Frequency::Daily,\n\n });\n\n\n\n model.frequency = match url.remaining_path_parts().as_slice() {\n\n [] => {\n\n match model.frequency {\n\n Frequency::Daily => Urls::new(&model.base_url).daily().go_and_replace(),\n\n Frequency::Weekly => Urls::new(&model.base_url).weekly().go_and_replace(),\n\n }\n\n model.frequency\n\n }\n\n [DAILY] => Frequency::Daily,\n\n [WEEKLY] => Frequency::Weekly,\n\n _ => None?,\n\n };\n\n Some(())\n\n}\n", "file_path": "examples/pages_keep_state/src/page/admin/page/report.rs", "rank": 
13, "score": 379712.7540333256 }, { "content": "pub fn view(model: &Model) -> Node<Msg> {\n\n div![]\n\n}\n\n\n", "file_path": "examples/custom_router/tests/routing_module/pages/profile.rs", "rank": 14, "score": 377048.4975945631 }, { "content": "pub fn view(model: &Model) -> Node<Msg> {\n\n div![]\n\n}\n\n\n", "file_path": "examples/custom_router/tests/routing_module/pages/admin.rs", "rank": 15, "score": 377048.4975945631 }, { "content": "pub fn init(mut url: Url) -> Option<Model> {\n\n Some(Model {\n\n report_page: match url.next_path_part() {\n\n Some(REPORT) => page::report::init(url)?,\n\n _ => None?,\n\n },\n\n })\n\n}\n\n\n\n// ------ ------\n\n// Model\n\n// ------ ------\n\n\n\npub struct Model {\n\n report_page: page::report::Model,\n\n}\n\n\n\n// ------ ------\n\n// Urls\n\n// ------ ------\n", "file_path": "examples/pages/src/page/admin.rs", "rank": 16, "score": 375420.28807453165 }, { "content": "pub fn view(children: &Routes, model: &Model) -> Node<Msg> {\n\n div![]\n\n}\n\npub enum Msg {}\n", "file_path": "examples/custom_router/tests/routing_module/pages/other.rs", "rank": 17, "score": 374644.1903136361 }, { "content": "pub fn view(nested: &Routes, model: &Model) -> Node<Msg> {\n\n div![]\n\n}\n\n\n", "file_path": "examples/custom_router/tests/routing_module/pages/dashboard.rs", "rank": 18, "score": 370555.39795281005 }, { "content": "pub fn extract_query_params(url_string: String) -> IndexMap<String, String> {\n\n let mut query: IndexMap<String, String> = IndexMap::new();\n\n let url_parts: Vec<&str> = url_string.split('?').collect();\n\n let mut parts_iter = url_parts.iter();\n\n let skip_paths = parts_iter.next();\n\n if let Some(sub_string) = parts_iter.next() {\n\n let key_value: Vec<&str> = sub_string.split('&').collect();\n\n\n\n for pair in key_value {\n\n let mut sub = pair.split('=');\n\n let key = sub.next().expect(\"we should have a key for the parameter\");\n\n let value = sub.next().expect(\"we should have a value for this key\");\n\n 
query.insert(key.to_string(), value.to_string());\n\n }\n\n }\n\n query\n\n}\n\n#[cfg(test)]\n\nmod test {\n\n\n", "file_path": "examples/custom_router/src/router/url.rs", "rank": 19, "score": 368702.8092184506 }, { "content": "pub fn init(mut url: Url) -> Option<Model> {\n\n let base_url = url.to_base_url();\n\n\n\n let frequency = match url.remaining_path_parts().as_slice() {\n\n [] => {\n\n Urls::new(&base_url).default().go_and_replace();\n\n Frequency::default()\n\n }\n\n [DAILY] => Frequency::Daily,\n\n [WEEKLY] => Frequency::Weekly,\n\n _ => None?,\n\n };\n\n\n\n Some(Model {\n\n base_url,\n\n frequency,\n\n })\n\n}\n\n\n\n// ------ ------\n\n// Model\n\n// ------ ------\n\n\n\npub struct Model {\n\n base_url: Url,\n\n frequency: Frequency,\n\n}\n\n\n\n// ------ Frequency ------\n\n\n", "file_path": "examples/pages/src/page/admin/page/report.rs", "rank": 20, "score": 367808.3104869545 }, { "content": "pub fn convert_to_string(query: IndexMap<String, String>) -> String {\n\n let mut query_string = \"\".to_string();\n\n for (i, q) in query.iter().enumerate() {\n\n query_string += format!(\"{}={}\", q.0, q.1).as_str();\n\n\n\n if i != query.len() - 1 {\n\n query_string += format!(\"&\").as_str();\n\n }\n\n }\n\n query_string\n\n}\n\n\n", "file_path": "examples/custom_router/src/router/url.rs", "rank": 21, "score": 364291.61657530797 }, { "content": "/// Update on register pages\n\npub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {\n\n match msg {\n\n Msg::Register => {\n\n model.request_state = RequestState::IsPending(true);\n\n let request = Request::new(\"/api/register\")\n\n .method(Method::Post)\n\n .json(&model.user)\n\n .expect(\"Serialization failed\");\n\n model.user.credentials.set_password(\"\".to_string());\n\n orders.perform_cmd(async {\n\n let response = fetch(request).await.expect(\"HTTP request failed\");\n\n\n\n if response.status().is_ok() {\n\n Msg::RegisterSucceed(response.json().await.unwrap())\n\n } else {\n\n 
Msg::RegisterFailed {\n\n message: response.text().await.unwrap(),\n\n code: response.status().code.to_string(),\n\n }\n\n }\n", "file_path": "examples/custom_router/src/pages/register/mod.rs", "rank": 22, "score": 353464.6758511794 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {\n\n match msg {\n\n Msg::ChangeName => {}\n\n Msg::Message(message) => message::update(\n\n message,\n\n &mut model.state.message,\n\n &mut orders.proxy(Msg::Message),\n\n ),\n\n Msg::Statistic(statistics) => statistics::update(\n\n statistics,\n\n &mut model.state.statistics,\n\n &mut orders.proxy(Msg::Statistic),\n\n ),\n\n Msg::Tasks(task) => {\n\n task_list::update(task, &mut model.state.tasks, &mut orders.proxy(Msg::Tasks))\n\n }\n\n }\n\n}\n", "file_path": "examples/custom_router/src/pages/dashboard/mod.rs", "rank": 23, "score": 353464.6758511794 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {\n\n match msg {\n\n Msg::Login => {\n\n model.request_state = RequestState::IsPending(true);\n\n let request = Request::new(\"/api/auth\")\n\n .method(Method::Post)\n\n .json(&model.credentials)\n\n .expect(\"Serialization failed\");\n\n\n\n model.credentials.set_password(\"\".to_string());\n\n orders.perform_cmd(async {\n\n let response = fetch(request).await.expect(\"HTTP request failed\");\n\n\n\n if response.status().is_ok() {\n\n Msg::LoginSucceed(response.json().await.unwrap())\n\n } else {\n\n Msg::LoginFailed {\n\n message: response.text().await.unwrap(),\n\n code: response.status().code.to_string(),\n\n }\n", "file_path": "examples/custom_router/src/pages/login/mod.rs", "rank": 24, "score": 353464.67585117946 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {}\n\n\n", "file_path": "examples/custom_router/src/pages/admin/mod.rs", "rank": 25, "score": 353464.6758511794 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) 
{\n\n match msg {\n\n Msg::ClickTask(no) => {}\n\n Msg::LoadTasks => model.tasks = get_dummy_data(),\n\n Msg::Task(task) => {\n\n let index: usize = model.selected_task_no.unwrap() as usize;\n\n task::update(task, model.tasks.get_mut(index).unwrap())\n\n }\n\n }\n\n}\n\n// pub fn view(model: &Model, router: &SuperRouter<Routes>) -> Node<Msg> {\n\n// div![\"my tasks\", render_tasks(model, router),]\n\n// }\n\n\n", "file_path": "examples/custom_router/src/pages/dashboard/task_list/mod.rs", "rank": 26, "score": 346431.8440152075 }, { "content": "pub fn view(model: &Model) -> Node<Msg> {\n\n div![\"messages list\"]\n\n}\n", "file_path": "examples/custom_router/src/pages/dashboard/message.rs", "rank": 27, "score": 342899.98720707325 }, { "content": "pub fn view(model: &Model) -> Node<Msg> {\n\n div![\"route visited => {}\", &model.routes_history_count]\n\n}\n", "file_path": "examples/custom_router/src/pages/dashboard/statistics.rs", "rank": 28, "score": 342899.98720707325 }, { "content": "pub fn extract_id_parameter(url_string: String) -> String {\n\n let mut single_paths = url_string.split('/');\n\n\n\n let root = single_paths.next();\n\n\n\n if root.is_some() && !root.unwrap().is_empty() {\n\n eprintln!(\"root path should be like '' because urls starts with / \");\n\n }\n\n // make error if root is not empty\n\n let mut param_id = single_paths\n\n .next()\n\n .map(|r| r.to_string())\n\n .expect(\"Should have param id\");\n\n\n\n if param_id.contains('?') {\n\n param_id = param_id\n\n .split('?')\n\n .next()\n\n .map(|r| r.to_string())\n\n .expect(\"We should have a id parameter but got empty string\")\n\n }\n\n param_id\n\n}\n\n\n", "file_path": "examples/custom_router/src/router/url.rs", "rank": 29, "score": 341245.3903705539 }, { "content": "pub fn get_media_href(path: &str) -> String {\n\n format!(\"/public/media/{}\", path)\n\n}\n", "file_path": "examples/bunnies/src/config.rs", "rank": 30, "score": 339736.72170449496 }, { "content": "pub fn view(model: &Model) -> 
Node<Msg> {\n\n div![\n\n \"Title\",\n\n h3![model.task_title.to_string()],\n\n p![model.task_description.to_string()]\n\n ]\n\n}\n", "file_path": "examples/custom_router/src/pages/dashboard/task_list/task.rs", "rank": 31, "score": 335589.3826970913 }, { "content": "pub fn root(dashboard_routes: &DashboardRoutes, model: &Model) -> Node<Msg> {\n\n div![\"root for dashboard\"]\n\n}\n", "file_path": "examples/custom_router/src/pages/dashboard/mod.rs", "rank": 32, "score": 334882.26051884785 }, { "content": "pub fn extract_children_string(url_string: String, param_id: Option<String>) -> String {\n\n let mut full_query = url_string.clone();\n\n let mut children_path: Option<String> = None;\n\n\n\n if param_id.is_some() {\n\n println!(\"We have id param\");\n\n children_path = full_query\n\n .trim_start_matches('/')\n\n .to_string()\n\n .strip_prefix(&param_id.clone().unwrap())\n\n .map(|r| r.to_string());\n\n } else {\n\n println!(\"No id param\");\n\n children_path = Some(full_query)\n\n }\n\n\n\n children_path.expect(\"We should have a children path\")\n\n}\n\n\n", "file_path": "examples/custom_router/src/router/url.rs", "rank": 33, "score": 328859.1542986623 }, { "content": "pub fn update(msg: Msg, model: &mut Model) {\n\n match msg {\n\n Msg::ClickTask => {}\n\n }\n\n}\n", "file_path": "examples/custom_router/src/pages/dashboard/task_list/task.rs", "rank": 34, "score": 326457.9378846113 }, { "content": "fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {\n\n orders\n\n .subscribe(Msg::UrlChanged)\n\n .subscribe(Msg::UrlRequested)\n\n .subscribe(Msg::UserLogged);\n\n\n\n let mut router: Router<Routes> = Router::new();\n\n router.init_url_and_navigation(url);\n\n\n\n Model {\n\n theme: Theme::default(),\n\n register: Default::default(),\n\n login: Default::default(),\n\n dashboard: Default::default(),\n\n admin: Default::default(),\n\n router,\n\n logged_user: None,\n\n }\n\n}\n\n#[derive(Debug, PartialEq, Clone, RoutingModules)]\n", "file_path": 
"examples/custom_router/src/lib.rs", "rank": 35, "score": 326383.43926026137 }, { "content": "fn is_current_url(url: Url) -> bool {\n\n Url::current() == url\n\n}\n", "file_path": "examples/custom_router/src/pages/dashboard/task_list/mod.rs", "rank": 36, "score": 324279.4392158135 }, { "content": "fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {\n\n orders.subscribe(Msg::UrlChanged);\n\n Model {\n\n ctx: Context {\n\n logged_user: \"John Doe\",\n\n },\n\n base_url: url.to_hash_base_url(),\n\n page: Page::init(url),\n\n }\n\n}\n\n\n\n// ------ ------\n\n// Model\n\n// ------ ------\n\n\n", "file_path": "examples/pages_hash_routing/src/lib.rs", "rank": 37, "score": 323415.82227306603 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {}\n", "file_path": "examples/custom_router/src/pages/dashboard/statistics.rs", "rank": 38, "score": 318945.2827193966 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {\n\n match msg {\n\n Msg::AddMessage(name) => {}\n\n }\n\n}\n", "file_path": "examples/custom_router/src/pages/dashboard/message.rs", "rank": 39, "score": 318945.2827193966 }, { "content": "pub fn view<Ms>(model: &Model, ctx: &Context) -> Node<Ms> {\n\n page::report::view(&model.report_page, ctx)\n\n}\n", "file_path": "examples/pages_hash_routing/src/page/admin.rs", "rank": 40, "score": 318164.8500518791 }, { "content": "pub fn get_dummy_data() -> Vec<task::Model> {\n\n vec![\n\n task::Model {\n\n task_no: 0,\n\n task_title: \"Nested Url\".to_string(),\n\n task_description: \"Try to find an easy way to manipulate nested route\".to_string(),\n\n },\n\n task::Model {\n\n task_no: 1,\n\n task_title: \"Guard & permission\".to_string(),\n\n task_description: \"FInd a way to set Guard for protected routes\".to_string(),\n\n },\n\n task::Model {\n\n task_no: 2,\n\n task_title: \"Stuff\".to_string(),\n\n task_description: \"Additional stuff to do\".to_string(),\n\n },\n\n ]\n\n}\n", 
"file_path": "examples/custom_router/src/pages/dashboard/task_list/mod.rs", "rank": 41, "score": 316784.6455607624 }, { "content": "pub fn view(model: &Model) -> Node<Msg> {\n\n form![\n\n ev(Ev::Submit, |event| {\n\n event.prevent_default();\n\n Msg::Submit\n\n }),\n\n label![\n\n \"Name\",\n\n input![\n\n attrs! {At::Value => model.form.name},\n\n input_ev(Ev::Input, Msg::NameChanged),\n\n ]\n\n ],\n\n button![\"Submit\"],\n\n if let Some(message) = &model.message {\n\n span![message]\n\n } else {\n\n empty![]\n\n },\n\n ]\n\n}\n", "file_path": "examples/fetch/src/post.rs", "rank": 42, "score": 314489.95075675676 }, { "content": "pub fn view(model: &Model) -> Node<Msg> {\n\n div![\n\n style! {St::TextAlign => \"center\"},\n\n button![ev(Ev::Click, |_| Msg::Decrement), \"-\"],\n\n div![model.value],\n\n button![ev(Ev::Click, |_| Msg::Increment), \"+\"],\n\n ]\n\n}\n", "file_path": "examples/subscribe/src/counter.rs", "rank": 43, "score": 314489.95075675676 }, { "content": "pub fn view(model: &Model) -> Node<Msg> {\n\n div![\n\n button![ev(Ev::Click, |_| Msg::Fetch), \"Fetch user\"],\n\n model\n\n .user\n\n .as_ref()\n\n .map(|user| div![format!(\"User: {}\", user.name)])\n\n ]\n\n}\n", "file_path": "examples/fetch/src/simple.rs", "rank": 44, "score": 314489.95075675676 }, { "content": "fn init(_: Url, _: &mut impl Orders<Msg>) -> Model {\n\n Model::new()\n\n}\n\n\n\n// ------ ------\n\n// Model\n\n// ------ ------\n\n\n\n// ------ Model ------\n\n\n", "file_path": "examples/el_key/src/lib.rs", "rank": 45, "score": 312126.3486401041 }, { "content": "pub fn view<Ms>(model: &Model, ctx: &Context) -> Node<Ms> {\n\n let (frequency, link) = match &model.frequency {\n\n Frequency::Daily => (\n\n \"daily\",\n\n a![\n\n \"Switch to weekly\",\n\n attrs! {\n\n At::Href => Urls::new(&model.base_url).weekly()\n\n }\n\n ],\n\n ),\n\n Frequency::Weekly => (\n\n \"weekly\",\n\n a![\n\n \"Switch to daily\",\n\n attrs! 
{\n\n At::Href => Urls::new(&model.base_url).daily()\n\n }\n\n ],\n\n ),\n\n };\n\n div![\n\n format!(\n\n \"Hello {}! This is your {} report.\",\n\n ctx.logged_user, frequency\n\n ),\n\n link,\n\n ]\n\n}\n", "file_path": "examples/pages_hash_routing/src/page/admin/page/report.rs", "rank": 46, "score": 311593.9139207635 }, { "content": "pub fn video(model: &Model) -> Node<Msg> {\n\n div![]\n\n}\n", "file_path": "examples/custom_router/tests/routing_module/pages/other.rs", "rank": 47, "score": 308307.19416197424 }, { "content": "pub fn files(model: &Model) -> Node<Msg> {\n\n div![]\n\n}\n", "file_path": "examples/custom_router/tests/routing_module/pages/other.rs", "rank": 48, "score": 308307.19416197424 }, { "content": "pub fn root(model: &Model) -> Node<Msg> {\n\n div![]\n\n}\n", "file_path": "examples/custom_router/tests/routing_module/pages/other.rs", "rank": 49, "score": 308307.19416197424 }, { "content": "pub fn update(msg: Msg, model: &mut Model) {\n\n match msg {\n\n Msg::Increment => model.value += 1,\n\n Msg::Decrement => model.value -= 1,\n\n Msg::Reset => model.value = 0,\n\n }\n\n}\n\n\n\n// ------ ------\n\n// View\n\n// ------ ------\n\n\n", "file_path": "examples/subscribe/src/counter.rs", "rank": 50, "score": 305482.6791300715 }, { "content": "fn form(model: &Model, status: &bool) -> Node<Msg> {\n\n form![\n\n ev(Ev::Submit, |event| {\n\n event.prevent_default();\n\n Msg::Login\n\n }),\n\n fieldset![\n\n attrs! {\n\n At::Disabled=> status.as_at_value(),\n\n },\n\n legend![\"credentials\"],\n\n label![attrs! { At::For => \"username\"}, \"Username/Email\"],\n\n input![\n\n id!(\"username\"),\n\n attrs! 
{\n\n At::Required => true,\n\n At::Value=> model.credentials.target(),\n\n At::MinLength=> \"5\",\n\n At::Name => \"username\",\n\n At::MaxLength=> \"25\",\n", "file_path": "examples/custom_router/src/pages/login/mod.rs", "rank": 51, "score": 303367.38148960273 }, { "content": "fn form(model: &Model, status: &bool) -> Node<Msg> {\n\n let user = &model.user;\n\n form![\n\n ev(Ev::Submit, |event| {\n\n event.prevent_default();\n\n Msg::Register\n\n }),\n\n fieldset![\n\n attrs! {\n\n At::Disabled=> status.as_at_value(),\n\n },\n\n legend![\"credentials\"],\n\n label![attrs! { At::For => \"username\"}, \"Username\"],\n\n input![\n\n id!(\"username\"),\n\n attrs! {\n\n At::Required => true,\n\n At::Value=> user.credentials.username(),\n\n At::MinLength=> \"5\",\n\n At::Name => \"username\",\n", "file_path": "examples/custom_router/src/pages/register/mod.rs", "rank": 52, "score": 303367.38148960273 }, { "content": "fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {\n\n orders.subscribe(Msg::UrlChanged);\n\n\n\n Model::new(url, orders.clone_base_path())\n\n}\n\n\n\n// ------ ------\n\n// Model\n\n// ------ ------\n\n\n", "file_path": "examples/url/src/lib.rs", "rank": 53, "score": 297804.26351844764 }, { "content": "fn who_is_connected(model: &Model) -> String {\n\n if let Some(user) = &model.logged_user {\n\n let full_welcome = format!(\"Welcome {} {}\", user.first_name, user.last_name);\n\n full_welcome\n\n } else {\n\n \"Welcome Guest\".to_string()\n\n }\n\n}\n\n\n", "file_path": "examples/custom_router/src/lib.rs", "rank": 54, "score": 293832.8223549254 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {\n\n match msg {\n\n Msg::NameChanged(name) => model.form.name = name,\n\n Msg::Submit => {\n\n orders.skip(); // No need to rerender\n\n\n\n let token = \"YWxhZGRpbjpvcGVuc2VzYW1l\";\n\n // Created outside async block because of lifetime reasons\n\n // (we can't use reference to `model.form` in async function).\n\n 
let request = Request::new(\"/\")\n\n .method(Method::Post)\n\n .header(Header::custom(\"Accept-Language\", \"en\"))\n\n .header(Header::bearer(token))\n\n .json(&model.form)\n\n .expect(\"Serialization failed\");\n\n\n\n orders.perform_cmd(async {\n\n let response = fetch(request).await.expect(\"HTTP request failed\");\n\n\n\n if response.status().is_ok() {\n", "file_path": "examples/fetch/src/post.rs", "rank": 55, "score": 292604.5793110347 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {\n\n match msg {\n\n Msg::Fetch => {\n\n orders.skip(); // No need to rerender\n\n orders.perform_cmd(async {\n\n let response = fetch(\"user.json\").await.expect(\"HTTP request failed\");\n\n\n\n let user = response\n\n .check_status() // ensure we've got 2xx status\n\n .expect(\"status check failed\")\n\n .json::<User>()\n\n .await\n\n .expect(\"deserialization failed\");\n\n\n\n Msg::Received(user)\n\n });\n\n }\n\n Msg::Received(user) => {\n\n model.user = Some(user);\n\n }\n\n }\n\n}\n\n\n\n// ------ ------\n\n// View\n\n// ------ ------\n\n\n", "file_path": "examples/fetch/src/simple.rs", "rank": 56, "score": 292604.5793110347 }, { "content": "fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {\n\n orders.subscribe(Msg::UrlChanged);\n\n\n\n let user = LocalStorage::get(STORAGE_KEY).ok();\n\n Model {\n\n email: \"john@example.com\".to_owned(),\n\n password: \"1234\".to_owned(),\n\n base_url: url.to_base_url(),\n\n page: Page::init(url, user.as_ref(), orders),\n\n secret_message: None,\n\n user,\n\n }\n\n}\n\n\n\n// ------ ------\n\n// Model\n\n// ------ ------\n\n\n", "file_path": "examples/auth/src/lib.rs", "rank": 57, "score": 292107.26498764835 }, { "content": "fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {\n\n orders\n\n .subscribe(Msg::UrlChanged)\n\n .notify(subs::UrlChanged(url));\n\n\n\n Model {\n\n data: LocalStorage::get(STORAGE_KEY).unwrap_or_default(),\n\n refs: Refs::default(),\n\n }\n\n}\n\n\n\n// 
------ ------\n\n// Model\n\n// ------ ------\n\n\n\n// ------ Model ------\n\n\n", "file_path": "examples/todomvc/src/lib.rs", "rank": 58, "score": 292107.2649876484 }, { "content": "fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {\n\n orders\n\n .subscribe(Msg::UrlRequested)\n\n .subscribe(Msg::UrlChanged)\n\n .notify(subs::UrlChanged(url))\n\n .stream(streams::window_event(Ev::Resize, |_| Msg::OnResize))\n\n .stream(streams::document_event(Ev::SelectionChange, |_| {\n\n Msg::OnSelection\n\n }));\n\n\n\n Model {\n\n sub_handles: Vec::new(),\n\n timer_handle: None,\n\n timeout_handle: None,\n\n seconds: 0,\n\n counter: counter::init(&mut orders.proxy(Msg::Counter)),\n\n window_size: window_size(),\n\n }\n\n}\n\n\n\n// ------ ------\n\n// Model\n\n// ------ ------\n\n\n", "file_path": "examples/subscribe/src/lib.rs", "rank": 59, "score": 292107.2649876484 }, { "content": "fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {\n\n orders.subscribe(Msg::UrlChanged);\n\n Model {\n\n ctx: Context {\n\n logged_user: \"John Doe\",\n\n },\n\n base_url: url.to_base_url(),\n\n page: Page::init(url),\n\n }\n\n}\n\n\n\n// ------ ------\n\n// Model\n\n// ------ ------\n\n\n", "file_path": "examples/pages/src/lib.rs", "rank": 60, "score": 292107.2649876484 }, { "content": "// Add a new route using history's `push_state` method.\n\n//\n\n// # References\n\n// * [MDN docs](https://developer.mozilla.org/en-US/docs/Web/API/History_API)\n\npub fn push_route<U: Into<Url>>(url: U) -> Url {\n\n let url = url.into();\n\n // We use data to evaluate the path instead of the path displayed in the url.\n\n let data =\n\n JsValue::from_str(&serde_json::to_string(&url).expect(\"Problem serializing route data\"));\n\n\n\n util::history()\n\n .push_state_with_url(&data, \"\", Some(&url.to_string()))\n\n .expect(\"Problem pushing state\");\n\n url\n\n}\n\n\n", "file_path": "src/browser/service/routing.rs", "rank": 61, "score": 291052.2740274749 }, { "content": "fn view(model: 
&Model) -> Node<Msg> {\n\n ol![\n\n li![\n\n button![\n\n \"Go to '/ui/a/b/c?x=1?#hash'` and reload the page\",\n\n ev(Ev::Click, |_| {\n\n Url::new()\n\n .set_path(&[\"ui\", \"a\", \"b\", \"c\"])\n\n .set_search(UrlSearch::new(vec![\n\n (\"x\", vec![\"1\"])\n\n ]))\n\n .set_hash(\"hash\")\n\n .go_and_load()\n\n })\n\n ],\n\n ],\n\n li![\n\n format!(\"Base path ...... \\\"{}\\\" ...... (comment out `base` element in `index.html`, refresh the page and watch changes)\", &model.base_path.join(\"/\")),\n\n ],\n\n li![\n", "file_path": "examples/url/src/lib.rs", "rank": 62, "score": 289287.8756073215 }, { "content": "#[allow(clippy::needless_pass_by_value)]\n\nfn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {\n\n // https://developer.mozilla.org/en-US/docs/Web/API/WindowEventHandlers/onbeforeunload\n\n orders\n\n .stream(streams::window_event(Ev::BeforeUnload, Msg::BeforeUnload))\n\n .subscribe(Msg::UrlRequested);\n\n\n\n let text = LocalStorage::get(STORAGE_KEY).unwrap_or_default();\n\n Model {\n\n base_url: url.to_base_url(),\n\n saved_text_hash: calculate_hash(&text),\n\n text,\n\n }\n\n}\n\n\n\n// ------ ------\n\n// Model\n\n// ------ ------\n\n\n", "file_path": "examples/unsaved_changes/src/lib.rs", "rank": 63, "score": 289144.62874574075 }, { "content": "pub fn _fill_all_classes(all_classes: &mut Vec<String>, classes: Option<Vec<String>>) {\n\n if let Some(classes) = classes {\n\n for class in classes {\n\n if !class.is_empty() {\n\n all_classes.push(class);\n\n }\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/shortcuts.rs", "rank": 64, "score": 288657.6822431377 }, { "content": "fn load_models() -> HashMap<String, (String, String)> {\n\n let mut models: HashMap<String, (String, String)> = HashMap::new();\n\n\n\n models.insert(\n\n \"1\".to_string(),\n\n (\n\n \"Custom Router\".to_string(),\n\n \"Develop a Custom Router for Seed\".to_string(),\n\n ),\n\n );\n\n models.insert(\n\n \"2\".to_string(),\n\n (\n\n \"Seed Router\".to_string(),\n\n \"Help to make 
an official Router for Seed\".to_string(),\n\n ),\n\n );\n\n\n\n models\n\n}\n", "file_path": "examples/custom_router/src/pages/admin/mod.rs", "rank": 65, "score": 288075.6789948675 }, { "content": "pub fn modules_path(ident: Ident, attrs: std::slice::Iter<'_, Attribute>) -> Option<String> {\n\n let mut attrs =\n\n attrs.filter_map(\n\n |attr| match get_string_from_attribute(\"modules_path\", attr) {\n\n Ok(op) => op,\n\n Err(err) => abort!(Diagnostic::new(Level::Error, err.to_string())),\n\n },\n\n );\n\n let name = if attrs.clone().count() > 1 {\n\n abort!(Diagnostic::new(\n\n Level::Error,\n\n \"Multiple path names defined.\".into()\n\n ))\n\n } else if let Some(name) = attrs.next() {\n\n name.value()\n\n } else {\n\n \"\".to_string()\n\n };\n\n if name.to_string().is_empty() {\n\n None\n\n } else {\n\n Some(name)\n\n }\n\n}\n\n\n", "file_path": "examples/custom_router/router_macro_derive/src/routing_modules.rs", "rank": 66, "score": 287575.30366429186 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {\n\n match msg {\n\n Msg::TitleChanged(title) => model.form_mut().title = title,\n\n Msg::DescriptionChanged(description) => model.form_mut().description = description,\n\n Msg::FileChanged(file) => {\n\n model.form_mut().file = file;\n\n }\n\n Msg::AnswerChanged => toggle(&mut model.form_mut().answer),\n\n Msg::FormSubmitted(id) => {\n\n let form = mem::take(model.form_mut());\n\n let form_data = form.to_form_data().expect(\"create from data from form\");\n\n orders.perform_cmd(async { Msg::ServerResponded(send_request(form_data).await) });\n\n *model = Model::WaitingForResponse(form);\n\n log!(format!(\"Form {} submitted.\", id));\n\n }\n\n Msg::ServerResponded(Ok(response_data)) => {\n\n *model = Model::ReadyToSubmit(Form::default());\n\n clear_file_input();\n\n log_2(\n\n &\"%cResponse data:\".into(),\n", "file_path": "examples/server_integration/client/src/example_e.rs", "rank": 67, "score": 287530.0375649915 }, { 
"content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {\n\n match msg {\n\n Msg::SendRequest => {\n\n let (request, controller) = Request::new(get_request_url())\n\n .timeout(TIMEOUT)\n\n .controller();\n\n\n\n model.status = Status::WaitingForResponse(TimeoutStatus::Enabled);\n\n model.fetch_result = None;\n\n model.request_controller = Some(controller);\n\n\n\n orders.perform_cmd(async {\n\n Msg::Fetched(async { fetch(request).await?.text().await }.await)\n\n });\n\n }\n\n\n\n Msg::DisableTimeout => {\n\n if let Some(controller) = &model.request_controller {\n\n controller.disable_timeout().expect(\"disable timeout\");\n\n }\n", "file_path": "examples/server_integration/client/src/example_d.rs", "rank": 68, "score": 287530.0375649915 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {\n\n match msg {\n\n Msg::NewMessageChanged(message) => {\n\n model.new_message = message;\n\n }\n\n Msg::SendRequest => {\n\n orders.skip().perform_cmd({\n\n let message = model.new_message.clone();\n\n async { Msg::Fetched(send_message(message).await) }\n\n });\n\n }\n\n\n\n Msg::Fetched(Ok(response_data)) => {\n\n model.response_data = Some(response_data);\n\n }\n\n\n\n Msg::Fetched(Err(fetch_error)) => {\n\n log!(\"Example_A error:\", fetch_error);\n\n orders.skip();\n\n }\n", "file_path": "examples/server_integration/client/src/example_a.rs", "rank": 69, "score": 287530.0375649915 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {\n\n match msg {\n\n Msg::SendRequest => {\n\n orders.skip().perform_cmd(async {\n\n Msg::Fetched(\n\n async { fetch(get_request_url()).await?.check_status()?.json().await }.await,\n\n )\n\n });\n\n }\n\n\n\n Msg::Fetched(fetch_result) => {\n\n model.fetch_result = Some(fetch_result);\n\n }\n\n }\n\n}\n\n\n\n// ------ ------\n\n// View\n\n// ------ ------\n\n\n", "file_path": "examples/server_integration/client/src/example_b.rs", "rank": 
70, "score": 287530.0375649915 }, { "content": "pub fn update(msg: Msg, model: &mut Model, orders: &mut impl Orders<Msg>) {\n\n match msg {\n\n Msg::SendRequest => {\n\n let (request, controller) = Request::new(get_request_url()).controller();\n\n model.status = Status::WaitingForResponse;\n\n model.fetch_result = None;\n\n model.request_controller = Some(controller);\n\n orders.perform_cmd(async {\n\n Msg::Fetched(async { fetch(request).await?.text().await }.await)\n\n });\n\n }\n\n\n\n Msg::AbortRequest => {\n\n if let Some(controller) = &model.request_controller {\n\n controller.abort();\n\n }\n\n model.status = Status::RequestAborted;\n\n }\n\n\n\n Msg::Fetched(fetch_result) => {\n\n model.status = Status::ReadyToSendRequest;\n\n model.fetch_result = Some(fetch_result);\n\n }\n\n }\n\n}\n\n\n\n// ------ ------\n\n// View\n\n// ------ ------\n\n\n", "file_path": "examples/server_integration/client/src/example_c.rs", "rank": 71, "score": 287530.0375649915 }, { "content": "pub fn view<Ms>(model: &Model, ctx: &Context) -> Node<Ms> {\n\n page::report::view(&model.report_page, ctx)\n\n}\n", "file_path": "examples/pages/src/page/admin.rs", "rank": 72, "score": 287358.5517681857 }, { "content": "fn init(url: Url, orders: &mut impl Orders<Msg>) -> Model {\n\n let base_url = url.to_base_url();\n\n orders\n\n .subscribe(Msg::UrlChanged)\n\n .notify(subs::UrlChanged(url));\n\n\n\n Model {\n\n ctx: Context {\n\n logged_user: \"John Doe\",\n\n },\n\n base_url,\n\n page_id: None,\n\n admin_model: None,\n\n }\n\n}\n\n\n\n// ------ ------\n\n// Model\n\n// ------ ------\n\n\n", "file_path": "examples/pages_keep_state/src/lib.rs", "rank": 73, "score": 286289.99141245044 }, { "content": "fn guard(model: &Model) -> Option<bool> {\n\n // could check local storage, cookie or what ever you want\n\n if model.logged_user.is_some() {\n\n Some(true)\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "examples/custom_router/src/lib.rs", "rank": 74, "score": 285989.7869964201 }, { 
"content": "/// Identify the default route and catch error if none or too many\n\npub fn get_default_route(variants: Iter<'_, Variant>) -> Result<Variant> {\n\n let mut i = 0;\n\n let mut default_variant: Option<Variant> = None;\n\n for v in variants {\n\n let default = variant_default_route(v.ident.clone(), v.attrs.iter());\n\n if default {\n\n i += 1;\n\n default_variant = Some(v.clone());\n\n }\n\n }\n\n if i == 0 {\n\n abort!(Diagnostic::new(\n\n Level::Error,\n\n \"You need at least one default route with the attribute #[default_route].\".into()\n\n ));\n\n } else if i > 1 {\n\n abort!(Diagnostic::new(\n\n Level::Error,\n\n \"You cannot have multiple default routes.\".into()\n\n ));\n\n } else {\n\n Ok(default_variant.unwrap())\n\n }\n\n}\n\n\n", "file_path": "examples/custom_router/router_macro_derive/src/root.rs", "rank": 75, "score": 282777.8352341542 }, { "content": "fn admin_guard(model: &Model) -> Option<bool> {\n\n // could check local storage, cookie or what ever you want\n\n if let Some(user) = &model.logged_user {\n\n match user.role {\n\n Role::StandardUser => Some(false),\n\n Role::Admin => Some(true),\n\n }\n\n } else {\n\n None\n\n }\n\n}\n\n\n", "file_path": "examples/custom_router/src/lib.rs", "rank": 76, "score": 282665.34735980385 }, { "content": "pub fn commit(positions: View<Position>, mut instance_positions: UniqueViewMut<InstancePositions>) {\n\n let instance_positions = &mut instance_positions.0[..];\n\n\n\n (&positions).iter().enumerate().for_each(|(index, pos)| {\n\n //Set the instance data from bunny positions\n\n let instance_idx = index * 2;\n\n instance_positions[instance_idx] = pos.0.x as f32;\n\n instance_positions[instance_idx + 1] = pos.0.y as f32;\n\n });\n\n}\n\n\n", "file_path": "examples/bunnies/src/systems.rs", "rank": 77, "score": 282236.812617834 }, { "content": "#[allow(clippy::single_match_else)]\n\npub fn view<Ms>(model: &Model, ctx: &Context) -> Node<Ms> {\n\n match model.page_id {\n\n Some(PageId::Report) => {\n\n 
page::report::view(model.report_model.as_ref().expect(\"report model\"), ctx)\n\n }\n\n None => div![\"404\"],\n\n }\n\n}\n", "file_path": "examples/pages_keep_state/src/page/admin.rs", "rank": 78, "score": 281757.160538987 }, { "content": "pub fn view<Ms>(model: &Model, ctx: &Context) -> Node<Ms> {\n\n let (frequency, link) = match &model.frequency {\n\n Frequency::Daily => (\n\n \"daily\",\n\n a![\n\n \"Switch to weekly\",\n\n attrs! {\n\n At::Href => Urls::new(&model.base_url).weekly()\n\n }\n\n ],\n\n ),\n\n Frequency::Weekly => (\n\n \"weekly\",\n\n a![\n\n \"Switch to daily\",\n\n attrs! {\n\n At::Href => Urls::new(&model.base_url).daily()\n\n }\n\n ],\n\n ),\n\n };\n\n div![\n\n format!(\n\n \"Hello {}! This is your {} report.\",\n\n ctx.logged_user, frequency\n\n ),\n\n link,\n\n ]\n\n}\n", "file_path": "examples/pages/src/page/admin/page/report.rs", "rank": 79, "score": 281757.160538987 }, { "content": "fn view(model: &Model) -> impl IntoNodes<Msg> {\n\n div![\n\n id![\"content\"],\n\n h1![id![\"title\"], \"Element key example\"],\n\n card_table(model),\n\n control_buttons(),\n\n options(model),\n\n readme(&model.readme),\n\n ]\n\n}\n\n\n", "file_path": "examples/el_key/src/lib.rs", "rank": 80, "score": 278499.34316394845 }, { "content": "// ------ ------\n\n// View\n\n// ------ ------\n\n/// View function which renders stuff to html\n\nfn view(model: &Model) -> impl IntoNodes<Msg> {\n\n vec![\n\n header(&model),\n\n if let Some(route) = &model.router.current_route {\n\n route.view(model)\n\n } else {\n\n home(&model.theme)\n\n },\n\n ]\n\n}\n\n\n\n// /// Auto generated by proc macro attribute and called inside view\n\n// impl Guarded<Routes, Model, Msg> for Routes {\n\n// fn check_before_load(&self, scoped_state: &Model) -> Option<bool> {\n\n// if scoped_logged_user.is_some() {\n\n// // this party will be a function which the user has full control on, could be use for user permission as well\n\n// Some(true)\n\n// } else {\n\n// None\n\n// }\n\n// }\n\n// 
}\n\n\n", "file_path": "examples/custom_router/src/lib.rs", "rank": 81, "score": 277775.9999566061 }, { "content": "/// Checks whether the old element can be updated with a new one.\n\npub fn el_can_be_patched<Ms>(el_old: &El<Ms>, el_new: &El<Ms>) -> bool {\n\n el_old.namespace == el_new.namespace && el_old.tag == el_new.tag && el_old.key == el_new.key\n\n}\n\n\n", "file_path": "src/virtual_dom/patch/patch_gen.rs", "rank": 82, "score": 276944.79567149206 }, { "content": "pub fn view<Ms>(model: &Model, ctx: &Context) -> Node<Ms> {\n\n let (frequency, link) = match &model.frequency {\n\n Frequency::Daily => (\n\n \"daily\",\n\n a![\n\n \"Switch to weekly\",\n\n attrs! {\n\n At::Href => Urls::new(&model.base_url).weekly()\n\n }\n\n ],\n\n ),\n\n Frequency::Weekly => (\n\n \"weekly\",\n\n a![\n\n \"Switch to daily\",\n\n attrs! {\n\n At::Href => Urls::new(&model.base_url).daily()\n\n }\n\n ],\n\n ),\n\n };\n\n div![\n\n format!(\n\n \"Hello {}! This is your {} report.\",\n\n ctx.logged_user, frequency\n\n ),\n\n link,\n\n ]\n\n}\n", "file_path": "examples/pages_keep_state/src/page/admin/page/report.rs", "rank": 83, "score": 276542.2635251916 }, { "content": "fn init(_: Url, _: &mut impl Orders<Msg>) -> Model {\n\n Model::default()\n\n}\n\n\n\n// ------ ------\n\n// Model\n\n// ------ ------\n\n\n", "file_path": "examples/counter/src/lib.rs", "rank": 84, "score": 275951.1033198447 }, { "content": "fn init(_: Url, _: &mut impl Orders<Msg>) -> Model {\n\n Model {\n\n i18n: I18n::new(DEFAULT_LANG),\n\n }\n\n}\n\n\n\n// ------ ------\n\n// Model\n\n// ------ ------\n\n\n\npub struct Model {\n\n i18n: I18n,\n\n}\n\n\n\n// ------ ------\n\n// Update\n\n// ------ ------\n\n\n", "file_path": "examples/i18n/src/lib.rs", "rank": 85, "score": 275951.10331984464 }, { "content": "fn init(_: Url, _: &mut impl Orders<Msg>) -> Model {\n\n Model::default()\n\n}\n\n\n\n// ------ ------\n\n// Model\n\n// ------ ------\n\n\n", "file_path": "examples/fetch/src/lib.rs", "rank": 86, "score": 
275951.1033198447 }, { "content": "fn init(_: Url, _: &mut impl Orders<Msg>) -> Model {\n\n Model::default()\n\n}\n\n\n\n// ------ ------\n\n// Model\n\n// ------ ------\n\n\n", "file_path": "examples/tests/src/lib.rs", "rank": 87, "score": 275951.1033198447 }, { "content": "fn init(_: Url, _: &mut impl Orders<Msg>) -> Model {\n\n Model {\n\n counter: 0,\n\n redraw_text_field: true,\n\n }\n\n}\n\n\n\n// ------ ------\n\n// Model\n\n// ------ ------\n\n\n", "file_path": "examples/no_change/src/lib.rs", "rank": 88, "score": 275951.10331984464 }, { "content": "fn init(_: Url, _: &mut impl Orders<Msg>) -> Model {\n\n Model\n\n}\n\n\n\n// ----- ------\n\n// Model\n\n// ----- -----\n\n\n", "file_path": "examples/markdown/src/lib.rs", "rank": 89, "score": 275951.1033198447 }, { "content": "pub fn init(orders: &mut impl Orders<Msg>) -> Model {\n\n Model {\n\n value: 0,\n\n _sub_handle: orders.subscribe_with_handle(|_: DoReset| Msg::Reset),\n\n }\n\n}\n\n\n\n// ------ ------\n\n// Model\n\n// ------ ------\n\n\n\npub struct Model {\n\n value: i32,\n\n _sub_handle: SubHandle,\n\n}\n\n\n\n// ------ ------\n\n// Update\n\n// ------ ------\n\n\n\npub enum Msg {\n\n Increment,\n\n Decrement,\n\n Reset,\n\n}\n\n\n", "file_path": "examples/subscribe/src/counter.rs", "rank": 90, "score": 275567.87750886637 }, { "content": "fn view(model: &Model) -> impl IntoNodes<Msg> {\n\n vec![\n\n header(&model.base_url),\n\n match &model.page {\n\n Page::Home => div![\"Welcome home!\"],\n\n Page::Admin(admin_model) => page::admin::view(admin_model, &model.ctx),\n\n Page::NotFound => div![\"404\"],\n\n },\n\n ]\n\n}\n\n\n", "file_path": "examples/pages_hash_routing/src/lib.rs", "rank": 91, "score": 275241.32936777436 }, { "content": "fn init(_: Url, _: &mut impl Orders<Msg>) -> Model {\n\n Model::default()\n\n}\n\n\n\n// ------ ------\n\n// Model\n\n// ------ ------\n\n\n", "file_path": "examples/window_events/src/lib.rs", "rank": 92, "score": 272612.2481299513 }, { "content": "fn init(_: Url, 
_: &mut impl Orders<Msg>) -> Model {\n\n Model::default()\n\n}\n\n\n\n// ------ ------\n\n// Model\n\n// ------ ------\n\n\n", "file_path": "examples/update_from_js/src/lib.rs", "rank": 93, "score": 272612.2481299513 }, { "content": "fn init(_: Url, orders: &mut impl Orders<Msg>) -> Model {\n\n Model {\n\n sent_messages_count: 0,\n\n messages: Vec::new(),\n\n input_text: String::new(),\n\n input_binary: String::new(),\n\n web_socket: create_websocket(orders),\n\n web_socket_reconnector: None,\n\n }\n\n}\n\n\n\n// ------ ------\n\n// Update\n\n// ------ ------\n\n\n\npub enum Msg {\n\n WebSocketOpened,\n\n TextMessageReceived(shared::ServerMessage),\n\n BinaryMessageReceived(shared::ServerMessage),\n\n CloseWebSocket,\n\n WebSocketClosed(CloseEvent),\n\n WebSocketFailed,\n\n ReconnectWebSocket(usize),\n\n InputTextChanged(String),\n\n InputBinaryChanged(String),\n\n SendMessage(shared::ClientMessage),\n\n SendBinaryMessage(shared::ClientMessage),\n\n}\n\n\n", "file_path": "examples/websocket/src/client.rs", "rank": 94, "score": 272612.2481299513 }, { "content": "fn init(_: Url, _: &mut impl Orders<Msg>) -> Model {\n\n Model::default()\n\n}\n\n\n\n// ------ ------\n\n// Model\n\n// ------ ------\n\n\n", "file_path": "examples/component_builder/src/lib.rs", "rank": 95, "score": 272612.2481299513 }, { "content": "fn init(_: Url, _: &mut impl Orders<Msg>) -> Model {\n\n Model::default()\n\n}\n\n\n\n// ------ ------\n\n// Model\n\n// ------ ------\n\n\n", "file_path": "examples/tea_component/src/lib.rs", "rank": 96, "score": 272612.2481299513 }, { "content": "fn init(_: Url, orders: &mut impl Orders<Msg>) -> Model {\n\n orders.perform_cmd(async {\n\n Msg::ContinentsFetched(\n\n send_graphql_request(&QContinents::build_query(q_continents::Variables)).await,\n\n )\n\n });\n\n Model::default()\n\n}\n\n\n\n// ------ ------\n\n// Model\n\n// ------ ------\n\n\n", "file_path": "examples/graphql/src/lib.rs", "rank": 97, "score": 272612.2481299513 }, { "content": "fn init(_: 
Url, _: &mut impl Orders<Msg>) -> Model {\n\n Model {\n\n drop_zone_active: false,\n\n drop_zone_content: vec![div![\"Drop files here\"]],\n\n file_texts: Vec::new(),\n\n }\n\n}\n\n\n\n// ------ ------\n\n// Model\n\n// ------ ------\n\n\n", "file_path": "examples/drop_zone/src/lib.rs", "rank": 98, "score": 272612.2481299513 }, { "content": "fn init(_: Url, orders: &mut impl Orders<Msg>) -> Model {\n\n orders.after_next_render(|_| Msg::Rendered);\n\n Model::default()\n\n}\n\n\n\n// ------ ------\n\n// Model\n\n// ------ ------\n\n\n", "file_path": "examples/canvas/src/lib.rs", "rank": 99, "score": 272612.2481299513 } ]
Rust
src/ui/app.rs
xfbs/afp
0bd950504f24e2c762029b83f1c5142a6973664c
extern crate gio; extern crate gtk; use crate::ui::*; use gio::prelude::*; use gtk::prelude::*; use std::cell::RefCell; use std::env; use std::rc::Rc; const STYLE: &'static str = include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/data/style.css")); #[derive(Clone)] pub struct App { app: gtk::Application, window: Rc<RefCell<Option<gtk::ApplicationWindow>>>, main: MainController, } impl App { pub fn new(name: &str) -> App { App { app: gtk::Application::new(name, gio::ApplicationFlags::FLAGS_NONE) .expect("application startup failed"), window: Rc::new(RefCell::new(None)), main: MainController::new(), } } fn startup(&self) { self.setup_accels(); self.load_css(); self.main.startup(); } fn setup_accels(&self) { self.app.set_accels_for_action("app.quit", &["<Primary>Q"]); } fn load_css(&self) { let provider = gtk::CssProvider::new(); provider .load_from_data(STYLE.as_bytes()) .expect("Failed to load CSS"); gtk::StyleContext::add_provider_for_screen( &gdk::Screen::get_default().expect("Error initializing gtk css provider."), &provider, gtk::STYLE_PROVIDER_PRIORITY_APPLICATION, ); } fn shutdown(&self) { self.main.shutdown(); } fn activate(&self) { let window = gtk::ApplicationWindow::new(&self.app); *self.window.borrow_mut() = Some(window.clone()); self.main.activate(); self.setup_menu(); self.setup_actions(); self.main.add_window(&window); } fn setup_menu(&self) { let menu = gio::Menu::new(); let menu_bar = gio::Menu::new(); menu.append("About", "app.about"); menu.append("Quit", "app.quit"); self.app.set_app_menu(&menu); self.app.set_menubar(&menu_bar); } fn setup_actions(&self) { let quit = gio::SimpleAction::new("quit", None); let app = self.clone(); quit.connect_activate(move |_, _| { if let Some(window) = app.window.borrow().clone() { window.destroy(); } }); let about = gio::SimpleAction::new("about", None); let app = self.clone(); about.connect_activate(move |_, _| { let dialog = gtk::AboutDialog::new(); dialog.set_authors(&[env!("CARGO_PKG_AUTHORS")]); 
dialog.set_website_label(Some("Webseite")); dialog.set_website(Some(env!("CARGO_PKG_REPOSITORY"))); dialog.set_license_type(gtk::License::MitX11); dialog.set_program_name("Amateurfunkprüfer"); dialog.set_version(env!("CARGO_PKG_VERSION")); dialog.set_comments(env!("CARGO_PKG_DESCRIPTION")); dialog.set_title("Über Amateurfunkprüfer"); if let Some(window) = app.window.borrow().as_ref() { dialog.set_transient_for(Some(window)); } dialog.run(); dialog.destroy(); }); self.app.add_action(&quit); self.app.add_action(&about); } pub fn init(&self) { let app = self.clone(); self.app.connect_startup(move |_| app.startup()); let app = self.clone(); self.app.connect_shutdown(move |_| { app.shutdown(); }); let app = self.clone(); self.app.connect_activate(move |_| app.activate()); } pub fn run(&self) { self.app.run(&env::args().collect::<Vec<_>>()); } }
extern crate gio; extern crate gtk; use crate::ui::*; use gio::prelude::*; use gtk::prelude::*; use std::cell::RefCell; use std::env; use std::rc::Rc; const STYLE: &'static str = include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/data/style.css")); #[derive(Clone)] pub struct App { app: gtk::Application, window: Rc<RefCell<Option<gtk::ApplicationWindow>>>, main: MainController, } impl App { pub fn new(name: &str) -> App { App { app: gtk::Application::new(name, gio::ApplicationFlags::FLAGS_NONE) .expect("application startup failed"), window: Rc::new(RefCell::new(None)), main: MainController::new(), } } fn startup(&self) { self.setup_accels(); self.load_css(); self.main.startup(); } fn setup_accels(&self) { self.app.set_accels_for_action("app.quit", &["<Primary>Q"]); } fn load_css(&self) { let provider = gtk::CssProvider::new(); provider .load_from_data(STYLE.as_bytes()) .expect("Failed to load CSS"); gtk::StyleContext::add_provider_for_screen( &gdk::Screen::get_default().expect("Error initializing gtk css provider."), &provider, gtk::STYLE_PROVIDER_PRIORITY_APPLICATION, ); } fn shutdown(&self) { self.main.shutdown(); } fn activate(&self) { let window = gtk::ApplicationWindow::new(&self.app); *self.window.borrow_mut() = Some(window.clone()); self.main.activate(); self.setup_menu(); self.setup_actions(); self.main.add_window(&window); } fn setup_menu(&self) { let menu = gio::Menu::new(); let menu_bar = gio::Menu::new(); menu.append("About", "app.about"); menu.append("Quit", "app.quit"); self.app.set_app_menu(&menu); self.app.set_menubar(&menu_bar); } fn setup_actions(&self) { let quit = gio::SimpleAction::new("quit", None)
if let Some(window) = app.window.borrow().as_ref() { dialog.set_transient_for(Some(window)); } dialog.run(); dialog.destroy(); }); self.app.add_action(&quit); self.app.add_action(&about); } pub fn init(&self) { let app = self.clone(); self.app.connect_startup(move |_| app.startup()); let app = self.clone(); self.app.connect_shutdown(move |_| { app.shutdown(); }); let app = self.clone(); self.app.connect_activate(move |_| app.activate()); } pub fn run(&self) { self.app.run(&env::args().collect::<Vec<_>>()); } }
; let app = self.clone(); quit.connect_activate(move |_, _| { if let Some(window) = app.window.borrow().clone() { window.destroy(); } }); let about = gio::SimpleAction::new("about", None); let app = self.clone(); about.connect_activate(move |_, _| { let dialog = gtk::AboutDialog::new(); dialog.set_authors(&[env!("CARGO_PKG_AUTHORS")]); dialog.set_website_label(Some("Webseite")); dialog.set_website(Some(env!("CARGO_PKG_REPOSITORY"))); dialog.set_license_type(gtk::License::MitX11); dialog.set_program_name("Amateurfunkprüfer"); dialog.set_version(env!("CARGO_PKG_VERSION")); dialog.set_comments(env!("CARGO_PKG_DESCRIPTION")); dialog.set_title("Über Amateurfunkprüfer");
random
[ { "content": "#[test]\n\nfn test_load_file() {\n\n let mut d = std::path::PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n d.push(\"test/datastore.yaml\");\n\n\n\n let ds = DataStore::load(&d);\n\n assert!(ds.is_ok());\n\n let ds = ds.ok().unwrap();\n\n assert_eq!(&ds.filename, &d);\n\n}\n\n\n", "file_path": "src/datastore.rs", "rank": 0, "score": 57648.47842678863 }, { "content": "#[test]\n\nfn test_check_sections() {\n\n let mut d = std::path::PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n d.push(\"test/datastore.yaml\");\n\n let ds = DataStore::load(&d).ok().unwrap();\n\n\n\n assert_eq!(ds.sections.len(), 4);\n\n assert_eq!(\n\n ds.section(0).unwrap().name(),\n\n \"Technische Kenntnisse der Klasse E\"\n\n );\n\n assert_eq!(\n\n ds.section(1).unwrap().name(),\n\n \"Technische Kenntnisse der Klasse A\"\n\n );\n\n assert_eq!(ds.section(2).unwrap().name(), \"Betriebliche Kenntnisse\");\n\n assert_eq!(ds.section(3).unwrap().name(), \"Kenntnisse von Vorschriften\");\n\n\n\n assert_eq!(ds.section(0).unwrap().short(), \"Technik E\");\n\n assert_eq!(ds.section(1).unwrap().short(), \"Technik A\");\n\n assert_eq!(ds.section(2).unwrap().short(), \"Betrieb\");\n\n assert_eq!(ds.section(3).unwrap().short(), \"Vorschriften\");\n\n\n\n assert_eq!(ds.section(0).unwrap().questions().len(), 4);\n\n assert_eq!(ds.section(1).unwrap().questions().len(), 0);\n\n assert_eq!(ds.section(2).unwrap().questions().len(), 0);\n\n assert_eq!(ds.section(3).unwrap().questions().len(), 0);\n\n}\n\n\n", "file_path": "src/datastore.rs", "rank": 1, "score": 34446.235553795035 }, { "content": "#[test]\n\nfn test_check_questions() {\n\n let mut d = std::path::PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n d.push(\"test/datastore.yaml\");\n\n let ds = DataStore::load(&d).ok().unwrap();\n\n\n\n assert_eq!(ds.section(0).unwrap().questions().len(), 4);\n\n assert_eq!(ds.section(0).unwrap().questions()[0].id(), \"TA101\");\n\n assert_eq!(\n\n ds.section(0).unwrap().questions()[0].question(),\n\n 
\"0,042 A entspricht\"\n\n );\n\n assert_eq!(\n\n ds.section(0).unwrap().questions()[0].answers(),\n\n &vec![\"40\", \"41\", \"42\", \"43\"]\n\n .into_iter()\n\n .map(String::from)\n\n .collect() as &Vec<String>\n\n );\n\n\n\n assert_eq!(\n", "file_path": "src/datastore.rs", "rank": 2, "score": 34446.235553795035 }, { "content": "#[derive(Debug, PartialEq, Serialize, Deserialize)]\n\nstruct DataStoreHistory {\n\n time: u64,\n\n choice: usize,\n\n}\n\n\n\nimpl From<&DataStore> for DataStoreFile {\n\n fn from(datastore: &DataStore) -> DataStoreFile {\n\n DataStoreFile {\n\n sections: datastore\n\n .sections()\n\n .iter()\n\n .map(|section| section.into())\n\n .collect(),\n\n }\n\n }\n\n}\n\n\n\nimpl DataStoreFileSection {\n\n pub fn load(self) -> Result<Section, Box<Error>> {\n\n Ok(Section::new(\n", "file_path": "src/datastore/file.rs", "rank": 3, "score": 33845.655064029204 }, { "content": "#[derive(Debug, PartialEq, Serialize, Deserialize)]\n\nstruct DataStoreQuestion {\n\n id: String,\n\n question: String,\n\n answers: Vec<String>,\n\n subsection: usize,\n\n subsubsection: usize,\n\n history: Vec<DataStoreHistory>,\n\n}\n\n\n", "file_path": "src/datastore/file.rs", "rank": 4, "score": 33845.655064029204 }, { "content": "def fix(str)\n\n s = str.gsub(\"%\", \"\\\\%\")\n\n\n\n if s =~ /{|}/\n\n puts s\n\n end\n\n\n\n s\n\nend\n\n\n\nsections = CSV.new(File.read(ARGV[1])).to_a\n\n\n\nquestions.keys.sort.each do |id|\n\n question = questions[id]\n\n\n\n while sections[0] && question[:id][0...sections[0][0].size] == sections[0][0]\n\n if sections[0][0].size == 2\n\n puts \"\\\\section{#{fix(sections[0][1])}}\"\n\n else\n\n puts \"\\\\subsection{#{fix(sections[0][1])}}\"\n", "file_path": "data/process_scrap.rb", "rank": 5, "score": 33587.54851899411 }, { "content": "def handle(str)\n\n str\n\nend\n\n\n\ndatastore = {}\n\ndatastore[\"sections\"] = sections.map do |name|\n\n questions = File.read(\"#{name}_questions.tex\")\n\n section = {}\n\n section[\"name\"] = 
\"Technische Kenntnisse der Klasse E\"\n\n section[\"short\"] = \"Technik E\"\n\n section[\"questions\"] = []\n\n section[\"subsections\"] = []\n\n \n\n questions.split(\"\\n\\n\").each do |qsec|\n\n case qsec\n\n when /\\\\section{(.+)}/\n\n section[\"subsections\"] << {\"name\" => handle($1), \"subsubsections\" => []}\n\n when /\\\\subsection{(.+)}/\n\n section[\"subsections\"].last[\"subsubsections\"] << handle($1)\n\n when /\\\\begin{question}/\n", "file_path": "data/generate_datastore.rb", "rank": 6, "score": 33587.54851899411 }, { "content": "#[test]\n\nfn test_mut_question_state() {\n\n let mut d = std::path::PathBuf::from(env!(\"CARGO_MANIFEST_DIR\"));\n\n d.push(\"test/datastore.yaml\");\n\n let mut ds = DataStore::load(&d).ok().unwrap();\n\n\n\n let section = ds.section_mut(0).unwrap();\n\n let question = section.question_mut(0).unwrap();\n\n\n\n assert!(question.stale_time().is_none());\n\n assert_eq!(question.state(), QuestionState::Red);\n\n\n\n question.answer(1);\n\n assert!(question.stale_time().is_some());\n\n assert_eq!(question.state(), QuestionState::Red);\n\n\n\n question.answer(0);\n\n assert_eq!(question.state(), QuestionState::Yellow);\n\n question.answer(2);\n\n question.answer(1);\n\n question.answer(2);\n\n assert_eq!(question.state(), QuestionState::Red);\n\n question.answer(0);\n\n assert_eq!(question.state(), QuestionState::Yellow);\n\n question.answer(0);\n\n assert_eq!(question.state(), QuestionState::Yellow);\n\n question.answer(0);\n\n assert_eq!(question.state(), QuestionState::Green);\n\n}\n", "file_path": "src/datastore.rs", "rank": 7, "score": 33322.32632649355 }, { "content": "#[derive(Debug, PartialEq, Serialize, Deserialize)]\n\nstruct DataStoreSubSection {\n\n name: String,\n\n subsubsections: Vec<String>,\n\n}\n\n\n", "file_path": "src/datastore/file.rs", "rank": 8, "score": 32824.13590112467 }, { "content": "pub trait Controller {\n\n fn startup(&self);\n\n fn activate(&self);\n\n fn shutdown(&self);\n\n}\n", "file_path": 
"src/ui/controller/mod.rs", "rank": 9, "score": 32035.198690734105 }, { "content": "pub trait View {\n\n fn widget(&self) -> gtk::Widget;\n\n}\n\n\n", "file_path": "src/ui/view/mod.rs", "rank": 10, "score": 32035.198690734105 }, { "content": "pub trait Labeled {\n\n fn label(&self) -> gtk::Label;\n\n}\n", "file_path": "src/ui/view/mod.rs", "rank": 11, "score": 32035.198690734105 }, { "content": "extern crate gio;\n\nextern crate gtk;\n\n\n\nuse crate::ui::view::*;\n\nuse gtk::prelude::*;\n\n\n\n#[derive(Clone)]\n\npub struct MainView {\n\n pub area: gtk::Notebook,\n\n}\n\n\n\nimpl MainView {\n\n pub fn new() -> MainView {\n\n MainView {\n\n area: gtk::Notebook::new(),\n\n }\n\n }\n\n\n\n pub fn add_tab<T: View + Labeled>(&self, page: &T) {\n\n self.area.append_page(&page.widget(), Some(&page.label()));\n\n }\n\n}\n\n\n\nimpl View for MainView {\n\n fn widget(&self) -> gtk::Widget {\n\n self.area.clone().upcast()\n\n }\n\n}\n", "file_path": "src/ui/view/main.rs", "rank": 18, "score": 24270.645804414726 }, { "content": "use crate::ui::*;\n\nuse crate::*;\n\nuse gtk::prelude::*;\n\nuse std::cell::RefCell;\n\nuse std::rc::Rc;\n\nuse ui::view::View;\n\n\n\n#[derive(Clone)]\n\npub struct MainController {\n\n view: MainView,\n\n overview: OverviewController,\n\n sections: Rc<RefCell<Vec<SectionController>>>,\n\n data: Rc<RefCell<DataStore>>,\n\n}\n\n\n\nimpl MainController {\n\n pub fn new() -> MainController {\n\n let data = Rc::new(RefCell::new(DataStore::new()));\n\n\n\n MainController {\n", "file_path": "src/ui/controller/main.rs", "rank": 19, "score": 24264.455131872317 }, { "content": " view: MainView::new(),\n\n overview: OverviewController::new(&data),\n\n sections: Rc::new(RefCell::new(Vec::new())),\n\n data: data,\n\n }\n\n }\n\n\n\n pub fn view(&self) -> &MainView {\n\n &self.view\n\n }\n\n\n\n pub fn startup(&self) {\n\n self.overview.startup();\n\n }\n\n\n\n pub fn activate(&self) {\n\n self.load_data();\n\n self.activate_overview();\n\n 
self.activate_sections();\n\n }\n", "file_path": "src/ui/controller/main.rs", "rank": 20, "score": 24261.305570949928 }, { "content": "\n\n fn activate_sections(&self) {\n\n for (i, _) in self.data.borrow().sections().iter().enumerate() {\n\n self.activate_section(i);\n\n }\n\n }\n\n\n\n fn activate_section(&self, index: usize) {\n\n let section = SectionController::new(&self.data, index);\n\n section.startup();\n\n section.activate();\n\n self.view.add_tab(section.view());\n\n self.sections.borrow_mut().push(section);\n\n }\n\n\n\n pub fn add_window(&self, window: &gtk::ApplicationWindow) {\n\n window.add(&self.view.widget());\n\n window.set_default_size(500, 400);\n\n window.set_position(gtk::WindowPosition::Center);\n\n window.show_all();\n\n }\n\n}\n", "file_path": "src/ui/controller/main.rs", "rank": 21, "score": 24260.04553891089 }, { "content": "\n\n pub fn shutdown(&self) {\n\n self.overview.shutdown();\n\n self.data.borrow().save().unwrap_or_else(|error| {\n\n println!(\"{:?}\", error);\n\n panic!(\"error saving file!\")\n\n });\n\n }\n\n\n\n fn load_data(&self) {\n\n *self.data.borrow_mut() = DataStore::load(&std::path::PathBuf::from(\n\n \"/Users/pelsen/.config/afp/datastore.yml\",\n\n ))\n\n .unwrap();\n\n }\n\n\n\n fn activate_overview(&self) {\n\n self.overview.activate();\n\n self.view.add_tab(self.overview.view());\n\n }\n", "file_path": "src/ui/controller/main.rs", "rank": 22, "score": 24255.457920481473 }, { "content": "extern crate gtk;\n\n\n\nuse crate::ui::*;\n\nuse gtk::prelude::*;\n\n\n\n#[derive(Clone)]\n\npub struct SectionView {\n\n label: gtk::Label,\n\n stack: gtk::Stack,\n\n}\n\n\n\nimpl SectionView {\n\n pub fn new() -> SectionView {\n\n SectionView {\n\n label: gtk::Label::new(None),\n\n stack: gtk::Stack::new(),\n\n }\n\n }\n\n\n\n pub fn widget(&self) -> &gtk::Stack {\n", "file_path": "src/ui/view/section.rs", "rank": 23, "score": 16.016473542970882 }, { "content": "extern crate gio;\n\nextern crate gtk;\n\n\n\nuse 
crate::ui::view::*;\n\nuse crate::*;\n\nuse gtk::prelude::*;\n\nuse std::cell::RefCell;\n\nuse std::f64::consts::PI;\n\nuse std::rc::Rc;\n\n\n\n#[derive(Clone)]\n\npub struct OverView {\n\n body: gtk::Grid,\n\n label: gtk::Label,\n\n title: gtk::Label,\n\n section_labels: Rc<RefCell<Vec<gtk::Label>>>,\n\n section_charts: Rc<RefCell<Vec<gtk::DrawingArea>>>,\n\n}\n\n\n\nimpl OverView {\n", "file_path": "src/ui/view/overview.rs", "rank": 24, "score": 15.677178233732185 }, { "content": "extern crate gtk;\n\n\n\nuse crate::ui::*;\n\nuse gtk::prelude::*;\n\n\n\n#[derive(Clone)]\n\npub struct SectionOverView {\n\n body: gtk::Grid,\n\n title: gtk::Label,\n\n subsections: gtk::FlowBox,\n\n exam: gtk::Button,\n\n practise: gtk::Button,\n\n}\n\n\n\nimpl SectionOverView {\n\n pub fn new() -> SectionOverView {\n\n SectionOverView {\n\n body: gtk::Grid::new(),\n\n title: gtk::Label::new(None),\n\n subsections: gtk::FlowBox::new(),\n", "file_path": "src/ui/view/section_overview.rs", "rank": 25, "score": 14.556682877152834 }, { "content": "extern crate glib;\n\nextern crate gtk;\n\n\n\nuse crate::ui::*;\n\nuse gtk::prelude::*;\n\nuse std::cell::RefCell;\n\nuse std::rc::Rc;\n\n\n\n#[derive(Clone)]\n\npub struct PractiseView {\n\n body: gtk::Grid,\n\n title_box: gtk::Box,\n\n title: gtk::Label,\n\n section: gtk::Label,\n\n subsection: gtk::Label,\n\n id: gtk::Label,\n\n question: gtk::Label,\n\n answers: gtk::Grid,\n\n back: gtk::Button,\n\n answer_fn: Rc<RefCell<Option<Box<dyn Fn(&gtk::Button, usize)>>>>,\n", "file_path": "src/ui/view/practise.rs", "rank": 26, "score": 14.246961868377497 }, { "content": " }\n\n\n\n pub fn connect_exam<F: Fn() + 'static>(&self, fun: F) {\n\n self.exam.connect_clicked(move |_| {\n\n fun();\n\n });\n\n }\n\n\n\n pub fn connect_practise<F: Fn() + 'static>(&self, fun: F) {\n\n self.practise.connect_clicked(move |_| {\n\n fun();\n\n });\n\n }\n\n}\n\n\n\nimpl View for SectionOverView {\n\n fn widget(&self) -> gtk::Widget {\n\n 
self.body.clone().upcast()\n\n }\n\n}\n", "file_path": "src/ui/view/section_overview.rs", "rank": 27, "score": 11.579306927680374 }, { "content": " }\n\n\n\n pub fn widget(&self) -> &gtk::Grid {\n\n &self.body\n\n }\n\n\n\n /// Connect a closure to the back button.\n\n pub fn connect_back<F: Fn(&gtk::Button) + 'static>(&self, f: F) {\n\n self.back.connect_clicked(f);\n\n }\n\n\n\n /// Connect a closure to when a choice is made. The argument is the numeric\n\n /// index of the choice, with 0 being the first (and correct) one always.\n\n pub fn connect_choose<F: Fn(&gtk::Button, usize) + 'static>(&self, f: F) {\n\n *self.answer_fn.borrow_mut() = Some(Box::new(f));\n\n }\n\n}\n\n\n\nimpl View for PractiseView {\n\n fn widget(&self) -> gtk::Widget {\n\n self.body.clone().upcast()\n\n }\n\n}\n", "file_path": "src/ui/view/practise.rs", "rank": 28, "score": 11.249290101275395 }, { "content": "extern crate permutation;\n\nextern crate rand;\n\n\n\nuse crate::ui::*;\n\nuse crate::*;\n\nuse gtk::prelude::*;\n\nuse permutation::Permutation;\n\nuse rand::seq::SliceRandom;\n\nuse std::cell::{Cell, RefCell};\n\nuse std::rc::Rc;\n\n\n\n#[derive(Clone)]\n\npub struct PractiseController {\n\n section: usize,\n\n question: Rc<Cell<usize>>,\n\n permutation: Rc<RefCell<Permutation>>,\n\n view: PractiseView,\n\n data: Rc<RefCell<DataStore>>,\n\n filter: Rc<Cell<QuestionFilter>>,\n\n}\n", "file_path": "src/ui/controller/practise.rs", "rank": 29, "score": 10.922825758661794 }, { "content": "use crate::ui::*;\n\nuse crate::*;\n\nuse std::cell::RefCell;\n\nuse std::rc::Rc;\n\n\n\n#[derive(Clone)]\n\npub struct OverviewController {\n\n view: OverView,\n\n data: Rc<RefCell<DataStore>>,\n\n}\n\n\n\nimpl OverviewController {\n\n pub fn new(data: &Rc<RefCell<DataStore>>) -> OverviewController {\n\n OverviewController {\n\n view: OverView::new(),\n\n data: data.clone(),\n\n }\n\n }\n\n\n\n pub fn startup(&self) {}\n", "file_path": "src/ui/controller/overview.rs", "rank": 30, "score": 
10.787681109542874 }, { "content": "}\n\n\n\nimpl PractiseView {\n\n pub fn new() -> PractiseView {\n\n PractiseView {\n\n body: gtk::Grid::new(),\n\n title_box: gtk::Box::new(gtk::Orientation::Horizontal, 10),\n\n title: gtk::Label::new(None),\n\n section: gtk::Label::new(None),\n\n subsection: gtk::Label::new(None),\n\n id: gtk::Label::new(None),\n\n answers: gtk::Grid::new(),\n\n question: gtk::Label::new(None),\n\n back: gtk::Button::new_from_icon_name(\"go-previous\", gtk::IconSize::Button),\n\n answer_fn: Rc::new(RefCell::new(None)),\n\n }\n\n }\n\n\n\n pub fn setup(&self) {\n\n self.title.set_text(\"Übung\");\n", "file_path": "src/ui/view/practise.rs", "rank": 31, "score": 10.581343014015234 }, { "content": " }\n\n\n\n pub fn set_subsection(&self, text: &str) {\n\n self.subsection.set_text(text);\n\n }\n\n\n\n pub fn set_id(&self, text: &str) {\n\n self.id.set_text(text);\n\n }\n\n\n\n pub fn set_question(&self, text: &str) {\n\n self.question.set_text(text);\n\n }\n\n\n\n pub fn add_answer(&self, row: usize) {\n\n if self.answers.get_child_at(0, row as i32).is_none() {\n\n let button = gtk::Button::new();\n\n let me = self.clone();\n\n button.connect_clicked(move |button| {\n\n let answer_fn = me.answer_fn.borrow();\n", "file_path": "src/ui/view/practise.rs", "rank": 32, "score": 10.523028387731976 }, { "content": "//! # DataStore Module\n\n//!\n\n//! Data structures and methods for keeping track of questions and recording\n\n//! 
interactions.\n\n\n\nextern crate rand;\n\n\n\nmod file;\n\n\n\nuse rand::seq::SliceRandom;\n\nuse std::error::Error;\n\nuse std::fs::File;\n\nuse std::fs::OpenOptions;\n\nuse std::io::{BufReader, BufWriter};\n\nuse std::path::Path;\n\nuse std::path::PathBuf;\n\nuse std::time::{Duration, SystemTime};\n\n\n\npub const DEFAULT_DATASTORE: &'static str =\n\n include_str!(concat!(env!(\"CARGO_MANIFEST_DIR\"), \"/data/datastore.yml\"));\n", "file_path": "src/datastore.rs", "rank": 33, "score": 10.354489849444658 }, { "content": " me.show_main();\n\n }\n\n }\n\n },\n\n None => panic!(),\n\n }\n\n });\n\n */\n\n\n\n pub fn set_label(&self, label: &str) {\n\n self.label.set_text(label);\n\n }\n\n\n\n pub fn add_named<T: View>(&self, page: &T, name: &str) {\n\n self.stack.add_named(&page.widget(), name);\n\n }\n\n\n\n pub fn show(&self, child: &str, transition: gtk::StackTransitionType) {\n\n self.stack.set_visible_child_full(child, transition);\n\n }\n", "file_path": "src/ui/view/section.rs", "rank": 34, "score": 10.26574716496561 }, { "content": " style.remove_class(class);\n\n } else {\n\n panic!();\n\n }\n\n }\n\n\n\n pub fn get_button(&self, index: usize) -> Option<gtk::Widget> {\n\n match self.subsections.get_child_at_index(index as i32) {\n\n Some(child) => child.get_child(),\n\n None => None,\n\n }\n\n }\n\n\n\n pub fn add_button(&self, label: &str) -> gtk::Button {\n\n let button = gtk::Button::new();\n\n button.set_label(label);\n\n button.set_hexpand(false);\n\n button.show();\n\n self.subsections.add(&button);\n\n button\n", "file_path": "src/ui/view/section_overview.rs", "rank": 35, "score": 10.074971637130993 }, { "content": "//! Data structures and methods for loading and storing a DataStore\n\n//! 
to and from a file.\n\n\n\nextern crate serde;\n\nextern crate serde_yaml;\n\n\n\nuse serde::{Deserialize, Serialize};\n\nuse std::error::Error;\n\nuse std::time::{Duration, SystemTime};\n\n\n\nuse crate::datastore::*;\n\n\n\n#[derive(Debug, PartialEq, Serialize, Deserialize)]\n\npub struct DataStoreFile {\n\n pub sections: Vec<DataStoreFileSection>,\n\n}\n\n\n\n#[derive(Debug, PartialEq, Serialize, Deserialize)]\n\npub struct DataStoreFileSection {\n\n name: String,\n\n short: String,\n\n questions: Vec<DataStoreQuestion>,\n\n subsections: Vec<DataStoreSubSection>,\n\n}\n\n\n\n#[derive(Debug, PartialEq, Serialize, Deserialize)]\n", "file_path": "src/datastore/file.rs", "rank": 36, "score": 9.936712804449954 }, { "content": "mod app;\n\nmod controller;\n\nmod view;\n\n\n\npub use app::*;\n\npub use controller::*;\n\npub use view::*;\n", "file_path": "src/ui/mod.rs", "rank": 37, "score": 9.808615873449227 }, { "content": "use crate::ui::*;\n\nuse crate::*;\n\nuse gtk::prelude::*;\n\nuse std::cell::RefCell;\n\nuse std::rc::Rc;\n\n\n\n#[derive(Clone)]\n\npub struct SectionOverviewController {\n\n index: usize,\n\n view: SectionOverView,\n\n data: Rc<RefCell<DataStore>>,\n\n}\n\n\n\nimpl SectionOverviewController {\n\n pub fn new(data: &Rc<RefCell<DataStore>>, index: usize) -> SectionOverviewController {\n\n SectionOverviewController {\n\n index: index,\n\n view: SectionOverView::new(),\n\n data: data.clone(),\n\n }\n", "file_path": "src/ui/controller/section_overview.rs", "rank": 38, "score": 9.572496984758338 }, { "content": " if let Some(ref fun) = *answer_fn {\n\n fun(button, row);\n\n }\n\n });\n\n self.answers.attach(&button, 0, row as i32, 1, 1);\n\n let label = gtk::Label::new(None);\n\n self.answers.attach(&label, 1, row as i32, 4, 1);\n\n }\n\n }\n\n\n\n pub fn set_answer(&self, row: usize, btn: &str, text: &str) {\n\n self.add_answer(row);\n\n\n\n if let Some(button) = self.get_answer_button(row) {\n\n button.show();\n\n 
button.get_style_context().remove_class(\"red\");\n\n button.set_label(btn);\n\n button.set_vexpand(false);\n\n }\n\n\n", "file_path": "src/ui/view/practise.rs", "rank": 39, "score": 9.525249949668225 }, { "content": "mod main;\n\nmod overview;\n\nmod practise;\n\nmod section;\n\nmod section_overview;\n\n\n\npub use main::*;\n\npub use overview::*;\n\npub use practise::*;\n\npub use section::*;\n\npub use section_overview::*;\n\n\n", "file_path": "src/ui/controller/mod.rs", "rank": 40, "score": 9.51921222217543 }, { "content": "mod main;\n\nmod overview;\n\nmod practise;\n\nmod section;\n\nmod section_overview;\n\n\n\npub use main::*;\n\npub use overview::*;\n\npub use practise::*;\n\npub use section::*;\n\npub use section_overview::*;\n\n\n", "file_path": "src/ui/view/mod.rs", "rank": 41, "score": 9.51921222217543 }, { "content": "//! # AfP\n\n//!\n\n//! This crate is a small app for practising ham radio exam questions. It exposes\n\n//! a database of questions along with facilities to record interactions (answering\n\n//! questions) in DataStore.\n\n//!\n\n//! It also exposes some modules in ui that are used to draw a GUI to show and\n\n//! 
interact with the questions.\n\n\n\nmod datastore;\n\npub use datastore::*;\n\n\n\npub mod ui;\n", "file_path": "src/lib.rs", "rank": 42, "score": 8.351152712390249 }, { "content": " self.exam.set_label(\"Prüfung\");\n\n self.practise.set_label(\"Üben\");\n\n }\n\n\n\n pub fn set_title(&self, title: &str) {\n\n self.title.set_text(title);\n\n }\n\n\n\n pub fn button_add_class(&self, index: usize, class: &str) {\n\n if let Some(button) = self.get_button(index) {\n\n let style = button.get_style_context();\n\n style.add_class(class);\n\n } else {\n\n panic!();\n\n }\n\n }\n\n\n\n pub fn button_remove_class(&self, index: usize, class: &str) {\n\n if let Some(button) = self.get_button(index) {\n\n let style = button.get_style_context();\n", "file_path": "src/ui/view/section_overview.rs", "rank": 43, "score": 8.255045243826947 }, { "content": "use crate::ui::*;\n\nuse crate::*;\n\nuse std::cell::Cell;\n\nuse std::cell::RefCell;\n\nuse std::rc::Rc;\n\n\n\n#[derive(Clone)]\n\npub struct SectionController {\n\n index: usize,\n\n view: SectionView,\n\n overview: SectionOverviewController,\n\n practise: PractiseController,\n\n data: Rc<RefCell<DataStore>>,\n\n filter: Rc<Cell<QuestionFilter>>,\n\n}\n\n\n\nimpl SectionController {\n\n pub fn new(data: &Rc<RefCell<DataStore>>, index: usize) -> SectionController {\n\n SectionController {\n\n index: index,\n", "file_path": "src/ui/controller/section.rs", "rank": 44, "score": 8.14559001727528 }, { "content": " None => None,\n\n }\n\n }\n\n\n\n pub fn filter(&self) -> QuestionFilter {\n\n match (self.subsection, self.subsubsection) {\n\n (0, 0) => QuestionFilter::All,\n\n (ss, 0) => QuestionFilter::SubSection(ss - 1),\n\n (ss, sss) => QuestionFilter::SubSubSection(ss - 1, sss - 1),\n\n }\n\n }\n\n\n\n pub fn history(&self) -> &Vec<History> {\n\n &self.history\n\n }\n\n}\n\n\n\nimpl History {\n\n pub fn new(time: SystemTime, choice: usize) -> History {\n\n History {\n", "file_path": "src/datastore.rs", "rank": 45, "score": 
6.547300266429319 }, { "content": " self.subsection,\n\n self.subsubsection,\n\n self.history\n\n .into_iter()\n\n .map(|s| s.load().unwrap())\n\n .collect(),\n\n ))\n\n }\n\n}\n\n\n\nimpl DataStoreHistory {\n\n pub fn load(self) -> Result<History, Box<Error>> {\n\n Ok(History::new(\n\n SystemTime::UNIX_EPOCH + Duration::from_secs(self.time),\n\n self.choice,\n\n ))\n\n }\n\n}\n", "file_path": "src/datastore/file.rs", "rank": 46, "score": 6.48664461859652 }, { "content": " controller.show_practise();\n\n });\n\n }\n\n\n\n fn activate_views(&self) {\n\n self.view.add_named(self.overview.view(), \"main\");\n\n self.view.add_named(self.practise.view(), \"practise\");\n\n }\n\n\n\n fn activate_practise_buttons(&self) {\n\n let controller = self.clone();\n\n self.practise.connect_back(move || {\n\n controller.show_overview();\n\n });\n\n }\n\n}\n\n\n\nimpl Controller for SectionController {\n\n fn startup(&self) {\n\n self.overview.startup();\n", "file_path": "src/ui/controller/section.rs", "rank": 47, "score": 6.338797916714524 }, { "content": " }\n\n}\n\n\n\nimpl Section {\n\n pub fn new(\n\n name: String,\n\n short: String,\n\n questions: Vec<Question>,\n\n subsections: Vec<SubSection>,\n\n ) -> Self {\n\n Section {\n\n name: name,\n\n short: short,\n\n questions: questions,\n\n subsections: subsections,\n\n }\n\n }\n\n\n\n pub fn name(&self) -> &str {\n\n &self.name\n", "file_path": "src/datastore.rs", "rank": 48, "score": 6.324526293893077 }, { "content": " &self.stack\n\n }\n\n\n\n pub fn label(&self) -> &gtk::Label {\n\n &self.label\n\n }\n\n\n\n /*\n\n self.overview.init();\n\n self.stack.add_named(&self.overview.widget(), \"main\");\n\n\n\n self.question.init(ds);\n\n self.stack.add_named(self.question.widget(), \"question\");\n\n\n\n // connect the back button of the question.\n\n let me = self.clone();\n\n self.question.connect_back(move |_| {\n\n me.show_main();\n\n });\n\n\n", "file_path": "src/ui/view/section.rs", "rank": 49, "score": 6.0905898566153205 }, 
{ "content": " .subsubsections()\n\n .iter()\n\n .map(|subsubsection| subsubsection.into())\n\n .collect(),\n\n }\n\n }\n\n}\n\n\n\nimpl From<&SubSubSection> for String {\n\n fn from(subsubsection: &SubSubSection) -> String {\n\n subsubsection.name().into()\n\n }\n\n}\n\n\n\nimpl DataStoreQuestion {\n\n pub fn load(self) -> Result<Question, Box<Error>> {\n\n Ok(Question::new(\n\n self.id,\n\n self.question,\n\n self.answers,\n", "file_path": "src/datastore/file.rs", "rank": 50, "score": 6.076302926545318 }, { "content": "}\n\n\n\nimpl View for SectionView {\n\n fn widget(&self) -> gtk::Widget {\n\n self.stack.clone().upcast()\n\n }\n\n}\n\n\n\nimpl Labeled for SectionView {\n\n fn label(&self) -> gtk::Label {\n\n self.label.clone()\n\n }\n\n}\n", "file_path": "src/ui/view/section.rs", "rank": 51, "score": 5.973836525922584 }, { "content": "\n\n /// Retrieves a subsubsection.\n\n pub fn subsubsection(&self, n: usize) -> Option<&SubSubSection> {\n\n if n == 0 {\n\n None\n\n } else {\n\n self.subsubsections.get(n - 1)\n\n }\n\n }\n\n\n\n /// Gets a list of all subsubsections in this subsection.\n\n pub fn subsubsections(&self) -> &Vec<SubSubSection> {\n\n &self.subsubsections\n\n }\n\n}\n\n\n\nimpl SubSubSection {\n\n pub fn new(name: String) -> SubSubSection {\n\n SubSubSection { name: name }\n\n }\n", "file_path": "src/datastore.rs", "rank": 52, "score": 5.9572449270702705 }, { "content": "\n\n /// Get the name of this subsubsection.\n\n pub fn name(&self) -> &str {\n\n &self.name\n\n }\n\n}\n\n\n\n/// Represents a single question.\n\nimpl Question {\n\n pub fn new(\n\n id: String,\n\n question: String,\n\n answers: Vec<String>,\n\n subsection: usize,\n\n subsubsection: usize,\n\n history: Vec<History>,\n\n ) -> Question {\n\n Question {\n\n id: id,\n\n question: question,\n", "file_path": "src/datastore.rs", "rank": 53, "score": 5.9525723455388455 }, { "content": " pub fn new() -> OverView {\n\n let body = gtk::Grid::new();\n\n let label = 
gtk::Label::new(\"Übersicht\");\n\n let title = gtk::Label::new(None);\n\n\n\n OverView {\n\n body: body,\n\n label: label,\n\n title: title,\n\n section_labels: Rc::new(RefCell::new(Vec::new())),\n\n section_charts: Rc::new(RefCell::new(Vec::new())),\n\n }\n\n }\n\n\n\n pub fn init(&self, datastore: &DataStore) {\n\n // this method may be called multiple times, so here we clean\n\n // out the trash\n\n self.body.foreach(|widget| {\n\n self.body.remove(widget);\n\n });\n", "file_path": "src/ui/view/overview.rs", "rank": 54, "score": 5.726907511962244 }, { "content": " cairo.set_source_rgba(0.80, 0.20, 0.20, 0.8);\n\n cairo.stroke();\n\n }\n\n\n\n Inhibit(false)\n\n });\n\n }\n\n }\n\n}\n\n\n\nimpl View for OverView {\n\n fn widget(&self) -> gtk::Widget {\n\n self.body.clone().upcast()\n\n }\n\n}\n\n\n\nimpl Labeled for OverView {\n\n fn label(&self) -> gtk::Label {\n\n self.label.clone()\n\n }\n\n}\n", "file_path": "src/ui/view/overview.rs", "rank": 55, "score": 5.611568981863492 }, { "content": "\n\nimpl Controller for PractiseController {\n\n fn startup(&self) {\n\n self.view.setup();\n\n }\n\n\n\n fn activate(&self) {\n\n self.activate_choose();\n\n }\n\n\n\n fn shutdown(&self) {}\n\n}\n", "file_path": "src/ui/controller/practise.rs", "rank": 56, "score": 5.4234486617132305 }, { "content": " }\n\n}\n\n\n\nimpl DataStoreSubSection {\n\n pub fn load(self) -> Result<SubSection, Box<Error>> {\n\n Ok(SubSection::new(\n\n self.name,\n\n self.subsubsections\n\n .into_iter()\n\n .map(|s| SubSubSection::new(s))\n\n .collect(),\n\n ))\n\n }\n\n}\n\n\n\nimpl From<&SubSection> for DataStoreSubSection {\n\n fn from(subsection: &SubSection) -> Self {\n\n DataStoreSubSection {\n\n name: subsection.name().into(),\n\n subsubsections: subsection\n", "file_path": "src/datastore/file.rs", "rank": 57, "score": 5.394571691703563 }, { "content": " match (has_non_red, is_all_green) {\n\n (true, true) => QuestionState::Green,\n\n (true, false) => QuestionState::Yellow,\n\n (false, _) => 
QuestionState::Red,\n\n }\n\n }\n\n}\n\n\n\nimpl SubSection {\n\n pub fn new(name: String, subsubsections: Vec<SubSubSection>) -> SubSection {\n\n SubSection {\n\n name: name,\n\n subsubsections: subsubsections,\n\n }\n\n }\n\n\n\n /// Get the name of this subsection.\n\n pub fn name(&self) -> &str {\n\n &self.name\n\n }\n", "file_path": "src/datastore.rs", "rank": 58, "score": 5.273425018187836 }, { "content": "\n\n pub fn subsection(&self, n: usize) -> Option<&SubSection> {\n\n if n == 0 {\n\n None\n\n } else {\n\n self.subsections.get(n - 1)\n\n }\n\n }\n\n\n\n pub fn subsections(&self) -> &Vec<SubSection> {\n\n &self.subsections\n\n }\n\n\n\n pub fn subsubsection(&self, ss: usize, sss: usize) -> Option<&SubSubSection> {\n\n match self.subsection(ss) {\n\n Some(ss) => ss.subsubsection(sss),\n\n _ => None,\n\n }\n\n }\n\n\n", "file_path": "src/datastore.rs", "rank": 59, "score": 5.250111895824587 }, { "content": " answers: answers,\n\n subsection: subsection,\n\n subsubsection: subsubsection,\n\n history: history,\n\n }\n\n }\n\n\n\n /// Identifier string of question. Ideally unique.\n\n pub fn id(&self) -> &str {\n\n &self.id\n\n }\n\n\n\n /// Question string.\n\n pub fn question(&self) -> &str {\n\n &self.question\n\n }\n\n\n\n /// List of answers (as string) the question has. 
The first one is the\n\n /// correct one.\n\n pub fn answers(&self) -> &Vec<String> {\n", "file_path": "src/datastore.rs", "rank": 60, "score": 5.190702819088507 }, { "content": " self.section\n\n .get_style_context()\n\n .add_class(\"question-subsection\");\n\n self.section.set_xalign(0.0);\n\n self.section.set_line_wrap(true);\n\n self.subsection\n\n .get_style_context()\n\n .add_class(\"question-subsubsection\");\n\n self.subsection.set_xalign(0.0);\n\n self.subsection.set_line_wrap(true);\n\n self.id.set_xalign(0.5);\n\n self.id.set_yalign(0.0);\n\n self.id.get_style_context().add_class(\"question-id\");\n\n self.question.set_xalign(0.0);\n\n self.question.set_line_wrap(true);\n\n self.question.get_style_context().add_class(\"question-text\");\n\n }\n\n\n\n pub fn set_section(&self, text: &str) {\n\n self.section.set_text(text);\n", "file_path": "src/ui/view/practise.rs", "rank": 61, "score": 5.139361445075913 }, { "content": " if let Some(label) = self.get_answer_label(row) {\n\n label.show();\n\n label.set_text(text);\n\n label.set_xalign(0.0);\n\n label.set_line_wrap(true);\n\n }\n\n }\n\n\n\n fn get_answer_button(&self, row: usize) -> Option<gtk::Button> {\n\n match self.answers.get_child_at(0, row as i32) {\n\n Some(widget) => widget.downcast().ok(),\n\n None => None,\n\n }\n\n }\n\n\n\n fn get_answer_label(&self, row: usize) -> Option<gtk::Label> {\n\n match self.answers.get_child_at(1, row as i32) {\n\n Some(widget) => widget.downcast().ok(),\n\n None => None,\n\n }\n", "file_path": "src/ui/view/practise.rs", "rank": 62, "score": 5.001034277883018 }, { "content": " }\n\n\n\n pub fn startup(&self) {\n\n self.view.setup();\n\n }\n\n\n\n pub fn activate(&self) {\n\n self.activate_title();\n\n self.activate_buttons();\n\n }\n\n\n\n pub fn shutdown(&self) {}\n\n\n\n pub fn view(&self) -> &SectionOverView {\n\n &self.view\n\n }\n\n\n\n fn activate_title(&self) {\n\n if let Some(section) = self.data.borrow().section(self.index) {\n\n 
self.view.set_title(section.name());\n", "file_path": "src/ui/controller/section_overview.rs", "rank": 63, "score": 4.914315125999223 }, { "content": " }\n\n\n\n pub fn connect_back<F: Fn() + 'static>(&self, fun: F) {\n\n self.view.connect_back(move |_| {\n\n fun();\n\n });\n\n }\n\n\n\n pub fn activate_choose(&self) {\n\n let me = self.clone();\n\n self.view.connect_choose(move |button, index| {\n\n // compute actual answer number\n\n let num = me.permutation.borrow().apply_inv_idx(index);\n\n\n\n {\n\n // record answer (in a different scope so the borrowed mut\n\n // data doesn't prevent it from being able to borrow it as\n\n // immutable later).\n\n let mut data = me.data.borrow_mut();\n\n if let Some(section) = data.section_mut(me.section) {\n", "file_path": "src/ui/controller/practise.rs", "rank": 64, "score": 4.89236875109861 }, { "content": "\n\n pub fn time_since(&self) -> Option<Duration> {\n\n match self.time.elapsed() {\n\n Ok(duration) => Some(duration),\n\n _ => None,\n\n }\n\n }\n\n}\n\n\n\n#[test]\n", "file_path": "src/datastore.rs", "rank": 65, "score": 4.791608359833547 }, { "content": " self.view.show(\"main\", gtk::StackTransitionType::SlideRight);\n\n }\n\n\n\n /// Switch to the practise view.\n\n pub fn show_practise(&self) {\n\n self.practise.show();\n\n self.view\n\n .show(\"practise\", gtk::StackTransitionType::SlideLeft);\n\n }\n\n\n\n fn activate_overview_buttons(&self) {\n\n let controller = self.clone();\n\n self.overview.setup_buttons(move |filter| {\n\n controller.practise.set_filter(filter);\n\n controller.show_practise();\n\n });\n\n\n\n let controller = self.clone();\n\n self.overview.view().connect_practise(move || {\n\n controller.practise.set_filter(QuestionFilter::All);\n", "file_path": "src/ui/controller/section.rs", "rank": 66, "score": 4.78908882644768 }, { "content": " }\n\n\n\n pub fn short(&self) -> &str {\n\n &self.short\n\n }\n\n\n\n pub fn count(&self) -> usize {\n\n self.questions.len()\n\n }\n\n\n\n pub fn 
count_by_state(&self, state: QuestionState) -> usize {\n\n self.questions.iter().filter(|q| q.state() == state).count()\n\n }\n\n\n\n pub fn questions(&self) -> &Vec<Question> {\n\n &self.questions\n\n }\n\n\n\n pub fn question_mut(&mut self, n: usize) -> Option<&mut Question> {\n\n self.questions.get_mut(n)\n", "file_path": "src/datastore.rs", "rank": 67, "score": 4.66757552381946 }, { "content": " exam: gtk::Button::new(),\n\n practise: gtk::Button::new(),\n\n }\n\n }\n\n\n\n pub fn setup(&self) {\n\n self.title.set_hexpand(true);\n\n self.title.get_style_context().add_class(\"title\");\n\n self.subsections.set_hexpand(true);\n\n self.body.set_margin_top(10);\n\n self.body.set_margin_bottom(10);\n\n self.body.set_margin_start(10);\n\n self.body.set_margin_end(10);\n\n self.body.set_column_spacing(20);\n\n self.body.set_row_spacing(20);\n\n self.body.set_column_homogeneous(true);\n\n self.body.attach(&self.title, 0, 0, 2, 1);\n\n self.body.attach(&self.subsections, 0, 1, 2, 1);\n\n self.body.attach(&self.practise, 0, 2, 1, 1);\n\n self.body.attach(&self.exam, 1, 2, 1, 1);\n", "file_path": "src/ui/view/section_overview.rs", "rank": 68, "score": 4.436132998419219 }, { "content": " self.name,\n\n self.short,\n\n self.questions\n\n .into_iter()\n\n .map(|s| s.load().unwrap())\n\n .collect(),\n\n self.subsections\n\n .into_iter()\n\n .map(|s| s.load().unwrap())\n\n .collect(),\n\n ))\n\n }\n\n}\n\n\n\nimpl From<&Section> for DataStoreFileSection {\n\n fn from(section: &Section) -> DataStoreFileSection {\n\n DataStoreFileSection {\n\n name: section.name().into(),\n\n short: section.short().into(),\n\n questions: section\n", "file_path": "src/datastore/file.rs", "rank": 69, "score": 4.360833173540213 }, { "content": "\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct SubSection {\n\n name: String,\n\n subsubsections: Vec<SubSubSection>,\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct SubSubSection {\n\n name: String,\n\n}\n\n\n\n#[derive(Debug, 
Clone)]\n\npub struct DataStore {\n\n sections: Vec<Section>,\n\n filename: PathBuf,\n\n}\n\n\n\nimpl DataStore {\n\n pub fn new() -> DataStore {\n", "file_path": "src/datastore.rs", "rank": 70, "score": 4.193115978763631 }, { "content": " self.body.attach(&label, i as i32, 1, 1, 1);\n\n self.section_labels.borrow_mut().push(label);\n\n\n\n let area = gtk::DrawingArea::new();\n\n area.set_size_request(100, 100);\n\n area.set_hexpand(true);\n\n self.body.attach(&area, i as i32, 2, 1, 1);\n\n self.section_charts.borrow_mut().push(area);\n\n }\n\n }\n\n\n\n pub fn update(&self, datastore: &DataStore) {\n\n for (i, section) in datastore.sections().iter().enumerate() {\n\n // title\n\n self.section_labels.borrow()[i].set_text(section.short());\n\n self.section_labels.borrow()[i]\n\n .get_style_context()\n\n .add_class(\"subtitle\");\n\n\n\n let count = section.count();\n", "file_path": "src/ui/view/overview.rs", "rank": 71, "score": 3.849360908070844 }, { "content": "\n\n self.section_labels.borrow_mut().clear();\n\n self.section_charts.borrow_mut().clear();\n\n\n\n self.body.set_margin_top(10);\n\n self.body.set_margin_bottom(10);\n\n self.body.set_margin_start(10);\n\n self.body.set_margin_end(10);\n\n self.body.set_column_spacing(20);\n\n self.body.set_row_spacing(20);\n\n self.body.set_column_homogeneous(true);\n\n self.title.set_text(\"Übersicht\");\n\n self.title.get_style_context().add_class(\"title\");\n\n self.title.set_hexpand(true);\n\n self.body\n\n .attach(&self.title, 0, 0, datastore.sections().len() as i32, 1);\n\n\n\n for (i, _section) in datastore.sections().iter().enumerate() {\n\n let label = gtk::Label::new(None);\n\n label.set_hexpand(true);\n", "file_path": "src/ui/view/overview.rs", "rank": 72, "score": 3.6709158823387664 }, { "content": " DataStore {\n\n sections: Vec::new(),\n\n filename: PathBuf::new(),\n\n }\n\n }\n\n\n\n pub fn load(path: &Path) -> Result<DataStore, Box<Error>> {\n\n let file = File::open(path)?;\n\n let reader = 
BufReader::new(file);\n\n let ds: file::DataStoreFile = serde_yaml::from_reader(reader)?;\n\n\n\n Ok(DataStore {\n\n // FIXME: error handling.\n\n sections: ds.sections.into_iter().map(|s| s.load().unwrap()).collect(),\n\n filename: path.to_path_buf(),\n\n })\n\n }\n\n\n\n pub fn save_as(&self, path: &Path) -> Result<(), std::io::Error> {\n\n let file = OpenOptions::new().write(true).truncate(true).open(path)?;\n", "file_path": "src/datastore.rs", "rank": 73, "score": 3.6317494740482847 }, { "content": "# Ham Radio Exam Trainer [![Build Status](https://travis-ci.org/xfbs/afp.svg?branch=master)](https://travis-ci.org/xfbs/afp)\n\n\n\nTranslation: [🇩🇪 Deutsch](README.de.md)\n\n\n\nA small tool to study for the (German) ham radio exam. It's written in Rust, uses GTK+3 and [gtk-rs](https://github.com/gtk-rs/gtk) to be cross-platform and look somewhat decent. Documentation is available [here](https://xfbs.github.io/afp/afp).\n\n\n\n## Installation\n\n\n\nShould work on all Platforms somehow, but only macOS and Linux are officially supported. 
You need to install GTK+3, which on macOS you can do with\n\n\n\n brew install gtk+3 pkg-config\n\n export PKG_CONFIG_PATH=\"/usr/local/opt/libffi/lib/pkgconfig\"\n\n\n\nYou can build it with cargo:\n\n\n\n cargo build\n\n\n\nYou can run it:\n\n\n\n cargo run\n\n\n\nAnd it's advised to run tests to see if everything works as intended.\n\n\n\n cargo test\n\n\n\n## License\n\n\n\nSee [LICENSE.md](LICENSE.md).\n", "file_path": "README.md", "rank": 74, "score": 3.5707905457994573 }, { "content": "\n\n /// Creates buttons for each question with specified target function.\n\n pub fn setup_buttons<F: Fn(QuestionFilter) + Clone + 'static>(&self, f: F) {\n\n // TODO: save fun?\n\n let data = self.data.borrow();\n\n if let Some(section) = data.section(self.index) {\n\n // go through all the questions\n\n for (ss_id, ss) in section.subsections().iter().enumerate() {\n\n let button = self.view.add_button(&format!(\"{}\", ss_id + 1));\n\n button.set_tooltip_text(ss.name());\n\n let filter = QuestionFilter::SubSection(ss_id);\n\n let fun = f.clone();\n\n button.connect_clicked(move |_| {\n\n fun(filter);\n\n });\n\n\n\n for (sss_id, sss) in ss.subsubsections().iter().enumerate() {\n\n let button = self\n\n .view\n\n .add_button(&format!(\"{}.{}\", ss_id + 1, sss_id + 1));\n", "file_path": "src/ui/controller/section_overview.rs", "rank": 75, "score": 3.5153874636499505 }, { "content": "\n\nimpl PractiseController {\n\n pub fn new(data: &Rc<RefCell<DataStore>>, section: usize) -> PractiseController {\n\n PractiseController {\n\n section: section,\n\n question: Rc::new(Cell::new(0)),\n\n permutation: Rc::new(RefCell::new(Permutation::one(0))),\n\n view: PractiseView::new(),\n\n data: data.clone(),\n\n filter: Rc::new(Cell::new(QuestionFilter::All)),\n\n }\n\n }\n\n\n\n pub fn view(&self) -> &PractiseView {\n\n &self.view\n\n }\n\n\n\n pub fn show(&self) {\n\n let data = self.data.borrow();\n\n if let Some(section) = data.section(self.section) {\n", "file_path": 
"src/ui/controller/practise.rs", "rank": 76, "score": 3.509967327629541 }, { "content": " if let Some(question) = section.question_mut(me.question.get()) {\n\n question.answer(num);\n\n }\n\n }\n\n }\n\n\n\n // mark button as red or show next question if it was correct.\n\n if num != 0 {\n\n // answer is wrong. mark button.\n\n button.get_style_context().add_class(\"red\");\n\n } else {\n\n me.show();\n\n }\n\n });\n\n }\n\n\n\n pub fn set_filter(&self, filter: QuestionFilter) {\n\n self.filter.set(filter);\n\n }\n\n}\n", "file_path": "src/ui/controller/practise.rs", "rank": 77, "score": 3.3212369169002653 }, { "content": "\n\n pub fn activate(&self) {\n\n self.view.init(&self.data.borrow());\n\n self.view.update(&self.data.borrow());\n\n }\n\n\n\n pub fn shutdown(&self) {}\n\n\n\n pub fn view(&self) -> &OverView {\n\n &self.view\n\n }\n\n}\n", "file_path": "src/ui/controller/overview.rs", "rank": 78, "score": 2.914188668304665 }, { "content": "# The MIT License (MIT)\n\n\n\nCopyright © 2019 Patrick Elsen (xfbs)\n\n\n\nPermission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:\n\n\n\nThe above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.\n\n\n\n**THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.**\n", "file_path": "LICENSE.md", "rank": 79, "score": 2.8613191369382407 }, { "content": " time: time,\n\n choice: choice,\n\n }\n\n }\n\n\n\n pub fn choose(choice: usize) -> History {\n\n Self::new(SystemTime::now(), choice)\n\n }\n\n\n\n pub fn time(&self) -> SystemTime {\n\n self.time\n\n }\n\n\n\n pub fn choice(&self) -> usize {\n\n self.choice\n\n }\n\n\n\n pub fn correct(&self) -> bool {\n\n self.choice == 0\n\n }\n", "file_path": "src/datastore.rs", "rank": 80, "score": 2.853676596322223 }, { "content": " &self.answers\n\n }\n\n\n\n /// Record an answer.\n\n pub fn answer(&mut self, n: usize) {\n\n self.history.push(History::choose(n))\n\n }\n\n\n\n /// Subsection ID of this question.\n\n pub fn subsection(&self) -> usize {\n\n self.subsection\n\n }\n\n\n\n /// Subsubsection ID of this question.\n\n pub fn subsubsection(&self) -> usize {\n\n self.subsubsection\n\n }\n\n\n\n /// State (if the questions is considered to be answered correctly).\n\n pub fn state(&self) -> QuestionState {\n", "file_path": "src/datastore.rs", "rank": 81, "score": 2.754266737255433 }, { "content": " let writer = BufWriter::new(&file);\n\n let data: file::DataStoreFile = self.into();\n\n serde_yaml::to_writer(writer, &data).unwrap();\n\n Ok(())\n\n }\n\n\n\n pub fn save(&self) -> Result<(), std::io::Error> {\n\n self.save_as(&self.filename)\n\n }\n\n\n\n pub fn sections(&self) -> &Vec<Section> {\n\n &self.sections\n\n }\n\n\n\n pub fn section(&self, n: usize) -> Option<&Section> {\n\n self.sections.get(n)\n\n }\n\n\n\n pub fn section_mut(&mut self, n: usize) -> Option<&mut Section> {\n\n self.sections.get_mut(n)\n", "file_path": "src/datastore.rs", "rank": 82, "score": 2.5472201962717094 }, { "content": " SubSection(usize),\n\n 
SubSubSection(usize, usize),\n\n}\n\n\n\nimpl QuestionFilter {\n\n pub fn includes(self, other: QuestionFilter) -> bool {\n\n match self {\n\n QuestionFilter::All => true,\n\n QuestionFilter::SubSection(ss) => match other {\n\n QuestionFilter::SubSection(ssp) => ss == ssp,\n\n QuestionFilter::SubSubSection(ssp, _) => ss == ssp,\n\n _ => false,\n\n },\n\n QuestionFilter::SubSubSection(ss, sss) => match other {\n\n QuestionFilter::SubSubSection(ssp, sssp) => ss == ssp && sss == sssp,\n\n _ => false,\n\n },\n\n }\n\n }\n\n}\n", "file_path": "src/datastore.rs", "rank": 83, "score": 2.4840758843428916 }, { "content": "# Amateurfunkprüfer [![Build Status](https://travis-ci.org/xfbs/afp.svg?branch=master)](https://travis-ci.org/xfbs/afp)\n\n\n\nÜbersetzung: [🇬🇧 Englisch](README.md)\n\n\n\nEin kleines Tool zum Üben für die Amateurfunkprüfung. Geschrieben in [Rust](https://rust-lang.org), welches dank [gtk](https://www.gtk.org) und [gtk-rs](https://gtk-rs.org/) auf allen möglichen Platformen läuft. Es steht unter der [MIT Lizenz](LICENSE.md). Dokumentation ist [hier](https://xfbs.github.com/afp/afp) verfügbar.\n\n\n\n## Installieren\n\n\n\nEs wird GTK+3 benötigt, das kann (unter macOS) einfach mit Homebrew installiert werden. 
Außerdem muss ein Pfad gesetzt werden, damit pkg-config libffi findet.\n\n\n\n brew install gtk+3 pkg-config\n\n export PKG_CONFIG_PATH=\"/usr/local/opt/libffi/lib/pkgconfig\"\n\n\n\nDas Tool kann mit `cargo` kompiliert und ausgeführt werden.\n\n\n\n cargo build\n\n cargo run\n\n\n\nMitgelieferte Tests können ebenso ausgeführt werden.\n\n\n\n cargo test\n\n\n\n## Lizenz\n\n\n\nSiehe [LICENSE.md](LICENSE.md).\n", "file_path": "README.de.md", "rank": 84, "score": 2.416785652063984 }, { "content": "\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct History {\n\n time: SystemTime,\n\n choice: usize,\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct Question {\n\n id: String,\n\n question: String,\n\n answers: Vec<String>,\n\n subsection: usize,\n\n subsubsection: usize,\n\n history: Vec<History>,\n\n}\n\n\n\n#[derive(Debug, PartialEq)]\n\npub enum QuestionState {\n\n /// never tried or always wrong\n", "file_path": "src/datastore.rs", "rank": 85, "score": 2.3705289522750714 }, { "content": " self.practise.startup();\n\n }\n\n\n\n fn activate(&self) {\n\n self.overview.activate();\n\n self.activate_overview_buttons();\n\n\n\n self.practise.activate();\n\n self.activate_practise_buttons();\n\n\n\n self.activate_label();\n\n self.activate_views();\n\n }\n\n\n\n fn shutdown(&self) {\n\n self.overview.shutdown();\n\n self.practise.shutdown();\n\n }\n\n}\n", "file_path": "src/ui/controller/section.rs", "rank": 86, "score": 2.3056897194941275 }, { "content": " Red,\n\n\n\n /// correct at least once in last three attempts\n\n Yellow,\n\n\n\n /// corrent three times last three attampts\n\n Green,\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq)]\n\npub struct Section {\n\n name: String,\n\n short: String,\n\n questions: Vec<Question>,\n\n subsections: Vec<SubSection>,\n\n}\n\n\n\n#[derive(Copy, Clone, Debug, PartialEq)]\n\npub enum QuestionFilter {\n\n All,\n", "file_path": "src/datastore.rs", "rank": 87, "score": 2.0636144285191507 }, { "content": " }\n\n\n\n pub fn 
question(&self, n: usize) -> Option<&Question> {\n\n self.questions.get(n)\n\n }\n\n\n\n /// Find a question that might be a good candidate to practise that\n\n /// matches the filter.\n\n pub fn practise(&self, filter: QuestionFilter) -> Option<usize> {\n\n let candidates = self\n\n .questions\n\n .iter()\n\n .enumerate()\n\n .filter(|(_, question)| filter.includes(question.filter()))\n\n .map(|(index, _)| index)\n\n .collect::<Vec<usize>>();\n\n\n\n let mut rng = rand::thread_rng();\n\n candidates.choose(&mut rng).map(|v| *v)\n\n }\n", "file_path": "src/datastore.rs", "rank": 88, "score": 1.9451527813954355 }, { "content": " view: SectionView::new(),\n\n overview: SectionOverviewController::new(data, index),\n\n practise: PractiseController::new(data, index),\n\n data: data.clone(),\n\n filter: Rc::new(Cell::new(QuestionFilter::All)),\n\n }\n\n }\n\n\n\n pub fn view(&self) -> &SectionView {\n\n &self.view\n\n }\n\n\n\n fn activate_label(&self) {\n\n if let Some(section) = self.data.borrow().section(self.index) {\n\n self.view.set_label(section.short());\n\n }\n\n }\n\n\n\n /// Switch to the section overview.\n\n pub fn show_overview(&self) {\n", "file_path": "src/ui/controller/section.rs", "rank": 89, "score": 1.9137643489918517 }, { "content": " .questions()\n\n .iter()\n\n .map(|question| question.into())\n\n .collect(),\n\n subsections: section\n\n .subsections()\n\n .iter()\n\n .map(|subsection| subsection.into())\n\n .collect(),\n\n }\n\n }\n\n}\n\n\n\nimpl From<&Question> for DataStoreQuestion {\n\n fn from(question: &Question) -> Self {\n\n DataStoreQuestion {\n\n id: question.id().into(),\n\n question: question.question().into(),\n\n answers: question.answers().clone(),\n\n subsection: question.subsection(),\n", "file_path": "src/datastore/file.rs", "rank": 90, "score": 1.7449237604666514 }, { "content": " subsubsection: question.subsubsection(),\n\n history: question\n\n .history()\n\n .iter()\n\n .map(|history| history.into())\n\n .collect(),\n\n }\n\n 
}\n\n}\n\n\n\nimpl From<&History> for DataStoreHistory {\n\n fn from(history: &History) -> Self {\n\n DataStoreHistory {\n\n time: history\n\n .time()\n\n .duration_since(SystemTime::UNIX_EPOCH)\n\n .map(|time| time.as_secs())\n\n .unwrap_or(0),\n\n choice: history.choice(),\n\n }\n", "file_path": "src/datastore/file.rs", "rank": 91, "score": 1.7233261893964213 }, { "content": " let me = self.clone();\n\n let ds: Rc<RefCell<DataStore>> = ds.clone();\n\n self.question.connect_choose(move |question, choice| {\n\n println!(\"choose: {} {}\", question, choice);\n\n\n\n let mut ds = ds.borrow_mut();\n\n match ds.section_mut(me.index) {\n\n Some(section) => {\n\n match section.question_mut(question) {\n\n Some(question) => {\n\n question.answer(choice);\n\n },\n\n None => panic!(),\n\n }\n\n\n\n if choice == 0 {\n\n let next = section.practise();\n\n if let Some(question) = section.question(next) {\n\n me.show_question(next, question);\n\n } else {\n", "file_path": "src/ui/view/section.rs", "rank": 92, "score": 1.5185570441527636 }, { "content": " // get new question\n\n section\n\n .practise(self.filter.get())\n\n .map(|question| self.question.set(question))\n\n .unwrap_or_else(|| panic!(\"can't load question!\"));\n\n\n\n // display question\n\n if let Some(question) = section.question(self.question.get()) {\n\n if let Some(subsection) = section.subsection(question.subsection()) {\n\n self.view.set_section(subsection.name());\n\n\n\n if let Some(subsubsection) = subsection.subsubsection(question.subsubsection())\n\n {\n\n self.view.set_subsection(subsubsection.name());\n\n }\n\n }\n\n\n\n self.view.set_id(question.id());\n\n self.view.set_question(question.question());\n\n self.create_permutation(question.answers().len());\n", "file_path": "src/ui/controller/practise.rs", "rank": 93, "score": 1.4383504238963636 }, { "content": " let correct_of_last_three = self\n\n .history\n\n .iter()\n\n .rev()\n\n .take(3)\n\n .map(|h| h.correct())\n\n .map(|c| if c { 1 } else { 0 
})\n\n .sum();\n\n\n\n match correct_of_last_three {\n\n 3 => QuestionState::Green,\n\n 0 => QuestionState::Red,\n\n _ => QuestionState::Yellow,\n\n }\n\n }\n\n\n\n /// Time since the question has been last answered.\n\n pub fn stale_time(&self) -> Option<Duration> {\n\n match self.history.last() {\n\n Some(entry) => entry.time_since(),\n", "file_path": "src/datastore.rs", "rank": 94, "score": 1.4324552691836656 }, { "content": " pub fn state(&self, filter: QuestionFilter) -> QuestionState {\n\n let mut has_non_red = false;\n\n let mut is_all_green = true;\n\n\n\n self.questions()\n\n .iter()\n\n .filter(|question| filter.includes(question.filter()))\n\n .for_each(|question| match question.state() {\n\n QuestionState::Green => {\n\n has_non_red = true;\n\n }\n\n QuestionState::Yellow => {\n\n has_non_red = true;\n\n is_all_green = false;\n\n }\n\n QuestionState::Red => {\n\n is_all_green = false;\n\n }\n\n });\n\n\n", "file_path": "src/datastore.rs", "rank": 95, "score": 1.3664378717816703 }, { "content": " }\n\n }\n\n\n\n fn activate_buttons(&self) {\n\n // every time we show the view, update the color for the buttons.\n\n let controller = self.clone();\n\n self.view.widget().connect_map(move |_| {\n\n let data = controller.data.borrow();\n\n if let Some(section) = data.section(controller.index) {\n\n // go through all the questions\n\n let mut i = 0;\n\n for (ss_id, ss) in section.subsections().iter().enumerate() {\n\n // TODO use filter\n\n let state = section.state(QuestionFilter::SubSection(ss_id));\n\n controller.set_button_state(i, state);\n\n i += 1;\n\n\n\n for (sss_id, _) in ss.subsubsections().iter().enumerate() {\n\n let state = section.state(QuestionFilter::SubSubSection(ss_id, sss_id));\n\n controller.set_button_state(i, state);\n", "file_path": "src/ui/controller/section_overview.rs", "rank": 96, "score": 1.2261747921435768 }, { "content": " self.body.set_margin_top(10);\n\n self.body.set_margin_bottom(10);\n\n self.body.set_margin_start(10);\n\n 
self.body.set_margin_end(10);\n\n self.body.set_column_spacing(10);\n\n self.body.set_row_spacing(15);\n\n self.body.set_column_homogeneous(true);\n\n self.answers.set_column_homogeneous(true);\n\n self.answers.set_column_spacing(10);\n\n self.answers.set_row_spacing(5);\n\n self.title_box.add(&self.back);\n\n self.title_box.set_center_widget(&self.title);\n\n self.body.attach(&self.title_box, 0, 0, 7, 1);\n\n self.body.attach(&self.section, 1, 1, 5, 1);\n\n self.body.attach(&self.subsection, 1, 2, 5, 1);\n\n self.body.attach(&self.id, 1, 3, 1, 1);\n\n self.body.attach(&self.question, 2, 3, 4, 1);\n\n self.body.attach(&self.answers, 1, 4, 5, 1);\n\n self.title.get_style_context().add_class(\"title\");\n\n self.section.set_hexpand(true);\n", "file_path": "src/ui/view/practise.rs", "rank": 97, "score": 1.1827316613920438 } ]
Rust
hphp/hack/src/rupro/lib/shallow_decl_provider/provider.rs
ianhoffman/hhvm
decc4e479e0e689c65f936f0828cb761d34075b1
use std::rc::Rc; use std::{fs, io}; use bumpalo::Bump; use crate::decl_defs::{ShallowClass, ShallowFun, ShallowMethod}; use crate::decl_ty_provider::DeclTyProvider; use crate::pos::{RelativePath, RelativePathCtx, Symbol}; use crate::reason::Reason; use crate::shallow_decl_provider::ShallowDeclCache; #[derive(Debug)] pub struct ShallowDeclProvider<R: Reason> { cache: Rc<dyn ShallowDeclCache<R = R>>, decl_ty_provider: Rc<DeclTyProvider<R>>, relative_path_ctx: Rc<RelativePathCtx>, } impl<R: Reason> ShallowDeclProvider<R> { pub fn new( cache: Rc<dyn ShallowDeclCache<R = R>>, decl_ty_provider: Rc<DeclTyProvider<R>>, relative_path_ctx: Rc<RelativePathCtx>, ) -> Self { Self { cache, decl_ty_provider, relative_path_ctx, } } pub fn get_decl_ty_provider(&self) -> &Rc<DeclTyProvider<R>> { &self.decl_ty_provider } pub fn get_shallow_class(&self, name: &Symbol) -> Option<Rc<ShallowClass<R>>> { self.cache.get_shallow_class(name) } pub fn add_from_oxidized_class(&self, sc: &oxidized_by_ref::shallow_decl_defs::ClassDecl<'_>) { let res = Rc::new(self.utils().mk_shallow_class(sc)); self.cache.put_shallow_class(res.sc_name.id().clone(), res); } pub fn add_from_oxidized_fun( &self, name: &str, sf: &oxidized_by_ref::shallow_decl_defs::FunDecl<'_>, ) { let res = Rc::new(self.utils().mk_shallow_fun(sf)); let name = self.decl_ty_provider.get_pos_provider().mk_symbol(name); self.cache.put_shallow_fun(name, res); } pub fn add_from_oxidized_decls(&self, decls: &oxidized_by_ref::direct_decl_parser::Decls<'_>) { for (name, decl) in decls.iter() { use oxidized_by_ref::direct_decl_parser::Decl::*; match decl { Class(sc) => drop(self.add_from_oxidized_class(sc)), Fun(sf) => drop(self.add_from_oxidized_fun(name, sf)), decl => unimplemented!("new_local_with_decls: {:?}", decl), } } } pub fn add_from_files( &self, filenames: &mut dyn Iterator<Item = &RelativePath>, ) -> io::Result<()> { for rel_fln in filenames { let arena = Bump::new(); let fln = rel_fln.to_absolute(&self.relative_path_ctx); let 
text = arena.alloc_slice_clone(fs::read_to_string(&fln)?.as_bytes()); let rel_path = oxidized::relative_path::RelativePath::make( oxidized::relative_path::Prefix::Dummy, fln, ); let parsed_file = stack_limit::with_elastic_stack(|stack_limit| { direct_decl_parser::parse_decls( oxidized_by_ref::decl_parser_options::DeclParserOptions::DEFAULT, rel_path.clone(), text, &arena, Some(stack_limit), ) }) .unwrap_or_else(|failure| { panic!( "Rust decl parser FFI exceeded maximum allowed stack of {} KiB", failure.max_stack_size_tried / stack_limit::KI ); }); self.add_from_oxidized_decls(&parsed_file.decls); } Ok(()) } fn utils(&self) -> ShallowDeclUtils<R> { ShallowDeclUtils::new(self.decl_ty_provider.clone()) } } struct ShallowDeclUtils<R: Reason> { decl_ty_provider: Rc<DeclTyProvider<R>>, } impl<R: Reason> ShallowDeclUtils<R> { fn new(decl_ty_provider: Rc<DeclTyProvider<R>>) -> Self { Self { decl_ty_provider } } fn mk_shallow_method( &self, sm: &oxidized_by_ref::shallow_decl_defs::ShallowMethod<'_>, ) -> ShallowMethod<R> { let decl_tys = &self.decl_ty_provider; ShallowMethod { sm_name: decl_tys.get_pos_provider().mk_pos_id_of_ref::<R>(sm.name), sm_type: decl_tys.mk_decl_ty_from_parsed(sm.type_), } } fn mk_shallow_class( &self, sc: &oxidized_by_ref::shallow_decl_defs::ClassDecl<'_>, ) -> ShallowClass<R> { let decl_tys = &self.decl_ty_provider; ShallowClass { sc_name: decl_tys.get_pos_provider().mk_pos_id_of_ref::<R>(sc.name), sc_extends: sc .extends .iter() .map(|ty| decl_tys.mk_decl_ty_from_parsed(ty)) .collect(), sc_methods: sc .methods .iter() .map(|sm| self.mk_shallow_method(sm)) .collect(), } } fn mk_shallow_fun( &self, sf: &oxidized_by_ref::shallow_decl_defs::FunDecl<'_>, ) -> ShallowFun<R> { let decl_tys = &self.decl_ty_provider; ShallowFun { fe_pos: decl_tys.get_pos_provider().mk_pos_of_ref::<R>(sf.pos), fe_type: decl_tys.mk_decl_ty_from_parsed(sf.type_), } } }
use std::rc::Rc; use std::{fs, io}; use bumpalo::Bump; use crate::decl_defs::{ShallowClass, ShallowFun, ShallowMethod}; use crate::decl_ty_provider::DeclTyProvider; use crate::pos::{RelativePath, RelativePathCtx, Symbol}; use crate::reason::Reason; use crate::shallow_decl_provider::ShallowDeclCache; #[derive(Debug)] pub struct ShallowDeclProvider<R: Reason> { cache: Rc<dyn ShallowDeclCache<R = R>>, decl_ty_provider: Rc<DeclTyProvider<R>>, relative_path_ctx: Rc<RelativePathCtx>, } impl<R: Reason> ShallowDeclProvider<R> { pub fn new( cache: Rc<dyn ShallowDeclCache<R = R>>, decl_ty_provider: Rc<DeclTyProvider<R>>, relative_path_ctx: Rc<RelativePathCtx>, ) -> Self { Self { cache, decl_ty_provider, relative_path_ctx, } } pub fn get_decl_ty_provider(&self) -> &Rc<DeclTyProvider<R>> { &self.decl_ty_provider } pub fn get_shallow_class(&self, name: &Symbol) -> Option<Rc<ShallowClass<R>>> { self.cache.get_shallow_class(name) } pub fn add_from_oxidized_class(&self, sc: &oxidized_by_ref::shallow_decl_defs::ClassDecl<'_>) { let res = Rc::new(self.utils().mk_shallow_class(sc)); self.cache.put_shallow_class(res.sc_name.id().clone(), res); } pub fn add_from_oxidized_fun( &self, name: &str, sf: &oxidized_by_ref::shallow_decl_defs::FunDecl<'_>, ) { let res = Rc::new(self.utils().mk_shallow_fun(sf)); let name = self.decl_ty_provider.get_pos_provider().mk_symbol(name); self.cache.put_shallow_fun(name, res); } pub fn add_from_oxidized_decls(&self, decls: &oxidized_by_ref::direct_decl_parser::Decls<'_>) { for (name, decl) in decls.iter() { use oxidized_by_ref::direct_decl_parser::Decl::*; match decl { Class(sc) => drop(self.add_from_oxidized_class(sc)), Fun(sf) => drop(self.add_from_oxidized_fun(name, sf)), decl => unimplemented!("new_local_with_decls: {:?}", decl), } } } pub fn add_from_files( &self, filenames: &mut dyn Iterator<Item = &RelativePath>, ) -> io::Result<()> { for rel_fln in filenames { let arena = Bump::new(); let fln = rel_fln.to_absolute(&self.relative_path_ctx); let 
text = arena.alloc_slice_clone(fs::read_to_string(&fln)?.as_bytes()); let rel_path = oxidized::relative_path::RelativePath::make( oxidized::relative_path::Prefix::Dummy, fln, ); let parsed_file = stack_limit::with_elastic_stack(|stack_limit| { direct_decl_parser::parse_decls( oxidized_by_ref::decl_parser_options::DeclParserOptions::DEFAULT, rel_path.clone(), text, &arena, Some(stack_limit), ) }) .unwrap_or_else(|failure| { panic!( "Rust decl parser FFI exceeded maximum allowed stack of {} KiB", failure.max_stack_size_tried / stack_limit::KI ); }); self.add_from_oxidized_decls(&parsed_file.decls); } Ok(()) } fn utils(&self) -> ShallowDeclUtils<R> { ShallowDeclUtils::new(self.decl_ty_provider.clone()) } } struct ShallowDeclUtils<R: Reason> { decl_ty_provider: Rc<DeclTyProvider<R>>, } impl<R: Reason> ShallowDeclUtils<R> { fn new(decl_ty_provider: Rc<DeclTyProvider<R>>) -> Self { Self { decl_ty_provider } } fn mk_shallow_method( &self, sm: &oxidized_by_ref::shallow_decl_defs::ShallowMethod<'_>, ) -> ShallowMethod<R> { let decl_tys = &self.decl_ty_provider; ShallowMethod { sm_name: decl_tys.get_pos_provider().mk_pos_id_of_ref::<R>(sm.name), sm_type: decl_tys.mk_decl_ty_from_parsed(sm.type_), } } fn mk_shallow_class( &self, sc: &oxidized_by_ref::shallow_decl_defs::ClassDecl<'_>, ) -> ShallowClass<R> { let decl_tys = &self.decl_ty_provider; ShallowClass { sc_name: decl_tys.get_pos_provider().mk_pos_id_of_ref::<R>(sc.name), sc_extends: sc .extends .iter() .map(|ty| decl_tys.mk_decl_ty_from_parsed(ty)) .collect(), sc_methods: sc .methods .iter() .map(|sm| self.mk_shallow_method(sm)) .collect(), } } fn mk_shallow_fun( &self, sf: &oxidized_by_ref::shallow_decl_defs::FunDecl<'_>, ) -> ShallowFun<R> { let decl_tys = &self.dec
}
l_ty_provider; ShallowFun { fe_pos: decl_tys.get_pos_provider().mk_pos_of_ref::<R>(sf.pos), fe_type: decl_tys.mk_decl_ty_from_parsed(sf.type_), } }
function_block-function_prefixed
[]
Rust
src/input/system.rs
alanpoon/crayon
ab320e4cab285e1baee802363f024235883f8aef
use std::sync::{Arc, RwLock}; use crate::application::prelude::{LifecycleListener, LifecycleListenerHandle}; use crate::window::prelude::{Event, EventListener, EventListenerHandle}; use super::events::InputEvent; use super::keyboard::{Key, Keyboard}; use super::mouse::{Mouse, MouseButton}; use super::touchpad::{GesturePan, GestureTap, TouchPad, TouchState}; use super::InputParams; use crate::utils::hash::FastHashSet; use crate::math::prelude::Vector2; pub struct InputSystem { events: EventListenerHandle, lifecycle: LifecycleListenerHandle, state: Arc<InputState>, } struct InputState { touch_emulation: bool, touch_emulation_button: RwLock<Option<MouseButton>>, mouse: RwLock<Mouse>, keyboard: RwLock<Keyboard>, touchpad: RwLock<TouchPad>, } impl EventListener for Arc<InputState> { fn on(&mut self, v: &Event) -> Result<(), failure::Error> { if let Event::InputDevice(v) = *v { match v { InputEvent::MouseMoved { position } => { if self.touch_emulation_button.read().unwrap().is_some() { self.touchpad.write().unwrap().on_touch( 255, TouchState::Move, self.mouse.read().unwrap().position(), ); } self.mouse.write().unwrap().on_move(position) } InputEvent::MousePressed { button } => { if self.touch_emulation { *self.touch_emulation_button.write().unwrap() = Some(button); self.touchpad.write().unwrap().on_touch( 255, TouchState::Start, self.mouse.read().unwrap().position(), ); } self.mouse.write().unwrap().on_button_pressed(button) } InputEvent::MouseReleased { button } => { if *self.touch_emulation_button.read().unwrap() == Some(button) { *self.touch_emulation_button.write().unwrap() = None; self.touchpad.write().unwrap().on_touch( 255, TouchState::End, self.mouse.read().unwrap().position(), ); } self.mouse.write().unwrap().on_button_released(button) } InputEvent::MouseWheel { delta } => { self.mouse.write().unwrap().on_wheel_scroll(delta) } InputEvent::KeyboardPressed { key } => { self.keyboard.write().unwrap().on_key_pressed(key) } InputEvent::KeyboardReleased { key } => { 
self.keyboard.write().unwrap().on_key_released(key) } InputEvent::ReceivedCharacter { character } => { self.keyboard.write().unwrap().on_char(character) } InputEvent::Touch { id, state, position, } => { self.touchpad.write().unwrap().on_touch(id, state, position); } } } Ok(()) } } impl LifecycleListener for Arc<InputState> { fn on_post_update(&mut self) -> Result<(), failure::Error> { self.mouse.write().unwrap().advance(); self.keyboard.write().unwrap().advance(); self.touchpad.write().unwrap().advance(); Ok(()) } } impl Drop for InputSystem { fn drop(&mut self) { crate::application::detach(self.lifecycle); crate::window::detach(self.events); } } impl InputSystem { pub fn new(setup: InputParams) -> Self { debug_assert!(crate::application::valid(), ""); let state = Arc::new(InputState { touch_emulation: setup.touch_emulation, touch_emulation_button: RwLock::new(None), mouse: RwLock::new(Mouse::new(setup.mouse)), keyboard: RwLock::new(Keyboard::new(setup.keyboard)), touchpad: RwLock::new(TouchPad::new(setup.touchpad)), }); InputSystem { state: state.clone(), lifecycle: crate::application::attach(state.clone()), events: crate::window::attach(state), } } pub fn reset(&self) { self.state.mouse.write().unwrap().reset(); self.state.keyboard.write().unwrap().reset(); self.state.touchpad.write().unwrap().reset(); *self.state.touch_emulation_button.write().unwrap() = None; } #[inline] pub fn has_keyboard_attached(&self) -> bool { true } #[inline] pub fn is_key_down(&self, key: Key) -> bool { self.state.keyboard.read().unwrap().is_key_down(key) } #[inline] pub fn is_key_press(&self, key: Key) -> bool { self.state.keyboard.read().unwrap().is_key_press(key) } #[inline] pub fn key_presses(&self) -> FastHashSet<Key> { self.state.keyboard.read().unwrap().key_presses() } #[inline] pub fn is_key_release(&self, key: Key) -> bool { self.state.keyboard.read().unwrap().is_key_release(key) } #[inline] pub fn key_releases(&self) -> FastHashSet<Key> { 
self.state.keyboard.read().unwrap().key_releases() } #[inline] pub fn is_key_repeat(&self, key: Key) -> bool { self.state.keyboard.read().unwrap().is_key_repeat(key) } #[inline] pub fn text(&self) -> String { use std::iter::FromIterator; String::from_iter(self.state.keyboard.read().unwrap().captured_chars()) } #[inline] pub fn has_mouse_attached(&self) -> bool { true } #[inline] pub fn is_mouse_down(&self, button: MouseButton) -> bool { self.state.mouse.read().unwrap().is_button_down(button) } #[inline] pub fn is_mouse_press(&self, button: MouseButton) -> bool { self.state.mouse.read().unwrap().is_button_press(button) } #[inline] pub fn mouse_presses(&self) -> FastHashSet<MouseButton> { self.state.mouse.read().unwrap().mouse_presses() } #[inline] pub fn is_mouse_release(&self, button: MouseButton) -> bool { self.state.mouse.read().unwrap().is_button_release(button) } #[inline] pub fn mouse_releases(&self) -> FastHashSet<MouseButton> { self.state.mouse.read().unwrap().mouse_releases() } #[inline] pub fn is_mouse_click(&self, button: MouseButton) -> bool { self.state.mouse.read().unwrap().is_button_click(button) } #[inline] pub fn is_mouse_double_click(&self, button: MouseButton) -> bool { self.state .mouse .read() .unwrap() .is_button_double_click(button) } #[inline] pub fn mouse_position(&self) -> Vector2<f32> { self.state.mouse.read().unwrap().position() } #[inline] pub fn mouse_movement(&self) -> Vector2<f32> { self.state.mouse.read().unwrap().movement() } #[inline] pub fn mouse_scroll(&self) -> Vector2<f32> { self.state.mouse.read().unwrap().scroll() } #[inline] pub fn has_touchpad_attached(&self) -> bool { true } #[inline] pub fn is_finger_touched(&self, n: usize) -> bool { self.state.touchpad.read().unwrap().is_touched(n) } #[inline] pub fn finger_position(&self, n: usize) -> Option<Vector2<f32>> { self.state.touchpad.read().unwrap().position(n) } #[inline] pub fn finger_tap(&self) -> GestureTap { self.state.touchpad.read().unwrap().tap() } #[inline] pub fn 
finger_double_tap(&self) -> GestureTap { self.state.touchpad.read().unwrap().double_tap() } #[inline] pub fn finger_pan(&self) -> GesturePan { self.state.touchpad.read().unwrap().pan() } }
use std::sync::{Arc, RwLock}; use crate::application::prelude::{LifecycleListener, LifecycleListenerHandle}; use crate::window::prelude::{Event, EventListener, EventListenerHandle}; use super::events::InputEvent; use super::keyboard::{Key, Keyboard}; use super::mouse::{Mouse, MouseButton}; use super::touchpad::{GesturePan, GestureTap, TouchPad, TouchState}; use super::InputParams; use crate::utils::hash::FastHashSet; use crate::math::prelude::Vector2; pub struct InputSystem { events: EventListenerHandle, lifecycle: LifecycleListenerHandle, state: Arc<InputState>, } struct InputState { touch_emulation: bool, touch_emulation_button: RwLock<Option<MouseButton>>, mouse: RwLock<Mouse>, keyboard: RwLock<Keyboard>, touchpad: RwLock<TouchPad>, } impl EventListener for Arc<InputState> { fn on(&mut self, v: &Event) -> Result<(), failure::Error> { if let Event::InputDevice(v) = *v { match v { InputEvent::MouseMoved { position } => { if self.touch_emulation_button.read().unwrap().is_some() { self.touchpad.write().unwrap().on_touch( 255, TouchState::Move, self.mouse.read().unwrap().position(), ); } self.mouse.write().unwrap().on_move(position) } InputEvent::MousePressed { button } => { if self.touch_emulation { *self.touch_emulation_button.write().unwrap() = Some(button); self.touchpad.write().unwrap().on_touch( 255, TouchState::Start, self.mouse.read().unwrap().position(), ); } self.mouse.write().unwrap().on_button_pressed(button) } InputEvent::MouseReleased { button } => { if *self.touch_emulation_button.read().unwrap() == Some(button) { *self.touch_emulation_button.write().unwrap() = None; self.touchpad.write().unwrap().on_touch( 255, TouchState::End, self.mouse.read().unwrap().position(), ); } self.mouse.write().unwrap().on_button_released(button) } InputEvent::MouseWheel { delta } => { self.mouse.write().unwrap().on_wheel_scroll(delta) } InputEvent::KeyboardPressed { key } => { self.keyboard.write().unwrap().on_key_pressed(key) } InputEvent::KeyboardReleased { key } => { 
self.keyboard.write().unwrap().on_key_released(key) } InputEvent::ReceivedCharacter { character } => { self.keyboard.write().unwrap().on_char(character) } InputEvent::Touch { id, state, position, } => { self.touchpad.write().unwrap().on_touch(id, state, position); } } } Ok(()) } } impl LifecycleListener for Arc<InputState> { fn on_post_update(&mut self) -> Result<(), failure::Error> { self.mouse.write().unwrap().advance(); self.keyboard.write().unwrap().advance(); self.touchpad.write().unwrap().advance(); Ok(()) } } impl Drop for InputSystem { fn drop(&mut self) { crate::application::detach(self.lifecycle); crate::window::detach(self.events); } } impl InputSystem { pub fn new(setup: InputParams) -> Self { debug_assert!(crate::application::valid(), ""); let state = Arc::new(InputState { touch_emulation: setup.touch_emulation, touch_emulation_button: RwLock::new(None), mouse: RwLock::new(Mouse::new(setup.mouse)), keyboard: RwLock::new(Keyboard::new(setup.keyboard)), touchpad: RwLock::new(TouchPad::new(setup.touchpad)), }); InputSystem { state: state.clone(), lifecycle: crate::application::attach(state.clone()), events: crate::window::attach(state), } } pub fn reset(&self) { self.state.mouse.write().unwrap().reset(); self.state.keyboard.write().unwrap().reset(); self.state.touchpad.write().unwrap(
b fn key_presses(&self) -> FastHashSet<Key> { self.state.keyboard.read().unwrap().key_presses() } #[inline] pub fn is_key_release(&self, key: Key) -> bool { self.state.keyboard.read().unwrap().is_key_release(key) } #[inline] pub fn key_releases(&self) -> FastHashSet<Key> { self.state.keyboard.read().unwrap().key_releases() } #[inline] pub fn is_key_repeat(&self, key: Key) -> bool { self.state.keyboard.read().unwrap().is_key_repeat(key) } #[inline] pub fn text(&self) -> String { use std::iter::FromIterator; String::from_iter(self.state.keyboard.read().unwrap().captured_chars()) } #[inline] pub fn has_mouse_attached(&self) -> bool { true } #[inline] pub fn is_mouse_down(&self, button: MouseButton) -> bool { self.state.mouse.read().unwrap().is_button_down(button) } #[inline] pub fn is_mouse_press(&self, button: MouseButton) -> bool { self.state.mouse.read().unwrap().is_button_press(button) } #[inline] pub fn mouse_presses(&self) -> FastHashSet<MouseButton> { self.state.mouse.read().unwrap().mouse_presses() } #[inline] pub fn is_mouse_release(&self, button: MouseButton) -> bool { self.state.mouse.read().unwrap().is_button_release(button) } #[inline] pub fn mouse_releases(&self) -> FastHashSet<MouseButton> { self.state.mouse.read().unwrap().mouse_releases() } #[inline] pub fn is_mouse_click(&self, button: MouseButton) -> bool { self.state.mouse.read().unwrap().is_button_click(button) } #[inline] pub fn is_mouse_double_click(&self, button: MouseButton) -> bool { self.state .mouse .read() .unwrap() .is_button_double_click(button) } #[inline] pub fn mouse_position(&self) -> Vector2<f32> { self.state.mouse.read().unwrap().position() } #[inline] pub fn mouse_movement(&self) -> Vector2<f32> { self.state.mouse.read().unwrap().movement() } #[inline] pub fn mouse_scroll(&self) -> Vector2<f32> { self.state.mouse.read().unwrap().scroll() } #[inline] pub fn has_touchpad_attached(&self) -> bool { true } #[inline] pub fn is_finger_touched(&self, n: usize) -> bool { 
self.state.touchpad.read().unwrap().is_touched(n) } #[inline] pub fn finger_position(&self, n: usize) -> Option<Vector2<f32>> { self.state.touchpad.read().unwrap().position(n) } #[inline] pub fn finger_tap(&self) -> GestureTap { self.state.touchpad.read().unwrap().tap() } #[inline] pub fn finger_double_tap(&self) -> GestureTap { self.state.touchpad.read().unwrap().double_tap() } #[inline] pub fn finger_pan(&self) -> GesturePan { self.state.touchpad.read().unwrap().pan() } }
).reset(); *self.state.touch_emulation_button.write().unwrap() = None; } #[inline] pub fn has_keyboard_attached(&self) -> bool { true } #[inline] pub fn is_key_down(&self, key: Key) -> bool { self.state.keyboard.read().unwrap().is_key_down(key) } #[inline] pub fn is_key_press(&self, key: Key) -> bool { self.state.keyboard.read().unwrap().is_key_press(key) } #[inline] pu
random
[ { "content": "#[inline]\n\npub fn is_mouse_down(button: MouseButton) -> bool {\n\n ctx().is_mouse_down(button)\n\n}\n\n\n\n/// Checks if a mouse button has been pressed during last frame.\n", "file_path": "src/input/mod.rs", "rank": 0, "score": 293791.80774219765 }, { "content": "#[inline]\n\npub fn is_mouse_release(button: MouseButton) -> bool {\n\n ctx().is_mouse_release(button)\n\n}\n\n\n", "file_path": "src/input/mod.rs", "rank": 1, "score": 288264.46173663007 }, { "content": "#[inline]\n\npub fn is_mouse_press(button: MouseButton) -> bool {\n\n ctx().is_mouse_press(button)\n\n}\n\n\n\n/// Checks if a mouse button has been pressed during last frame.\n", "file_path": "src/input/mod.rs", "rank": 2, "score": 288264.46173663007 }, { "content": "#[inline]\n\npub fn is_mouse_click(button: MouseButton) -> bool {\n\n ctx().is_mouse_click(button)\n\n}\n\n\n\n/// Checks if a mouse button has been double clicked during last frame.\n", "file_path": "src/input/mod.rs", "rank": 3, "score": 288264.46173663007 }, { "content": "#[inline]\n\npub fn is_mouse_double_click(button: MouseButton) -> bool {\n\n ctx().is_mouse_double_click(button)\n\n}\n\n\n\n/// Gets the mouse position relative to the lower-left hand corner of the window.\n", "file_path": "src/input/mod.rs", "rank": 4, "score": 283058.17956446833 }, { "content": "#[inline]\n\npub fn is_key_down(key: Key) -> bool {\n\n ctx().is_key_down(key)\n\n}\n\n\n\n/// Checks if a key has been pressed down during the last frame.\n", "file_path": "src/input/mod.rs", "rank": 5, "score": 247658.33584729402 }, { "content": "#[inline]\n\npub fn is_key_press(key: Key) -> bool {\n\n ctx().is_key_press(key)\n\n}\n\n\n", "file_path": "src/input/mod.rs", "rank": 6, "score": 243060.05606325372 }, { "content": "#[inline]\n\npub fn is_key_release(key: Key) -> bool {\n\n ctx().is_key_release(key)\n\n}\n", "file_path": "src/input/mod.rs", "rank": 7, "score": 243060.05606325372 }, { "content": "#[inline]\n\npub fn is_key_repeat(key: Key) -> bool 
{\n\n ctx().is_key_repeat(key)\n\n}\n\n\n\n/// Gets captured text during the last frame.\n", "file_path": "src/input/mod.rs", "rank": 8, "score": 243060.05606325378 }, { "content": "#[inline]\n\npub fn has_keyboard_attached() -> bool {\n\n ctx().has_keyboard_attached()\n\n}\n\n\n\n/// Checks if a key is currently held down.\n", "file_path": "src/input/mod.rs", "rank": 9, "score": 216434.14112294017 }, { "content": "#[inline]\n\npub fn has_mouse_attached() -> bool {\n\n ctx().has_mouse_attached()\n\n}\n\n\n\n/// Checks if a mouse buttoAn is held down.\n", "file_path": "src/input/mod.rs", "rank": 10, "score": 216397.3695775073 }, { "content": "#[inline]\n\npub fn has_touchpad_attached() -> bool {\n\n ctx().has_touchpad_attached()\n\n}\n\n\n\n/// Checks if the `n`th finger is touched during last frame.\n", "file_path": "src/input/mod.rs", "rank": 11, "score": 216389.16565292916 }, { "content": "#[inline]\n\npub fn mouse_presses() -> FastHashSet<MouseButton> {\n\n ctx().mouse_presses()\n\n}\n\n\n\n/// Checks if a mouse button has been released during last frame.\n", "file_path": "src/input/mod.rs", "rank": 12, "score": 202800.27429649152 }, { "content": "#[inline]\n\npub fn mouse_releases() -> FastHashSet<MouseButton> {\n\n ctx().mouse_releases()\n\n}\n\n/// Checks if a mouse button has been clicked during last frame.\n", "file_path": "src/input/mod.rs", "rank": 13, "score": 202800.27429649152 }, { "content": "#[inline]\n\npub fn valid() -> bool {\n\n unsafe { !LIFECYCLE_CTX.is_null() }\n\n}\n\n\n\n/// Checks if the engine is running in headless mode.\n", "file_path": "src/application/mod.rs", "rank": 14, "score": 184544.96591521727 }, { "content": "#[inline]\n\npub fn valid() -> bool {\n\n unsafe { !CTX.is_null() }\n\n}\n\n\n\n/// Reset input to initial states.\n", "file_path": "src/input/mod.rs", "rank": 15, "score": 184544.96591521727 }, { "content": "#[inline]\n\npub fn is_current() -> bool {\n\n ctx().is_current()\n\n}\n\n\n\n/// Returns the position of the 
lower-left hand corner of the window relative to the lower-left\n\n/// hand corner of the desktop. Note that the lower-left hand corner of the desktop is not\n\n/// necessarily the same as the screen. If the user uses a desktop with multiple monitors,\n\n/// the lower-left hand corner of the desktop is the lower-left hand corner of the monitor at\n\n/// the lower-left of the desktop.\n\n///\n\n/// The coordinates can be negative if the lower-left hand corner of the window is outside of\n\n/// the visible screen region.\n", "file_path": "src/window/mod.rs", "rank": 16, "score": 184544.96591521727 }, { "content": "#[inline]\n\npub fn valid() -> bool {\n\n unsafe { !CTX.is_null() }\n\n}\n\n\n\n/// Resolve shortcuts in the provided string recursively and return None if not exists.\n", "file_path": "src/res/mod.rs", "rank": 17, "score": 184544.96591521724 }, { "content": "#[inline]\n\npub fn valid() -> bool {\n\n unsafe { !CTX.is_null() }\n\n}\n\n\n\n// /// Blocks current thread until latch is set. 
Try to keep busy by popping and stealing jobs\n\n// /// as necessary.\n\n// #[inline]\n\n// pub fn wait_until<T>(latch: &T)\n\n// where\n\n// T: LatchWaitProbe,\n\n// {\n\n// ctx().wait_until(latch);\n\n// }\n\n\n", "file_path": "src/sched/mod.rs", "rank": 18, "score": 184544.96591521724 }, { "content": "#[inline]\n\npub fn headless() -> bool {\n\n ctx().headless()\n\n}\n\n\n", "file_path": "src/application/mod.rs", "rank": 19, "score": 184544.96591521727 }, { "content": "#[doc(hidden)]\n\npub fn oneshot() -> Result<()> {\n\n unsafe {\n\n debug_assert!(LIFECYCLE_CTX.is_null(), \"duplicated setup of crayon.\");\n\n\n\n let params = Params::default();\n\n\n\n sys::init();\n\n LIFECYCLE_CTX = Box::into_raw(Box::new(LifecycleSystem::new()));\n\n TIME_CTX = Box::into_raw(Box::new(TimeSystem::new(&params)));\n\n CTX = Box::into_raw(Box::new(EngineSystem::new_headless(params)?));\n\n\n\n ctx().run_oneshot()\n\n }\n\n}\n\n\n\n/// Discard the core system.\n", "file_path": "src/application/mod.rs", "rank": 20, "score": 184322.10944958727 }, { "content": "#[inline]\n\npub fn mouse_position() -> Vector2<f32> {\n\n ctx().mouse_position()\n\n}\n\n\n\n/// Gets mouse movement since last frame.\n", "file_path": "src/input/mod.rs", "rank": 21, "score": 180849.50259413 }, { "content": "#[inline]\n\npub fn make_current() -> Result<()> {\n\n ctx().make_current()\n\n}\n\n\n\n/// Returns true if this context is the current one in this thread.\n", "file_path": "src/window/mod.rs", "rank": 22, "score": 180102.02196617465 }, { "content": "#[inline]\n\npub fn key_presses() -> FastHashSet<Key> {\n\n ctx().key_presses()\n\n}\n\n\n\n/// Checks if a key has been released during the last frame.\n", "file_path": "src/input/mod.rs", "rank": 23, "score": 174601.33899337918 }, { "content": "#[inline]\n\npub fn key_releases() -> FastHashSet<Key>{\n\n ctx().key_releases()\n\n}\n\n/// Checks if a key has been repeated during the last frame.\n", "file_path": "src/input/mod.rs", "rank": 24, "score": 
174601.33899337918 }, { "content": "pub fn from_virtual_key_code(key: &str) -> Option<Key> {\n\n match key {\n\n \"1\" => Some(Key::Key1),\n\n \"2\" => Some(Key::Key2),\n\n \"3\" => Some(Key::Key3),\n\n \"4\" => Some(Key::Key4),\n\n \"5\" => Some(Key::Key5),\n\n \"6\" => Some(Key::Key6),\n\n \"7\" => Some(Key::Key7),\n\n \"8\" => Some(Key::Key8),\n\n \"9\" => Some(Key::Key9),\n\n \"0\" => Some(Key::Key0),\n\n \"A\" | \"a\" => Some(Key::A),\n\n \"B\" | \"b\" => Some(Key::B),\n\n \"C\" | \"c\" => Some(Key::C),\n\n \"D\" | \"d\" => Some(Key::D),\n\n \"E\" | \"e\" => Some(Key::E),\n\n \"F\" | \"f\" => Some(Key::F),\n\n \"G\" | \"g\" => Some(Key::G),\n\n \"H\" | \"h\" => Some(Key::H),\n", "file_path": "src/window/backends/web/types.rs", "rank": 25, "score": 171539.3819880185 }, { "content": "#[inline]\n\npub fn exists(uuid: Uuid) -> bool {\n\n ctx().exists(uuid)\n\n}\n\n\n\n/// Loads file asynchronously with response callback.\n", "file_path": "src/res/mod.rs", "rank": 26, "score": 163453.70495129432 }, { "content": "#[inline]\n\npub fn is_finger_touched(n: usize) -> bool {\n\n ctx().is_finger_touched(n)\n\n}\n\n\n\n/// Gets the position of the `n`th touched finger.\n", "file_path": "src/input/mod.rs", "rank": 27, "score": 163453.70495129432 }, { "content": "#[cfg(target_arch = \"wasm32\")]\n\npub fn new() -> Result<Box<Visitor>> {\n\n let visitor = unsafe { webgl::visitor::WebGLVisitor::new()? 
};\n\n Ok(Box::new(visitor))\n\n}\n\n\n", "file_path": "src/video/backends/mod.rs", "rank": 28, "score": 159625.5432668606 }, { "content": "pub fn quad() -> Result<MeshHandle> {\n\n let verts: [Vertex; 4] = [\n\n Vertex::new([-0.5, -0.5, 0.0], [0.0, 0.0, -1.0], [0.0, 0.0]),\n\n Vertex::new([0.5, -0.5, 0.0], [0.0, 0.0, -1.0], [1.0, 0.0]),\n\n Vertex::new([0.5, 0.5, 0.0], [0.0, 0.0, -1.0], [1.0, 1.0]),\n\n Vertex::new([-0.5, 0.5, 0.0], [0.0, 0.0, -1.0], [0.0, 1.0]),\n\n ];\n\n\n\n let idxes: [u16; 6] = [0, 1, 2, 0, 2, 3];\n\n\n\n let mut params = MeshParams::default();\n\n params.num_verts = verts.len();\n\n params.num_idxes = idxes.len();\n\n params.layout = Vertex::layout();\n\n\n\n let data = MeshData {\n\n vptr: Vertex::encode(&verts[..]).into(),\n\n iptr: IndexFormat::encode(&idxes).into(),\n\n };\n\n\n\n let mesh = video::create_mesh(params, Some(data))?;\n\n Ok(mesh)\n\n}\n\n\n", "file_path": "modules/world/src/assets/mesh_builder.rs", "rank": 29, "score": 158909.19596280676 }, { "content": "pub fn white() -> Result<TextureHandle> {\n\n let mut params = TextureParams::default();\n\n params.dimensions = (2, 2).into();\n\n\n\n let bytes = vec![255; 16];\n\n let data = TextureData {\n\n bytes: vec![bytes.into_boxed_slice()],\n\n };\n\n\n\n let texture = video::create_texture(params, data)?;\n\n Ok(texture)\n\n}\n", "file_path": "modules/world/src/assets/texture_builder.rs", "rank": 30, "score": 158909.19596280676 }, { "content": "pub fn cube() -> Result<MeshHandle> {\n\n let texcoords = [[0.0, 0.0], [1.0, 0.0], [1.0, 1.0], [0.0, 1.0]];\n\n\n\n let points = [\n\n [-0.5, -0.5, 0.5],\n\n [0.5, -0.5, 0.5],\n\n [0.5, 0.5, 0.5],\n\n [-0.5, 0.5, 0.5],\n\n [-0.5, -0.5, -0.5],\n\n [0.5, -0.5, -0.5],\n\n [0.5, 0.5, -0.5],\n\n [-0.5, 0.5, -0.5],\n\n ];\n\n\n\n let normals = [\n\n [0.0, 0.0, 1.0],\n\n [1.0, 0.0, 0.0],\n\n [0.0, 0.0, -1.0],\n\n [-1.0, 0.0, 0.0],\n\n [0.0, 1.0, 0.0],\n", "file_path": "modules/world/src/assets/mesh_builder.rs", "rank": 31, "score": 
158909.19596280676 }, { "content": "pub fn from_event(source: glutin::Event, dimensions: Vector2<u32>) -> Option<Event> {\n\n match source {\n\n glutin::Event::WindowEvent { event, .. } => from_window_event(&event, dimensions),\n\n\n\n glutin::Event::Awakened => Some(Event::Window(WindowEvent::Awakened)),\n\n\n\n glutin::Event::Suspended(v) => if v {\n\n Some(Event::Window(WindowEvent::Suspended))\n\n } else {\n\n Some(Event::Window(WindowEvent::Resumed))\n\n },\n\n\n\n glutin::Event::DeviceEvent { .. } => None,\n\n }\n\n}\n\n\n", "file_path": "src/window/backends/glutin/types.rs", "rank": 32, "score": 152072.31116810968 }, { "content": "#[inline]\n\npub fn position() -> Vector2<i32> {\n\n ctx().position()\n\n}\n\n\n\n/// Returns the size in *points* of the client area of the window.\n\n///\n\n/// The client area is the content of the window, excluding the title bar and borders. These are\n\n/// the size of the frame buffer.\n", "file_path": "src/window/mod.rs", "rank": 33, "score": 150008.96628221523 }, { "content": "#[derive(Debug, Clone, Copy)]\n\nstruct TouchEvent {\n\n pub id: u8,\n\n pub state: TouchState,\n\n pub position: Vector2<f32>,\n\n}\n\n\n\nimpl Default for TouchEvent {\n\n fn default() -> Self {\n\n TouchEvent {\n\n id: 0,\n\n state: TouchState::End,\n\n position: Vector2::new(0.0, 0.0),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/input/touchpad.rs", "rank": 34, "score": 149565.7930428998 }, { "content": "pub fn run(rx: Arc<RwLock<Vec<Command>>>) -> Result<()> {\n\n let device = cpal::default_output_device()\n\n .ok_or_else(|| format_err!(\"No avaiable audio output device\"))?;\n\n\n\n let format = device\n\n .default_output_format()\n\n .expect(\"The device doesn't support any format.\");\n\n\n\n let events = EventLoop::new();\n\n let stream = events.build_output_stream(&device, &format).unwrap();\n\n\n\n info!(\n\n \"Create audio mixer based on CPAL. 
[{:?}] {:?}.\",\n\n device.name(),\n\n format\n\n );\n\n\n\n let mut sampler = Sampler::new(format.channels as u8, format.sample_rate.0 as u32);\n\n Builder::new()\n\n .name(\"Audio\".into())\n", "file_path": "modules/audio/src/mixer/cpal.rs", "rank": 35, "score": 148996.15242131037 }, { "content": "pub fn run(rx: Arc<RwLock<Vec<Command>>>) -> Result<()> {\n\n info!(\"Create web audio mixer.\",);\n\n\n\n let ctx = AudioContext::new().unwrap();\n\n\n\n let closure = Rc::new(RefCell::new(None));\n\n let clone = closure.clone();\n\n let mut sampler = Sampler::new(CHANNELS, ctx.sample_rate() as u32);\n\n\n\n let mut bufs = Vec::new();\n\n for _ in 0..CHANNELS {\n\n bufs.push(Vec::new());\n\n }\n\n\n\n *closure.borrow_mut() = Some(Closure::wrap(Box::new(move |e: AudioProcessingEvent| {\n\n if clone.borrow().is_some() {}\n\n\n\n {\n\n let mut rx = rx.write().unwrap();\n\n sampler.update(rx.drain(..));\n", "file_path": "modules/audio/src/mixer/webaudio.rs", "rank": 36, "score": 148996.15242131037 }, { "content": "pub fn run(rx: Arc<RwLock<Vec<Command>>>) -> Result<()> {\n\n info!(\"Create headless audio mixer.\",);\n\n\n\n Builder::new()\n\n .name(\"Audio\".into())\n\n .spawn(move || {\n\n //\n\n loop {\n\n {\n\n let mut rx = rx.write().unwrap();\n\n rx.clear();\n\n }\n\n\n\n std::thread::sleep(std::time::Duration::from_millis(50));\n\n }\n\n }).expect(\"Failed to create thread for `AudioSystem`.\");\n\n\n\n Ok(())\n\n}\n", "file_path": "modules/audio/src/mixer/headless.rs", "rank": 37, "score": 148996.15242131037 }, { "content": "#[inline]\n\npub fn mouse_movement() -> Vector2<f32> {\n\n ctx().mouse_movement()\n\n}\n\n\n\n/// Gets the scroll movement of mouse, usually provided by mouse wheel.\n", "file_path": "src/input/mod.rs", "rank": 38, "score": 146013.4972464898 }, { "content": "#[inline]\n\npub fn mouse_scroll() -> Vector2<f32> {\n\n ctx().mouse_scroll()\n\n}\n\n\n\n/// Returns true if a touchpad is attached\n", "file_path": "src/input/mod.rs", "rank": 39, "score": 
146013.4972464898 }, { "content": "#[inline]\n\npub fn create_surface(params: SurfaceParams) -> Result<SurfaceHandle> {\n\n ctx().create_surface(params)\n\n}\n\n\n\n/// Gets the `SurfaceParams` if available.\n", "file_path": "src/video/mod.rs", "rank": 40, "score": 145221.04438315527 }, { "content": "#[inline]\n\npub fn create_prefab(prefab: Prefab) -> Result<PrefabHandle> {\n\n ctx().create_prefab(prefab)\n\n}\n\n\n\n/// Create a prefab object from file asynchronously.\n\n///\n\n/// A prefab asset acts as a template from which you can create new entity instances\n\n/// in the world. It stores a entity and its children complete with components and\n\n/// properties internally.\n", "file_path": "modules/world/src/lib.rs", "rank": 41, "score": 145221.04438315527 }, { "content": "#[inline]\n\npub fn surface_state(handle: SurfaceHandle) -> ResourceState {\n\n ctx().surface_state(handle)\n\n}\n\n\n\n/// Deletes surface object.\n", "file_path": "src/video/mod.rs", "rank": 42, "score": 144228.37832297187 }, { "content": "#[inline]\n\npub fn texture_state(handle: TextureHandle) -> ResourceState {\n\n ctx().texture_state(handle)\n\n}\n\n\n\n/// Update a contiguous subregion of an existing two-dimensional texture object.\n", "file_path": "src/video/mod.rs", "rank": 43, "score": 144228.37832297187 }, { "content": "#[inline]\n\npub fn shader_state(handle: ShaderHandle) -> ResourceState {\n\n ctx().shader_state(handle)\n\n}\n\n\n\n/// Delete shader state object.\n", "file_path": "src/video/mod.rs", "rank": 44, "score": 144228.37832297187 }, { "content": "#[inline]\n\npub fn mesh_state(handle: MeshHandle) -> ResourceState {\n\n ctx().mesh_state(handle)\n\n}\n\n\n\n/// Update a subset of dynamic vertex buffer. 
Use `offset` specifies the offset\n\n/// into the buffer object's data store where data replacement will begin, measured\n\n/// in bytes.\n", "file_path": "src/video/mod.rs", "rank": 45, "score": 144228.37832297187 }, { "content": "pub fn sphere(iteration: usize) -> Result<MeshHandle> {\n\n use std::f32::consts::FRAC_1_PI;\n\n\n\n fn normalize(v: [f32; 3]) -> Vertex {\n\n let l = (v[0] * v[0] + v[1] * v[1] + v[2] * v[2]).sqrt();\n\n let v = [v[0] / l, v[1] / l, v[2] / l];\n\n let uv = [v[0].asin() * FRAC_1_PI + 0.5, v[1].asin() * FRAC_1_PI + 0.5];\n\n\n\n Vertex::new(v, v, uv)\n\n }\n\n\n\n let t = (1.0f32 + 5.0f32.sqrt()) / 2.0f32;\n\n let mut verts = vec![\n\n normalize([-1.0, t, 0.0]),\n\n normalize([1.0, t, 0.0]),\n\n normalize([-1.0, -t, 0.0]),\n\n normalize([1.0, -t, 0.0]),\n\n normalize([0.0, -1.0, t]),\n\n normalize([0.0, 1.0, t]),\n\n normalize([0.0, -1.0, -t]),\n", "file_path": "modules/world/src/assets/mesh_builder.rs", "rank": 46, "score": 142440.9101412746 }, { "content": "/// Removes a event listener from window.\n\npub fn detach(handle: EventListenerHandle) {\n\n ctx().remove_event_listener(handle)\n\n}\n\n\n\n/// Shows the window if it was hidden.\n\n///\n\n/// # Platform-specific\n\n///\n\n/// Has no effect on mobile platform.\n", "file_path": "src/window/mod.rs", "rank": 47, "score": 142437.43502104643 }, { "content": "#[inline]\n\npub fn detach(handle: LifecycleListenerHandle) {\n\n lifecycle_ctx().detach(handle)\n\n}\n\n\n\n/// Set minimum frames per second. If fps goes lower than this, time will\n\n/// appear to slow. 
This is useful for some subsystems required strict minimum\n\n/// time step per frame, such like Collision checks.\n", "file_path": "src/application/mod.rs", "rank": 48, "score": 142428.62705363508 }, { "content": "#[inline]\n\npub fn finger_tap() -> GestureTap {\n\n ctx().finger_tap()\n\n}\n\n\n\n/// Gets the double tap gesture.\n", "file_path": "src/input/mod.rs", "rank": 49, "score": 141847.51872802718 }, { "content": "#[inline]\n\npub fn prefab_state(handle: PrefabHandle) -> ResourceState {\n\n ctx().prefab_state(handle)\n\n}\n\n\n\n/// Delete a prefab object from this world.\n", "file_path": "modules/world/src/lib.rs", "rank": 50, "score": 141288.93759469764 }, { "content": "/// Loads file asynchronously. This method will returns a `Request` object immediatedly,\n\n/// its user's responsibility to store the object and frequently check it for completion.\n\npub fn load(uuid: Uuid) -> Result<Request, failure::Error> {\n\n ctx().load(uuid)\n\n}\n\n\n", "file_path": "src/res/mod.rs", "rank": 51, "score": 140459.0611604554 }, { "content": "#[inline]\n\npub fn create_clip_from_uuid(uuid: Uuid) -> Result<AudioClipHandle> {\n\n ctx().create_clip_from_uuid(uuid)\n\n}\n\n\n", "file_path": "modules/audio/src/lib.rs", "rank": 52, "score": 139827.42360559842 }, { "content": "#[inline]\n\npub fn finger_double_tap() -> GestureTap {\n\n ctx().finger_double_tap()\n\n}\n\n\n\n/// Gets the panning gesture.\n", "file_path": "src/input/mod.rs", "rank": 53, "score": 139434.90689777525 }, { "content": "#[inline]\n\npub fn clip_state(handle: AudioClipHandle) -> ResourceState {\n\n ctx().clip_state(handle)\n\n}\n\n\n\n/// Deletes a `AudioClip` resource from `AudioSystem`.\n", "file_path": "modules/audio/src/lib.rs", "rank": 54, "score": 138513.56943377387 }, { "content": "#[inline]\n\npub fn render_texture_state(handle: RenderTextureHandle) -> ResourceState {\n\n ctx().render_texture_state(handle)\n\n}\n\n\n\n/// Delete the render texture object.\n", "file_path": "src/video/mod.rs", 
"rank": 55, "score": 138513.56943377387 }, { "content": "pub fn new(params: WindowParams) -> Result<Box<Visitor>> {\n\n let visitor = visitor::WebVisitor::new(params)?;\n\n Ok(Box::new(visitor))\n\n}\n", "file_path": "src/window/backends/web/mod.rs", "rank": 56, "score": 138287.9042516537 }, { "content": "#[inline]\n\npub fn play<T>(params: T) -> Result<AudioSourceHandle>\n\nwhere\n\n T: Into<AudioSource>,\n\n{\n\n ctx().play(params)\n\n}\n\n\n\n/// Stops a played audio source.\n", "file_path": "modules/audio/src/lib.rs", "rank": 57, "score": 138287.9042516537 }, { "content": "pub fn new(params: WindowParams) -> Result<Box<Visitor>> {\n\n let visitor = self::visitor::GlutinVisitor::from(params)?;\n\n Ok(Box::new(visitor))\n\n}\n", "file_path": "src/window/backends/glutin/mod.rs", "rank": 58, "score": 138287.90425165364 }, { "content": "#[inline]\n\npub fn create_render_texture(params: RenderTextureParams) -> Result<RenderTextureHandle> {\n\n ctx().create_render_texture(params)\n\n}\n\n\n\n/// Gets the `RenderTextureParams` if available.\n", "file_path": "src/video/mod.rs", "rank": 59, "score": 137366.0369163037 }, { "content": "#[inline]\n\npub fn set_listener<T>(position: T)\n\nwhere\n\n T: Into<Vector3<f32>>,\n\n{\n\n ctx().set_listener(position);\n\n}\n\n\n\n/// Creates a clip object from file asynchronously.\n", "file_path": "modules/audio/src/lib.rs", "rank": 60, "score": 135269.54521420112 }, { "content": "/// Executes `f` and captures any panic, translating that panic into a\n\n/// `Err` result. 
The assumption is that any panic will be propagated\n\n/// later with `resume_unwinding`, and hence `f` can be treated as\n\n/// exception safe.\n\npub fn halt_unwinding<F, R>(func: F) -> thread::Result<R>\n\nwhere\n\n F: FnOnce() -> R,\n\n{\n\n panic::catch_unwind(AssertUnwindSafe(func))\n\n}\n\n\n", "file_path": "src/sched/unwind.rs", "rank": 61, "score": 131614.26345799412 }, { "content": "#[inline]\n\npub fn set_position<T>(handle: AudioSourceHandle, position: T)\n\nwhere\n\n T: Into<Vector3<f32>>,\n\n{\n\n ctx().set_position(handle, position)\n\n}\n\n\n\n/// Sets the volume of a playing sound.\n", "file_path": "modules/audio/src/lib.rs", "rank": 62, "score": 130097.88914454241 }, { "content": "#[inline]\n\npub fn hide() {\n\n ctx().hide();\n\n}\n\n\n\n/// Set the context as the active context in this thread.\n", "file_path": "src/window/mod.rs", "rank": 63, "score": 129998.74200979703 }, { "content": "#[inline]\n\npub fn discard() {\n\n ctx().shutdown()\n\n}\n\n\n\npub(crate) unsafe fn late_discard() {\n\n drop(Box::from_raw(CTX as *mut EngineSystem));\n\n CTX = std::ptr::null();\n\n\n\n drop(Box::from_raw(TIME_CTX as *mut TimeSystem));\n\n TIME_CTX = std::ptr::null();\n\n\n\n drop(Box::from_raw(LIFECYCLE_CTX as *mut LifecycleSystem));\n\n LIFECYCLE_CTX = std::ptr::null();\n\n}\n\n\n\n/// Checks if the engine is enabled.\n", "file_path": "src/application/mod.rs", "rank": 64, "score": 129998.74200979703 }, { "content": "#[inline]\n\npub fn reset() {\n\n ctx().reset();\n\n}\n\n\n\n/// Returns true if a keyboard is attached\n", "file_path": "src/input/mod.rs", "rank": 65, "score": 129998.74200979703 }, { "content": "#[inline]\n\npub fn show() {\n\n ctx().show();\n\n}\n\n\n\n/// Hides the window if it was visible.\n\n///\n\n/// # Platform-specific\n\n///\n\n/// Has no effect on mobile platform.\n", "file_path": "src/window/mod.rs", "rank": 66, "score": 129998.74200979703 }, { "content": "#[inline]\n\npub fn finger_position(n: usize) -> Option<Vector2<f32>> {\n\n 
ctx().finger_position(n)\n\n}\n\n\n\n/// Gets the tap gesture.\n", "file_path": "src/input/mod.rs", "rank": 67, "score": 129400.64943519943 }, { "content": "#[inline]\n\npub fn attach<T>(lis: T) -> LifecycleListenerHandle\n\nwhere\n\n T: LifecycleListener + 'static,\n\n{\n\n lifecycle_ctx().attach(lis)\n\n}\n\n\n", "file_path": "src/application/mod.rs", "rank": 68, "score": 128906.03358154136 }, { "content": "/// Setup the core system.\n\npub fn setup<T, T2>(mut params: Params, closure: T) -> Result<()>\n\nwhere\n\n T: FnOnce() -> Result<T2> + 'static,\n\n T2: LifecycleListener + Send + 'static,\n\n{\n\n unsafe {\n\n debug_assert!(LIFECYCLE_CTX.is_null(), \"duplicated setup of crayon.\");\n\n\n\n sys::init();\n\n params.validate();\n\n\n\n let dirs = params.res.dirs.clone();\n\n LIFECYCLE_CTX = Box::into_raw(Box::new(LifecycleSystem::new()));\n\n TIME_CTX = Box::into_raw(Box::new(TimeSystem::new(&params)));\n\n\n\n if std::env::args().any(|v| v == \"headless\") {\n\n CTX = Box::into_raw(Box::new(EngineSystem::new_headless(params)?));\n\n } else {\n\n CTX = Box::into_raw(Box::new(EngineSystem::new(params)?));\n\n };\n\n\n\n let latch = crate::res::inside::load_manifests(dirs)?;\n\n ctx().run(latch, closure)\n\n }\n\n}\n\n\n", "file_path": "src/application/mod.rs", "rank": 69, "score": 128886.04819481546 }, { "content": "/// Adds a event listener.\n\npub fn attach<T: EventListener + 'static>(lis: T) -> EventListenerHandle {\n\n ctx().add_event_listener(lis)\n\n}\n\n\n", "file_path": "src/window/mod.rs", "rank": 70, "score": 127761.13268551936 }, { "content": "#[test]\n\npub fn hierachy() {\n\n let mut scene = Scene::new(HeadlessRenderer::new());\n\n let e1 = scene.create(\"e1\");\n\n let e2 = scene.create(\"e2\");\n\n let e3 = scene.create(\"e3\");\n\n let e4 = scene.create(\"e4\");\n\n\n\n scene.set_parent(e4, e3, false).unwrap();\n\n scene.set_parent(e3, e1, false).unwrap();\n\n scene.set_parent(e2, e1, false).unwrap();\n\n // e1 <- (e2, e3 <- (e4))\n\n\n\n 
assert!(scene.is_ancestor(e2, e1));\n\n assert!(scene.is_ancestor(e3, e1));\n\n assert!(scene.is_ancestor(e4, e1));\n\n assert!(scene.is_ancestor(e4, e3));\n\n\n\n assert!(!scene.is_ancestor(e1, e1));\n\n assert!(!scene.is_ancestor(e1, e2));\n\n assert!(!scene.is_ancestor(e1, e3));\n", "file_path": "modules/world/tests/graph.rs", "rank": 71, "score": 126968.60071454835 }, { "content": "#[inline]\n\npub fn create_prefab_from<T: AsRef<str>>(url: T) -> Result<PrefabHandle> {\n\n ctx().create_prefab_from(url)\n\n}\n\n\n\n/// Return the prefab obejct if exists.\n", "file_path": "modules/world/src/lib.rs", "rank": 72, "score": 126535.49803428643 }, { "content": "#[inline]\n\npub fn load_with_callback<T>(uuid: Uuid, func: T) -> Result<(), failure::Error>\n\nwhere\n\n T: FnOnce(Response) + Send + 'static,\n\n{\n\n ctx().load_with_callback(uuid, func)\n\n}\n\n\n\n/// Loads file asynchronously with response callback.\n", "file_path": "src/res/mod.rs", "rank": 73, "score": 126272.56165913929 }, { "content": "fn from_virtual_key_code(key: glutin::VirtualKeyCode) -> Option<Key> {\n\n match key {\n\n glutin::VirtualKeyCode::Key1 => Some(Key::Key1),\n\n glutin::VirtualKeyCode::Key2 => Some(Key::Key2),\n\n glutin::VirtualKeyCode::Key3 => Some(Key::Key3),\n\n glutin::VirtualKeyCode::Key4 => Some(Key::Key4),\n\n glutin::VirtualKeyCode::Key5 => Some(Key::Key5),\n\n glutin::VirtualKeyCode::Key6 => Some(Key::Key6),\n\n glutin::VirtualKeyCode::Key7 => Some(Key::Key7),\n\n glutin::VirtualKeyCode::Key8 => Some(Key::Key8),\n\n glutin::VirtualKeyCode::Key9 => Some(Key::Key9),\n\n glutin::VirtualKeyCode::Key0 => Some(Key::Key0),\n\n glutin::VirtualKeyCode::A => Some(Key::A),\n\n glutin::VirtualKeyCode::B => Some(Key::B),\n\n glutin::VirtualKeyCode::C => Some(Key::C),\n\n glutin::VirtualKeyCode::D => Some(Key::D),\n\n glutin::VirtualKeyCode::E => Some(Key::E),\n\n glutin::VirtualKeyCode::F => Some(Key::F),\n\n glutin::VirtualKeyCode::G => Some(Key::G),\n\n glutin::VirtualKeyCode::H => 
Some(Key::H),\n", "file_path": "src/window/backends/glutin/types.rs", "rank": 74, "score": 125635.06436113428 }, { "content": "#[inline]\n\npub fn create_clip_from<T: AsRef<str>>(url: T) -> Result<AudioClipHandle> {\n\n ctx().create_clip_from(url)\n\n}\n\n\n\n/// Creates a clip object from file asynchronously.\n", "file_path": "modules/audio/src/lib.rs", "rank": 75, "score": 124213.30876545908 }, { "content": "/// Loads file asynchronously. This method will returns a `Request` object immediatedly,\n\n/// its user's responsibility to store the object and frequently check it for completion.\n\npub fn load_from<T: AsRef<str>>(filename: T) -> Result<Request, failure::Error> {\n\n ctx().load_from(filename)\n\n}\n\n\n\npub(crate) mod inside {\n\n use std::sync::Arc;\n\n\n\n use failure::ResultExt;\n\n\n\n use crate::sched::prelude::{CountLatch, Latch};\n\n\n\n use super::system::ResourceSystem;\n\n use super::ResourceParams;\n\n\n\n pub static mut CTX: *const ResourceSystem = std::ptr::null();\n\n\n\n #[inline]\n\n pub fn ctx() -> &'static ResourceSystem {\n\n unsafe {\n\n debug_assert!(\n", "file_path": "src/res/mod.rs", "rank": 76, "score": 123863.16341706782 }, { "content": "#[inline]\n\npub fn create_texture_from_uuid(uuid: Uuid) -> CrResult<TextureHandle> {\n\n ctx().create_texture_from_uuid(uuid)\n\n}\n\n\n\n/// Get the resource state of specified texture.\n", "file_path": "src/video/mod.rs", "rank": 77, "score": 123059.36690910405 }, { "content": "#[inline]\n\npub fn create_mesh_from_uuid(uuid: Uuid) -> CrResult<MeshHandle> {\n\n ctx().create_mesh_from_uuid(uuid)\n\n}\n\n\n\n/// Gets the `MeshParams` if available.\n", "file_path": "src/video/mod.rs", "rank": 78, "score": 123059.36690910405 }, { "content": "#[inline]\n\npub fn create_shader(params: ShaderParams, vs: String, fs: String) -> Result<ShaderHandle> {\n\n ctx().create_shader(params, vs, fs)\n\n}\n\n\n\n/// Gets the `ShaderParams` if available.\n", "file_path": "src/video/mod.rs", "rank": 79, "score": 
121488.98570101721 }, { "content": "#[inline]\n\npub fn load_from_with_callback<T1, T2>(filename: T1, func: T2) -> Result<(), failure::Error>\n\nwhere\n\n T1: AsRef<str>,\n\n T2: FnOnce(Response) + Send + 'static,\n\n{\n\n ctx().load_from_with_callback(filename, func)\n\n}\n\n\n", "file_path": "src/res/mod.rs", "rank": 80, "score": 121401.7767277731 }, { "content": "#[inline]\n\npub fn fps() -> u32 {\n\n time_ctx().fps()\n\n}\n\n\n\n/// Gets the duration duraing last frame.\n", "file_path": "src/application/mod.rs", "rank": 81, "score": 121255.71120074278 }, { "content": "#[inline]\n\npub fn text() -> String {\n\n ctx().text()\n\n}\n\n\n\n/// Returns true if a mouse is attached\n", "file_path": "src/input/mod.rs", "rank": 82, "score": 121255.71120074278 }, { "content": "pub fn timestamp() -> Timestamp {\n\n let duration = std::time::SystemTime::now()\n\n .duration_since(std::time::UNIX_EPOCH)\n\n .unwrap();\n\n\n\n let ms = u64::from(duration.subsec_millis()) + duration.as_secs() * 1000;\n\n Timestamp::from_millis(ms)\n\n}\n\n\n\npub(crate) fn init() {}\n\n\n\npub(crate) fn run_forever<F, F2>(mut advance: F, mut finished: F2) -> Result<(), failure::Error>\n\nwhere\n\n F: FnMut() -> Result<bool, failure::Error> + 'static,\n\n F2: FnMut() -> Result<(), failure::Error> + 'static,\n\n{\n\n while advance()? 
{}\n\n finished()\n\n}\n", "file_path": "src/application/sys/glutin.rs", "rank": 83, "score": 118455.42725882007 }, { "content": "pub fn timestamp() -> Timestamp {\n\n let ms = web_sys::window()\n\n .expect(\"should have a window in this context\")\n\n .performance()\n\n .expect(\"performance should be available\")\n\n .now();\n\n\n\n Timestamp::from_millis(ms as u64)\n\n}\n\n\n\npub(crate) fn init() {\n\n std::panic::set_hook(Box::new(console_error_panic_hook::hook));\n\n log::set_boxed_logger(Box::new(WebBrowserLogger {})).unwrap();\n\n log::set_max_level(log::LevelFilter::Info);\n\n}\n\n\n\npub(crate) fn run_forever<F, F2>(mut advance: F, mut finished: F2) -> Result<(), failure::Error>\n\nwhere\n\n F: FnMut() -> Result<bool, failure::Error> + 'static,\n\n F2: FnMut() -> Result<(), failure::Error> + 'static,\n", "file_path": "src/application/sys/web.rs", "rank": 84, "score": 118455.42725882007 }, { "content": "fn from_touch_state(state: glutin::TouchPhase) -> TouchState {\n\n match state {\n\n glutin::TouchPhase::Started => TouchState::Start,\n\n glutin::TouchPhase::Moved => TouchState::Move,\n\n glutin::TouchPhase::Ended => TouchState::End,\n\n glutin::TouchPhase::Cancelled => TouchState::Cancel,\n\n }\n\n}\n\n\n\nimpl From<glutin::MouseButton> for MouseButton {\n\n fn from(mouse: glutin::MouseButton) -> Self {\n\n match mouse {\n\n glutin::MouseButton::Left => MouseButton::Left,\n\n glutin::MouseButton::Right => MouseButton::Right,\n\n glutin::MouseButton::Middle => MouseButton::Middle,\n\n glutin::MouseButton::Other(id) => MouseButton::Other(id),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/window/backends/glutin/types.rs", "rank": 85, "score": 117967.82511082297 }, { "content": "enum KeyDownState {\n\n Start(Timestamp),\n\n Press(Timestamp),\n\n}\n\n\n\npub struct Keyboard {\n\n downs: FastHashMap<Key, KeyDownState>,\n\n presses: FastHashSet<Key>,\n\n releases: FastHashSet<Key>,\n\n chars: Vec<char>,\n\n setup: KeyboardParams,\n\n now: 
Timestamp,\n\n}\n\n\n\nimpl Keyboard {\n\n pub fn new(setup: KeyboardParams) -> Self {\n\n Keyboard {\n\n setup,\n\n downs: FastHashMap::default(),\n\n presses: FastHashSet::default(),\n", "file_path": "src/input/keyboard.rs", "rank": 86, "score": 117234.56596719424 }, { "content": "#[inline]\n\npub fn device_pixel_ratio() -> f32 {\n\n ctx().device_pixel_ratio()\n\n}\n\n\n\npub(crate) mod inside {\n\n use crate::errors::*;\n\n use crate::math::prelude::Vector2;\n\n\n\n use super::system::WindowSystem;\n\n use super::WindowParams;\n\n\n\n pub static mut CTX: *const WindowSystem = std::ptr::null();\n\n\n\n #[inline]\n\n pub fn ctx() -> &'static WindowSystem {\n\n unsafe {\n\n debug_assert!(\n\n !CTX.is_null(),\n\n \"window system has not been initialized properly.\"\n\n );\n", "file_path": "src/window/mod.rs", "rank": 87, "score": 115859.80199260256 }, { "content": "#[inline]\n\npub fn finger_pan() -> GesturePan {\n\n ctx().finger_pan()\n\n}\n\n\n\npub(crate) mod inside {\n\n use super::system::InputSystem;\n\n use super::InputParams;\n\n\n\n pub static mut CTX: *const InputSystem = std::ptr::null();\n\n\n\n #[inline]\n\n pub fn ctx() -> &'static InputSystem {\n\n unsafe {\n\n debug_assert!(\n\n !CTX.is_null(),\n\n \"input system has not been initialized properly.\"\n\n );\n\n\n\n &*CTX\n\n }\n", "file_path": "src/input/mod.rs", "rank": 88, "score": 115859.80199260256 }, { "content": "struct Lifecycle {\n\n requests: Arc<RequestQueue>,\n\n}\n\n\n\nimpl LifecycleListener for Lifecycle {\n\n fn on_post_update(&mut self) -> Result<(), failure::Error> {\n\n self.requests.advance();\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl Drop for ResourceSystem {\n\n fn drop(&mut self) {\n\n crate::application::detach(self.lifecycle);\n\n }\n\n}\n\n\n\nimpl ResourceSystem {\n\n pub fn new(params: ResourceParams) -> Result<Self, failure::Error> {\n\n debug_assert!(crate::application::valid(), \"\");\n", "file_path": "src/res/system.rs", "rank": 89, "score": 113874.13917512698 }, { "content": 
"struct Lifecycle {\n\n last_dimensions: Vector2<u32>,\n\n visitor: Box<dyn Visitor>,\n\n state: Arc<VideoState>,\n\n}\n\n\n\nimpl LifecycleListener for Lifecycle {\n\n fn on_pre_update(&mut self) -> CrResult<()> {\n\n // Swap internal commands frame.\n\n self.state.frames.swap();\n\n self.state.frames.write().clear();\n\n self.state.meshes.write().unwrap().advance()?;\n\n self.state.textures.write().unwrap().advance()?;\n\n Ok(())\n\n }\n\n\n\n fn on_post_update(&mut self) -> CrResult<()> {\n\n let dimensions = dimensions_pixels();\n\n\n\n // Resize the window, which would recreate the underlying framebuffer.\n", "file_path": "src/video/system.rs", "rank": 90, "score": 113874.13917512698 }, { "content": "#[inline]\n\npub fn dimensions() -> Vector2<u32> {\n\n ctx().dimensions()\n\n}\n\n\n\n/// Returns the ratio between the backing framebuffer resolution and the window size in\n\n/// screen pixels. This is typically one for a normal display and two for a retina display.\n", "file_path": "src/window/mod.rs", "rank": 91, "score": 113851.68393633509 }, { "content": "#[inline]\n\npub fn default() -> WorldDefaultResources {\n\n ctx().default\n\n}\n\n\n\nmod inside {\n\n use super::system::WorldSystem;\n\n\n\n static mut CTX: *const WorldSystem = std::ptr::null();\n\n\n\n #[inline]\n\n pub fn ctx() -> &'static WorldSystem {\n\n unsafe {\n\n debug_assert!(\n\n !CTX.is_null(),\n\n \"world system has not been initialized properly.\"\n\n );\n\n\n\n &*CTX\n\n }\n\n }\n", "file_path": "modules/world/src/lib.rs", "rank": 92, "score": 113447.19016235063 }, { "content": "pub trait LifecycleListener {\n\n fn on_pre_update(&mut self) -> Result<(), failure::Error> {\n\n Ok(())\n\n }\n\n\n\n fn on_update(&mut self) -> Result<(), failure::Error> {\n\n Ok(())\n\n }\n\n\n\n fn on_render(&mut self) -> Result<(), failure::Error> {\n\n Ok(())\n\n }\n\n\n\n fn on_post_update(&mut self) -> Result<(), failure::Error> {\n\n Ok(())\n\n }\n\n\n\n fn on_exit(&mut self) -> Result<(), 
failure::Error> {\n\n Ok(())\n\n }\n", "file_path": "src/application/lifecycle.rs", "rank": 93, "score": 112946.33331546183 }, { "content": "#[inline]\n\npub fn hash<T: Hash + ?Sized>(v: &T) -> usize {\n\n let mut state = hasher::FxHasher::default();\n\n v.hash(&mut state);\n\n state.finish() as usize\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn basic() {\n\n let mut v: HashMap<&'static str, i32> = Default::default();\n\n v.insert(\"hahah\", 123);\n\n }\n\n}\n\n\n\nmod hasher {\n\n use std::hash::Hasher;\n\n use std::ops::BitXor;\n\n\n\n const ROTATE: u32 = 5;\n\n const SEED64: u64 = 0x517c_c1b7_2722_0a95;\n\n const SEED32: u32 = (SEED64 & 0xFFFF_FFFF) as u32;\n\n\n\n #[cfg(target_pointer_width = \"32\")]\n\n const SEED: usize = SEED32 as usize;\n\n #[cfg(target_pointer_width = \"64\")]\n\n const SEED: usize = SEED64 as usize;\n\n\n", "file_path": "src/utils/hash.rs", "rank": 94, "score": 111512.29977280417 }, { "content": "#[inline]\n\npub fn hash64<T: Hash + ?Sized>(v: &T) -> u64 {\n\n let mut state = hasher::FxHasher64::default();\n\n v.hash(&mut state);\n\n state.finish()\n\n}\n\n\n\n/// A convenience function for when you need a quick 32-bit hash.\n", "file_path": "src/utils/hash.rs", "rank": 95, "score": 111512.29977280417 }, { "content": "#[inline]\n\npub fn hash32<T: Hash + ?Sized>(v: &T) -> u32 {\n\n let mut state = hasher::FxHasher32::default();\n\n v.hash(&mut state);\n\n state.finish() as u32\n\n}\n\n\n\n/// A convenience function for when you need a quick usize hash.\n", "file_path": "src/utils/hash.rs", "rank": 96, "score": 111512.29977280417 }, { "content": "#[inline]\n\npub fn create_mesh_from<T: AsRef<str>>(url: T) -> CrResult<MeshHandle> {\n\n ctx().create_mesh_from(url)\n\n}\n\n\n\n/// Creates a mesh object from file asynchronously.\n", "file_path": "src/video/mod.rs", "rank": 97, "score": 110953.90510904898 }, { "content": "#[inline]\n\npub fn create_texture_from<T: AsRef<str>>(url: T) -> 
CrResult<TextureHandle> {\n\n ctx().create_texture_from(url)\n\n}\n\n\n\n/// Creates a texture object from file asynchronously.\n", "file_path": "src/video/mod.rs", "rank": 98, "score": 110953.90510904898 } ]
Rust
tremor-script/src/ast/support.rs
0xd34b33f/tremor-runtime
73af8033509e224e4cbf078559f27bec4c12cf3d
#![cfg_attr(tarpaulin, skip)] use super::{ BinOpKind, EventPath, Invoke, InvokeAggr, InvokeAggrFn, LocalPath, MetadataPath, Segment, StatePath, UnaryOpKind, }; use std::fmt; impl<'script> fmt::Debug for InvokeAggrFn<'script> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "fn(aggr) {}::{}", self.module, self.fun) } } impl<'script> PartialEq for InvokeAggrFn<'script> { fn eq(&self, other: &Self) -> bool { self.module == other.module && self.fun == other.fun && self.args == other.args } } impl<'script> PartialEq for Segment<'script> { fn eq(&self, other: &Self) -> bool { use Segment::{Element, Id, Idx, Range}; match (self, other) { (Id { mid: id1, .. }, Id { mid: id2, .. }) => id1 == id2, (Idx { idx: idx1, .. }, Idx { idx: idx2, .. }) => idx1 == idx2, (Element { expr: expr1, .. }, Element { expr: expr2, .. }) => expr1 == expr2, ( Range { range_start: start1, range_end: end1, .. }, Range { range_start: start2, range_end: end2, .. }, ) => start1 == start2 && end1 == end2, _ => false, } } } impl<'script> PartialEq for LocalPath<'script> { fn eq(&self, other: &Self) -> bool { self.idx == other.idx && self.is_const == other.is_const && self.segments == other.segments } } impl<'script> PartialEq for MetadataPath<'script> { fn eq(&self, other: &Self) -> bool { self.segments == other.segments } } impl<'script> PartialEq for EventPath<'script> { fn eq(&self, other: &Self) -> bool { self.segments == other.segments } } impl<'script> PartialEq for StatePath<'script> { fn eq(&self, other: &Self) -> bool { self.segments == other.segments } } impl BinOpKind { fn operator_name(self) -> &'static str { match self { Self::Or => "or", Self::Xor => "xor", Self::And => "and", Self::BitOr => "|", Self::BitXor => "^", Self::BitAnd => "&", Self::Eq => "==", Self::NotEq => "!=", Self::Gte => ">=", Self::Gt => ">", Self::Lte => "<=", Self::Lt => "<", Self::RBitShiftSigned => ">>", Self::RBitShiftUnsigned => ">>>", Self::LBitShift => "<<", Self::Add => "+", Self::Sub => "-", 
Self::Mul => "*", Self::Div => "/", Self::Mod => "%", } } } impl fmt::Display for BinOpKind { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(self.operator_name()) } } impl UnaryOpKind { fn operator_name(self) -> &'static str { match self { Self::Plus => "+", Self::Minus => "-", Self::Not => "not", Self::BitNot => "!", } } } impl fmt::Display for UnaryOpKind { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(self.operator_name()) } } impl<'script> PartialEq for Invoke<'script> { fn eq(&self, other: &Self) -> bool { self.mid == other.mid && self.module == other.module && self.fun == other.fun } } impl<'script> fmt::Debug for Invoke<'script> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "fn {}::{}", self.module.join("::"), self.fun) } } impl PartialEq for InvokeAggr { fn eq(&self, other: &Self) -> bool { self.mid == other.mid && self.module == other.module && self.fun == other.fun && self.aggr_id == other.aggr_id } } impl fmt::Debug for InvokeAggr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "fn(aggr) {}::{}", self.module, self.fun) } }
#![cfg_attr(tarpaulin, skip)] use super::{ BinOpKind, EventPath, Invoke, InvokeAggr, InvokeAggrFn, LocalPath, MetadataPath, Segment, StatePath, UnaryOpKind, }; use std::fmt; impl<'script> fmt::Debug for InvokeAggrFn<'script> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "fn(aggr) {}::{}", self.module, self.fun) } } impl<'script> PartialEq for InvokeAggrFn<'script> { fn eq(&self, other: &Self) -> bool { self.module == other.module && self.fun == other.fun && self.args == other.args } } impl<'script> PartialEq for Segment<'script> { fn eq(&self, other: &Self) -> bool { use Segment::{Element, Id, Idx, Range}; match (self, other) { (Id { mid: id1, .. }, Id { mid: id2, .. }) => id1 == id2, (Idx { idx: idx1, .. }, Idx { idx: idx2, .. }) => idx1 == idx2, (Element { expr: expr1, .. }, Element { expr: expr2, .. }) => expr1 == expr2, ( Range { range_start: start1, range_end: end1, .. }, Range { range_start: start2, range_end: end2, .. }, ) => start1 == start2 && end1 == end2, _ => false, } } } impl<'script> PartialEq for LocalPath<'script> { fn eq(&self, other: &Self) -> bool { self.idx == other.idx && self.is_const == other.is_const && self.segments == other.segments } } impl<'script> PartialEq for MetadataPath<'script> { fn eq(&self, other: &Self) -> bool { self.segments == other.segments } } impl<'script> PartialEq for EventPath<'script> { fn eq(&self, other: &Self) -> bool { self.segments == other.segments } } impl<'script> PartialEq for StatePath<'script> { fn eq(&self, other: &Self) -> bool { self.segments == other.segments } } impl BinOpKind { fn operator_name(self) -> &'static str { match self { Self::Or => "or", Self::Xor => "xor", Self::And => "and", Self::BitOr => "|", Self::BitXor => "^", Self::BitAnd => "&",
} impl fmt::Display for BinOpKind { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(self.operator_name()) } } impl UnaryOpKind { fn operator_name(self) -> &'static str { match self { Self::Plus => "+", Self::Minus => "-", Self::Not => "not", Self::BitNot => "!", } } } impl fmt::Display for UnaryOpKind { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.write_str(self.operator_name()) } } impl<'script> PartialEq for Invoke<'script> { fn eq(&self, other: &Self) -> bool { self.mid == other.mid && self.module == other.module && self.fun == other.fun } } impl<'script> fmt::Debug for Invoke<'script> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "fn {}::{}", self.module.join("::"), self.fun) } } impl PartialEq for InvokeAggr { fn eq(&self, other: &Self) -> bool { self.mid == other.mid && self.module == other.module && self.fun == other.fun && self.aggr_id == other.aggr_id } } impl fmt::Debug for InvokeAggr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "fn(aggr) {}::{}", self.module, self.fun) } }
Self::Eq => "==", Self::NotEq => "!=", Self::Gte => ">=", Self::Gt => ">", Self::Lte => "<=", Self::Lt => "<", Self::RBitShiftSigned => ">>", Self::RBitShiftUnsigned => ">>>", Self::LBitShift => "<<", Self::Add => "+", Self::Sub => "-", Self::Mul => "*", Self::Div => "/", Self::Mod => "%", } }
function_block-function_prefix_line
[]
Rust
src/core/image.rs
RahulDas-dev/ndarray-vision
fddbb85f67b2e9124a8c9582ecd775ff5d60a3e7
use crate::core::colour_models::*; use crate::core::traits::PixelBound; use ndarray::prelude::*; use ndarray::{s, Data, DataMut, OwnedRepr, RawDataClone, ViewRepr}; use num_traits::cast::{FromPrimitive, NumCast}; use num_traits::Num; use std::{fmt, hash, marker::PhantomData}; pub type Image<T, C> = ImageBase<OwnedRepr<T>, C>; pub type ImageView<'a, T, C> = ImageBase<ViewRepr<&'a T>, C>; pub struct ImageBase<T, C> where C: ColourModel, T: Data, { pub data: ArrayBase<T, Ix3>, pub(crate) model: PhantomData<C>, } impl<T, U, C> ImageBase<U, C> where U: Data<Elem = T>, T: Copy + Clone + FromPrimitive + Num + NumCast + PixelBound, C: ColourModel, { pub fn into_type<T2>(self) -> Image<T2, C> where T2: Copy + Clone + FromPrimitive + Num + NumCast + PixelBound, { let rescale = |x: &T| { let scaled = normalise_pixel_value(*x) * (T2::max_pixel() - T2::min_pixel()) .to_f64() .unwrap_or_else(|| 0.0f64); T2::from_f64(scaled).unwrap_or_else(T2::zero) + T2::min_pixel() }; let data = self.data.map(rescale); Image::<_, C>::from_data(data) } } impl<S, T, C> ImageBase<S, C> where S: Data<Elem = T>, T: Clone, C: ColourModel, { pub fn to_owned(&self) -> Image<T, C> { Image { data: self.data.to_owned(), model: PhantomData, } } pub fn from_shape_data(rows: usize, cols: usize, data: Vec<T>) -> Image<T, C> { let data = Array3::from_shape_vec((rows, cols, C::channels()), data).unwrap(); Image { data, model: PhantomData, } } } impl<T, C> Image<T, C> where T: Clone + Num, C: ColourModel, { pub fn new(rows: usize, columns: usize) -> Self { Image { data: Array3::zeros((rows, columns, C::channels())), model: PhantomData, } } } impl<T, U, C> ImageBase<T, C> where T: Data<Elem = U>, C: ColourModel, { pub fn from_data(data: ArrayBase<T, Ix3>) -> Self { Self { data, model: PhantomData, } } pub fn rows(&self) -> usize { self.data.shape()[0] } pub fn cols(&self) -> usize { self.data.shape()[1] } pub fn channels(&self) -> usize { C::channels() } pub fn pixel(&self, row: usize, col: usize) -> ArrayView<U, 
Ix1> { self.data.slice(s![row, col, ..]) } pub fn into_type_raw<C2>(self) -> ImageBase<T, C2> where C2: ColourModel, { assert_eq!(C2::channels(), C::channels()); ImageBase::<T, C2>::from_data(self.data) } } impl<T, U, C> ImageBase<T, C> where T: DataMut<Elem = U>, C: ColourModel, { pub fn pixel_mut(&mut self, row: usize, col: usize) -> ArrayViewMut<U, Ix1> { self.data.slice_mut(s![row, col, ..]) } } impl<T, U, C> fmt::Debug for ImageBase<U, C> where U: Data<Elem = T>, T: fmt::Debug, C: ColourModel, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "ColourModel={:?} Data={:?}", self.model, self.data)?; Ok(()) } } impl<T, U, C> PartialEq<ImageBase<U, C>> for ImageBase<U, C> where U: Data<Elem = T>, T: PartialEq, C: ColourModel, { fn eq(&self, other: &Self) -> bool { self.model == other.model && self.data == other.data } } impl<S, C> Clone for ImageBase<S, C> where S: RawDataClone + Data, C: ColourModel, { fn clone(&self) -> Self { Self { data: self.data.clone(), model: PhantomData, } } fn clone_from(&mut self, other: &Self) { self.data.clone_from(&other.data) } } impl<'a, S, C> hash::Hash for ImageBase<S, C> where S: Data, S::Elem: hash::Hash, C: ColourModel, { fn hash<H: hash::Hasher>(&self, state: &mut H) { self.model.hash(state); self.data.hash(state); } } pub fn normalise_pixel_value<T>(t: T) -> f64 where T: PixelBound + Num + NumCast, { let numerator = (t + T::min_pixel()).to_f64(); let denominator = (T::max_pixel() - T::min_pixel()).to_f64(); let numerator = numerator.unwrap_or_else(|| 0.0f64); let denominator = denominator.unwrap_or_else(|| 1.0f64); numerator / denominator } #[cfg(test)] mod tests { use super::*; use ndarray::arr1; #[test] fn image_consistency_checks() { let i = Image::<u8, RGB>::new(1, 2); assert_eq!(i.rows(), 1); assert_eq!(i.cols(), 2); assert_eq!(i.channels(), 3); assert_eq!(i.channels(), i.data.shape()[2]); } #[test] fn image_type_conversion() { let mut i = Image::<u8, RGB>::new(1, 1); i.pixel_mut(0, 0) 
.assign(&arr1(&[u8::max_value(), 0, u8::max_value() / 3])); let t: Image<u16, RGB> = i.into_type(); assert_eq!( t.pixel(0, 0), arr1(&[u16::max_value(), 0, u16::max_value() / 3]) ); } }
use crate::core::colour_models::*; use crate::core::traits::PixelBound; use ndarray::prelude::*; use ndarray::{s, Data, DataMut, OwnedRepr, RawDataClone, ViewRepr}; use num_traits::cast::{FromPrimitive, NumCast}; use num_traits::Num; use std::{fmt, hash, marker::PhantomData}; pub type Image<T, C> = ImageBase<OwnedRepr<T>, C>; pub type ImageView<'a, T, C> = ImageBase<ViewRepr<&'a T>, C>; pub struct ImageBase<T, C> where C: ColourModel, T: Data, { pub data: ArrayBase<T, Ix3>, pub(crate) model: PhantomData<C>, } impl<T, U, C> ImageBase<U, C> where U: Data<Elem = T>, T: Copy + Clone + FromPrimitive + Num + NumCast + PixelBound, C: ColourModel, { pub fn into_type<T2>(self) -> Image<T2, C> where T2: Copy + Clone + FromPrimitive + Num + NumCast + PixelBound, { let rescale = |x: &T| { let scaled = normalise_pixel_value(*x) * (T2::max_pixel() - T2::min_pixel()) .to_f64() .unwrap_or_else(|| 0.0f64); T2::from_f64(scaled).unwrap_or_else(T2::zero) + T2::min_pixel() }; let data = self.data.map(rescale); Image::<_, C>::from_data(data) } } impl<S, T, C> ImageBase<S, C> where S: Data<Elem = T>, T: Clone, C: ColourModel, { pub f
pub fn from_shape_data(rows: usize, cols: usize, data: Vec<T>) -> Image<T, C> { let data = Array3::from_shape_vec((rows, cols, C::channels()), data).unwrap(); Image { data, model: PhantomData, } } } impl<T, C> Image<T, C> where T: Clone + Num, C: ColourModel, { pub fn new(rows: usize, columns: usize) -> Self { Image { data: Array3::zeros((rows, columns, C::channels())), model: PhantomData, } } } impl<T, U, C> ImageBase<T, C> where T: Data<Elem = U>, C: ColourModel, { pub fn from_data(data: ArrayBase<T, Ix3>) -> Self { Self { data, model: PhantomData, } } pub fn rows(&self) -> usize { self.data.shape()[0] } pub fn cols(&self) -> usize { self.data.shape()[1] } pub fn channels(&self) -> usize { C::channels() } pub fn pixel(&self, row: usize, col: usize) -> ArrayView<U, Ix1> { self.data.slice(s![row, col, ..]) } pub fn into_type_raw<C2>(self) -> ImageBase<T, C2> where C2: ColourModel, { assert_eq!(C2::channels(), C::channels()); ImageBase::<T, C2>::from_data(self.data) } } impl<T, U, C> ImageBase<T, C> where T: DataMut<Elem = U>, C: ColourModel, { pub fn pixel_mut(&mut self, row: usize, col: usize) -> ArrayViewMut<U, Ix1> { self.data.slice_mut(s![row, col, ..]) } } impl<T, U, C> fmt::Debug for ImageBase<U, C> where U: Data<Elem = T>, T: fmt::Debug, C: ColourModel, { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "ColourModel={:?} Data={:?}", self.model, self.data)?; Ok(()) } } impl<T, U, C> PartialEq<ImageBase<U, C>> for ImageBase<U, C> where U: Data<Elem = T>, T: PartialEq, C: ColourModel, { fn eq(&self, other: &Self) -> bool { self.model == other.model && self.data == other.data } } impl<S, C> Clone for ImageBase<S, C> where S: RawDataClone + Data, C: ColourModel, { fn clone(&self) -> Self { Self { data: self.data.clone(), model: PhantomData, } } fn clone_from(&mut self, other: &Self) { self.data.clone_from(&other.data) } } impl<'a, S, C> hash::Hash for ImageBase<S, C> where S: Data, S::Elem: hash::Hash, C: ColourModel, { fn hash<H: 
hash::Hasher>(&self, state: &mut H) { self.model.hash(state); self.data.hash(state); } } pub fn normalise_pixel_value<T>(t: T) -> f64 where T: PixelBound + Num + NumCast, { let numerator = (t + T::min_pixel()).to_f64(); let denominator = (T::max_pixel() - T::min_pixel()).to_f64(); let numerator = numerator.unwrap_or_else(|| 0.0f64); let denominator = denominator.unwrap_or_else(|| 1.0f64); numerator / denominator } #[cfg(test)] mod tests { use super::*; use ndarray::arr1; #[test] fn image_consistency_checks() { let i = Image::<u8, RGB>::new(1, 2); assert_eq!(i.rows(), 1); assert_eq!(i.cols(), 2); assert_eq!(i.channels(), 3); assert_eq!(i.channels(), i.data.shape()[2]); } #[test] fn image_type_conversion() { let mut i = Image::<u8, RGB>::new(1, 1); i.pixel_mut(0, 0) .assign(&arr1(&[u8::max_value(), 0, u8::max_value() / 3])); let t: Image<u16, RGB> = i.into_type(); assert_eq!( t.pixel(0, 0), arr1(&[u16::max_value(), 0, u16::max_value() / 3]) ); } }
n to_owned(&self) -> Image<T, C> { Image { data: self.data.to_owned(), model: PhantomData, } }
function_block-function_prefixed
[]
Rust
examples3/convex_decomposition.rs
BenBergman/nphysics
11ca4d6f967c35e7f51e65295174c5b0395cbd93
extern crate rand; extern crate kiss3d; extern crate nalgebra as na; extern crate ncollide; extern crate nphysics; extern crate nphysics_testbed3d; use std::sync::Arc; use std::path::Path; use rand::random; use na::{Pnt3, Vec3, Translation}; use kiss3d::loader::obj; use ncollide::shape::{Plane, Compound, Convex}; use ncollide::procedural::TriMesh3; use ncollide::transformation; use ncollide::bounding_volume::{BoundingVolume, AABB}; use ncollide::bounding_volume; use ncollide::inspection::Repr3; use nphysics::world::World; use nphysics::object::RigidBody; use nphysics_testbed3d::Testbed; fn main() { /* * World */ let mut world = World::new(); world.set_gravity(Vec3::new(0.0, -9.81, 0.0)); /* * Planes */ let shift = 10.0; let normals = [ Vec3::new(0.0, 1.0, 0.0), Vec3::new(-1.0, 1.0, 0.0), Vec3::new(1.0, 1.0, 0.0), Vec3::new(0.0, 1.0, -1.0), Vec3::new(0.0, 1.0, 1.0), ]; let poss = [ Vec3::new(0.0, 0.0, 0.0), Vec3::new(shift, 0.0, 0.0), Vec3::new(-shift, 0.0, 0.0), Vec3::new(0.0, 0.0, shift), Vec3::new(0.0, 0.0, -shift) ]; for (normal, pos) in normals.iter().zip(poss.iter()) { let geom = Plane::new(*normal); let mut rb = RigidBody::new_static(geom, 0.3, 0.6); rb.append_translation(pos); world.add_body(rb); } /* * Create the convex decompositions. 
*/ let geoms = models(); let mut bodies = Vec::new(); let ngeoms = geoms.len(); for obj_path in geoms.into_iter() { let deltas = na::one(); let mtl_path = Path::new(""); let mut geom_data = Vec::new(); let obj = obj::parse_file(&Path::new(&obj_path), &mtl_path, ""); if let Ok(model) = obj { let meshes: Vec<TriMesh3<f32>> = model.into_iter().map(|mesh| mesh.1.to_trimesh().unwrap()).collect(); let (mins, maxs) = bounding_volume::point_cloud_aabb(&deltas, &meshes[0].coords[..]); let mut aabb = AABB::new(mins, maxs); for mesh in meshes[1 ..].iter() { let (mins, maxs) = bounding_volume::point_cloud_aabb(&deltas, &mesh.coords[..]); aabb.merge(&AABB::new(mins, maxs)); } let center = aabb.translation(); let diag = na::norm(&(*aabb.maxs() - *aabb.mins())); for mut trimesh in meshes.into_iter() { trimesh.translate_by(&-center); trimesh.scale_by_scalar(6.0 / diag); trimesh.split_index_buffer(true); let (decomp, _) = transformation::hacd(trimesh, 0.03, 1); for mesh in decomp.into_iter() { let convex = Arc::new(Box::new(Convex::new(mesh.coords)) as Box<Repr3<f32>>); geom_data.push((deltas, convex)); } } let compound = Compound::new(geom_data); let mut rb = RigidBody::new_dynamic(compound, 1.0, 0.3, 0.5); rb.set_deactivation_threshold(Some(0.5)); bodies.push(rb) } } if bodies.len() != ngeoms { println!("#########################################################################################"); println!("Some model are missing. You can download them all at : http://crozet.re/nphysics/models."); println!("All the obj files should be put on the `./media/models` folder."); println!("#########################################################################################"); } let nreplicats = 100 / bodies.len(); for rb in bodies.iter() { for _ in 0 .. nreplicats { let mut rb = rb.clone(); let pos = random::<Vec3<f32>>() * 30.0+ Vec3::new(-15.0, 15.0, -15.0); rb.append_translation(&pos); world.add_body(rb); } } /* * Set up the testbed. 
*/ let mut testbed = Testbed::new(world); testbed.look_at(Pnt3::new(-30.0, 30.0, -30.0), Pnt3::new(0.0, 0.0, 0.0)); testbed.run(); } fn models() -> Vec<String> { vec!("media/models/CRATERS_F_decimated.obj".to_string() , "media/models/DRAGON_F_decimated.obj".to_string() , "media/models/GARGOYLE_F_decimated.obj".to_string() , "media/models/Hand1_decimated.obj".to_string() , "media/models/RSCREATURE_F_decimated.obj".to_string() , "media/models/Sketched-Brunnen_decimated.obj".to_string() , "media/models/Teapot_decimated.obj".to_string() , "media/models/block.obj".to_string() , "media/models/block_decimated.obj".to_string() , "media/models/bowl.obj".to_string() , "media/models/bunny_decimated.obj".to_string() , "media/models/camel.obj".to_string() , "media/models/camel_decimated.obj".to_string() , "media/models/casting.obj".to_string() , "media/models/casting_decimated.obj".to_string() , "media/models/chair.obj".to_string() , "media/models/cow1.obj".to_string() , "media/models/cow1_decimated.obj".to_string() , "media/models/cow2.obj".to_string() , "media/models/cow2_decimated.obj".to_string() , "media/models/crank_decimated.obj".to_string() , "media/models/cup.obj".to_string() , "media/models/cup_decimated.obj".to_string() , "media/models/dancer2_decimated.obj".to_string() , "media/models/deer_bound.obj".to_string() , "media/models/dilo_decimated.obj".to_string() , "media/models/dino_decimated.obj".to_string() , "media/models/drum.obj".to_string() , "media/models/egea.obj".to_string() , "media/models/egea_decimated.obj".to_string() , "media/models/eight.obj".to_string() , "media/models/elephant_decimated.obj".to_string() , "media/models/elk.obj".to_string() , "media/models/elk_decimated.obj".to_string() , "media/models/face-YH_decimated.obj".to_string() , "media/models/feline_decimated.obj".to_string() , "media/models/fish_decimated.obj".to_string() , "media/models/foot.obj".to_string() , "media/models/foot_decimated.obj".to_string() , 
"media/models/genus3.obj".to_string() , "media/models/genus3_decimated.obj".to_string() , "media/models/greek_sculpture_decimated.obj".to_string() , "media/models/hand2_decimated.obj".to_string() , "media/models/hand_decimated.obj".to_string() , "media/models/helix.obj".to_string() , "media/models/helmet.obj".to_string() , "media/models/hero.obj".to_string() , "media/models/hero_decimated.obj".to_string() , "media/models/homer.obj".to_string() , "media/models/homer_decimated.obj".to_string() , "media/models/hornbug.obj".to_string() , "media/models/horse_decimated.obj".to_string() , "media/models/maneki-neko_decimated.obj".to_string() , "media/models/mannequin-devil.obj".to_string() , "media/models/mannequin-devil_decimated.obj".to_string() , "media/models/mannequin.obj".to_string() , "media/models/mannequin_decimated.obj".to_string() , "media/models/mask_decimated.obj".to_string() , "media/models/moaimoai.obj".to_string() , "media/models/moaimoai_decimated.obj".to_string() , "media/models/monk_decimated.obj".to_string() , "media/models/octopus_decimated.obj".to_string() , "media/models/pig.obj".to_string() , "media/models/pig_decimated.obj".to_string() , "media/models/pinocchio_b_decimated.obj".to_string() , "media/models/polygirl.obj".to_string() , "media/models/polygirl_decimated.obj".to_string() , "media/models/rabbit_decimated.obj".to_string() , "media/models/rocker-arm.obj".to_string() , "media/models/rocker-arm_decimated.obj".to_string() , "media/models/screw-remeshed_decimated.obj".to_string() , "media/models/screwdriver_decimated.obj".to_string() , "media/models/shark_b_decimated.obj".to_string() , "media/models/skull-original_decimated.obj".to_string() , "media/models/sledge.obj".to_string() , "media/models/squirrel.obj".to_string() , "media/models/squirrel_decimated.obj".to_string() , "media/models/sword_decimated.obj".to_string() , "media/models/table.obj".to_string() , "media/models/test2.obj".to_string() , "media/models/tstTorusModel.obj".to_string() , 
"media/models/tstTorusModel2.obj".to_string() , "media/models/tstTorusModel3.obj".to_string() , "media/models/tube1.obj".to_string() , "media/models/venus-original_decimated.obj".to_string() , "media/models/venus.obj".to_string() ) }
extern crate rand; extern crate kiss3d; extern crate nalgebra as na; extern crate ncollide; extern crate nphysics; extern crate nphysics_testbed3d; use std::sync::Arc; use std::path::Path; use rand::random; use na::{Pnt3, Vec3, Translation}; use kiss3d::loader::obj; use ncollide::shape::{Plane, Compound, Convex}; use ncollide::procedural::TriMesh3; use ncollide::transformation; use ncollide::bounding_volume::{BoundingVolume, AABB}; use ncollide::bounding_volume; use ncollide::inspection::Repr3; use nphysics::world::World; use nphysics::object::RigidBody; use nphysics_testbed3d::Testbed; fn main() { /* * World */ let mut world = World::new(); world.set_gravity(Vec3::new(0.0, -9.81, 0.0)); /* * Planes */ let shift = 10.0; let normals = [ Vec3::new(0.0, 1.0, 0.0), Vec3::new(-1.0, 1.0, 0.0), Vec3::new(1.0, 1.0, 0.0), Vec3::new(0.0, 1.0, -1.0), Vec3::new(0.0, 1.0, 1.0), ]; let poss = [ Vec3::new(0.0, 0.0, 0.0), Vec3::new(shift, 0.0, 0.0), Vec3::new(-shift, 0.0, 0.0), Vec3::new(0.0, 0.0, shift), Vec3::new(0.0, 0.0, -shift) ]; for (normal, pos) in normals.iter().zip(poss.iter()) { let geom = Plane::new(*normal); let mut rb = RigidBody::new_static(geom, 0.3, 0.6); rb.append_translation(pos); world.add_body(rb); } /* * Create the convex decompositions. 
*/ let geoms = models(); let mut bodies = Vec::new(); let ngeoms = geoms.len(); for obj_path in geoms.into_iter() { let deltas = na::one(); let mtl_path = Path::new(""); let mut geom_data = Vec::new(); let obj = obj::parse_file(&Path::new(&obj_path), &mtl_path, ""); if let Ok(model) = obj { let meshes: Vec<TriMesh3<f32>> = model.into_iter().map(|mesh| mesh.1.to_trimesh().unwrap()).collect(); let (mins, maxs) = bounding_volume::point_cloud_aabb(&deltas, &meshes[0].coords[..]); let mut aabb = AABB::new(mins, maxs); for mesh in meshes[1 ..].iter() { let (mins, maxs) = bounding_volume::point_cloud_aabb(&deltas, &mesh.coords[..]); aabb.merge(&AABB::new(mins, maxs)); } let center = aabb.translation(); let diag = na::norm(&(*aabb.maxs() - *aabb.mins())); for mut trimesh in meshes.into_iter() { trimesh.translate_by(&-center); trimesh.scale_by_scalar(6.0 / diag); trimesh.split_index_buffer(true); let (decomp, _) = transformation::hacd(trimesh, 0.03, 1); for mesh in decomp.into_iter() { let convex = Arc::new(Box::new(Convex::new(mesh.coords)) as Box<Repr3<f32>>); geom_data.push((deltas, convex)); } } let compound = Compound::new(geom_data); let mut rb = RigidBody::new_dynamic(compound, 1.0, 0.3, 0.5); rb.set_deactivation_threshold(Some(0.5)); bodies.push(rb) } } if bodies.len() != ngeoms { println!("#########################################################################################"); println!("Some model are missing. You can download them all at : http://crozet.re/nphysics/models."); println!("All the obj files should be put on the `./media/models` folder."); println!("#########################################################################################"); } let nreplicats = 100 / bodies.len(); for rb in bodies.iter() { for _ in 0 .. nreplicats { let mut rb = rb.clone(); let pos = random::<Vec3<f32>>() * 30.0+ Vec3::new(-15.0, 15.0, -15.0); rb.append_translation(&pos); world.add_body(rb); } } /* * Set up the testbed. 
*/ let mut testbed = Testbed::new(world); testbed.look_at(Pnt3::new(-30.0, 30.0, -30.0), Pnt3::new(0.0, 0.0, 0.0)); testbed.run(); } fn models() -> Vec<String> { vec!("media/models/CRATERS_F_decimated.obj".to_string() , "media/models/DRAGON_F_decimated.obj".to_string() , "media/models/GARGOYLE_F_decimated.obj".to_string() , "media/
, "media/models/genus3.obj".to_string() , "media/models/genus3_decimated.obj".to_string() , "media/models/greek_sculpture_decimated.obj".to_string() , "media/models/hand2_decimated.obj".to_string() , "media/models/hand_decimated.obj".to_string() , "media/models/helix.obj".to_string() , "media/models/helmet.obj".to_string() , "media/models/hero.obj".to_string() , "media/models/hero_decimated.obj".to_string() , "media/models/homer.obj".to_string() , "media/models/homer_decimated.obj".to_string() , "media/models/hornbug.obj".to_string() , "media/models/horse_decimated.obj".to_string() , "media/models/maneki-neko_decimated.obj".to_string() , "media/models/mannequin-devil.obj".to_string() , "media/models/mannequin-devil_decimated.obj".to_string() , "media/models/mannequin.obj".to_string() , "media/models/mannequin_decimated.obj".to_string() , "media/models/mask_decimated.obj".to_string() , "media/models/moaimoai.obj".to_string() , "media/models/moaimoai_decimated.obj".to_string() , "media/models/monk_decimated.obj".to_string() , "media/models/octopus_decimated.obj".to_string() , "media/models/pig.obj".to_string() , "media/models/pig_decimated.obj".to_string() , "media/models/pinocchio_b_decimated.obj".to_string() , "media/models/polygirl.obj".to_string() , "media/models/polygirl_decimated.obj".to_string() , "media/models/rabbit_decimated.obj".to_string() , "media/models/rocker-arm.obj".to_string() , "media/models/rocker-arm_decimated.obj".to_string() , "media/models/screw-remeshed_decimated.obj".to_string() , "media/models/screwdriver_decimated.obj".to_string() , "media/models/shark_b_decimated.obj".to_string() , "media/models/skull-original_decimated.obj".to_string() , "media/models/sledge.obj".to_string() , "media/models/squirrel.obj".to_string() , "media/models/squirrel_decimated.obj".to_string() , "media/models/sword_decimated.obj".to_string() , "media/models/table.obj".to_string() , "media/models/test2.obj".to_string() , "media/models/tstTorusModel.obj".to_string() 
, "media/models/tstTorusModel2.obj".to_string() , "media/models/tstTorusModel3.obj".to_string() , "media/models/tube1.obj".to_string() , "media/models/venus-original_decimated.obj".to_string() , "media/models/venus.obj".to_string() ) }
models/Hand1_decimated.obj".to_string() , "media/models/RSCREATURE_F_decimated.obj".to_string() , "media/models/Sketched-Brunnen_decimated.obj".to_string() , "media/models/Teapot_decimated.obj".to_string() , "media/models/block.obj".to_string() , "media/models/block_decimated.obj".to_string() , "media/models/bowl.obj".to_string() , "media/models/bunny_decimated.obj".to_string() , "media/models/camel.obj".to_string() , "media/models/camel_decimated.obj".to_string() , "media/models/casting.obj".to_string() , "media/models/casting_decimated.obj".to_string() , "media/models/chair.obj".to_string() , "media/models/cow1.obj".to_string() , "media/models/cow1_decimated.obj".to_string() , "media/models/cow2.obj".to_string() , "media/models/cow2_decimated.obj".to_string() , "media/models/crank_decimated.obj".to_string() , "media/models/cup.obj".to_string() , "media/models/cup_decimated.obj".to_string() , "media/models/dancer2_decimated.obj".to_string() , "media/models/deer_bound.obj".to_string() , "media/models/dilo_decimated.obj".to_string() , "media/models/dino_decimated.obj".to_string() , "media/models/drum.obj".to_string() , "media/models/egea.obj".to_string() , "media/models/egea_decimated.obj".to_string() , "media/models/eight.obj".to_string() , "media/models/elephant_decimated.obj".to_string() , "media/models/elk.obj".to_string() , "media/models/elk_decimated.obj".to_string() , "media/models/face-YH_decimated.obj".to_string() , "media/models/feline_decimated.obj".to_string() , "media/models/fish_decimated.obj".to_string() , "media/models/foot.obj".to_string() , "media/models/foot_decimated.obj".to_string()
function_block-random_span
[ { "content": "fn add_ragdoll(pos: Vec3<f32>, world: &mut World) {\n\n // head\n\n let head_geom = Ball::new(0.8);\n\n let mut head = RigidBody::new_dynamic(head_geom, 1.0, 0.3, 0.5);\n\n head.append_translation(&(pos + Vec3::new(0.0, 2.4, 0.0)));\n\n\n\n // body\n\n let body_geom = Cylinder::new(1.2, 0.5);\n\n let mut body = RigidBody::new_dynamic(body_geom, 1.0, 0.3, 0.5);\n\n body.append_translation(&pos);\n\n\n\n // right arm\n\n let rarm_geom = Cylinder::new(1.6, 0.2);\n\n let mut rarm = RigidBody::new_dynamic(rarm_geom, 1.0, 0.3, 0.5);\n\n rarm.append_rotation(&Vec3::new(f32::consts::FRAC_PI_2, 0.0, 0.0));\n\n rarm.append_translation(&(pos + Vec3::new(0.0, 1.0, 2.4)));\n\n\n\n // left arm\n\n let mut larm = rarm.clone();\n\n larm.append_translation(&Vec3::new(0.0, 0.0, -4.8));\n", "file_path": "examples3/ragdoll.rs", "rank": 0, "score": 226666.3254039269 }, { "content": "fn draw_collisions(window: &mut Window, physics: &mut World) {\n\n let mut collisions = Vec::new();\n\n\n\n physics.interferences(&mut collisions);\n\n\n\n for c in collisions.iter() {\n\n match *c {\n\n Constraint::RBRB(_, _, ref c) => {\n\n window.draw_line(&c.world1, &c.world2, &Pnt3::new(1.0, 0.0, 0.0));\n\n\n\n let center = na::center(&c.world1, &c.world2);\n\n let end = center + c.normal * 0.4f32;\n\n window.draw_line(&center, &end, &Pnt3::new(0.0, 1.0, 1.0))\n\n },\n\n Constraint::BallInSocket(ref bis) => {\n\n let bbis = bis.borrow();\n\n window.draw_line(&bbis.anchor1_pos(), &bbis.anchor2_pos(), &Pnt3::new(0.0, 1.0, 0.0));\n\n },\n\n Constraint::Fixed(ref f) => {\n\n // FIXME: draw the rotation too\n\n window.draw_line(&f.borrow().anchor1_pos().translate(&na::orig()), &f.borrow().anchor2_pos().translate(&na::orig()), &Pnt3::new(0.0, 1.0, 0.0));\n\n }\n\n }\n\n }\n\n}\n", "file_path": "examples3/nphysics_testbed3d/src/testbed.rs", "rank": 1, "score": 222547.5737631244 }, { "content": "fn create_the_balls(world: &mut World) {\n\n let num = (4000.0f32.sqrt()) as usize;\n\n let rad = 
0.5;\n\n let shift = 2.5 * rad;\n\n let centerx = shift * (num as f32) / 2.0;\n\n let centery = shift * (num as f32) / 2.0;\n\n\n\n for i in 0usize .. num {\n\n for j in 0usize .. num {\n\n let x = i as f32 * 2.5 * rad - centerx;\n\n let y = j as f32 * 2.5 * rad - centery * 2.0 - 20.0;\n\n\n\n let mut rb = RigidBody::new_dynamic(Ball::new(rad), 1.0, 0.3, 0.6);\n\n\n\n rb.append_translation(&Vec2::new(x, y));\n\n\n\n world.add_body(rb);\n\n }\n\n }\n\n}\n\n\n\n\n", "file_path": "examples2/balls_vee.rs", "rank": 2, "score": 218729.4059662996 }, { "content": "fn create_the_walls(world: &mut World) {\n\n /*\n\n * First plane\n\n */\n\n let mut rb = RigidBody::new_static(Plane::new(Vec2::new(-1.0, -1.0)), 0.3, 0.6);\n\n rb.append_translation(&Vec2::new(0.0, 10.0));\n\n world.add_body(rb);\n\n\n\n /*\n\n * Second plane\n\n */\n\n let mut rb = RigidBody::new_static(Plane::new(Vec2::new(1.0, -1.0)), 0.3, 0.6);\n\n rb.append_translation(&Vec2::new(0.0, 10.0));\n\n world.add_body(rb);\n\n}\n\n\n\n\n", "file_path": "examples2/balls_vee.rs", "rank": 3, "score": 218729.4059662996 }, { "content": "fn add_ragdoll(pos: Vec2<f32>, world: &mut World) {\n\n // head\n\n let head_geom = Ball::new(0.8);\n\n let mut head = RigidBody::new_dynamic(head_geom, 1.0, 0.3, 0.5);\n\n head.append_translation(&(pos + Vec2::new(0.0, -2.4)));\n\n\n\n // body\n\n let body_geom = Cuboid::new(Vec2::new(1.2, 0.5));\n\n let mut body = RigidBody::new_dynamic(body_geom, 1.0, 0.3, 0.5);\n\n body.append_rotation(&-Vec1::new(BaseFloat::frac_pi_2()));\n\n body.append_translation(&pos);\n\n\n\n // right arm\n\n let rarm_geom = Cuboid::new(Vec2::new(1.6, 0.2));\n\n let mut rarm = RigidBody::new_dynamic(rarm_geom, 1.0, 0.3, 0.5);\n\n rarm.append_translation(&(pos + Vec2::new(2.4, -1.0)));\n\n\n\n // left arm\n\n let mut larm = rarm.clone();\n\n larm.append_translation(&Vec2::new(-4.8, 0.0));\n", "file_path": "examples2/ragdoll.rs", "rank": 5, "score": 171297.55188509726 }, { "content": "fn main() {\n\n /*\n\n * 
World\n\n */\n\n let mut world = World::new();\n\n world.set_gravity(Vec2::new(0.0, 9.81));\n\n\n\n /*\n\n * Polyline\n\n */\n\n let num_split = 5;\n\n let begin = -75.0;\n\n let max_h = 15.0;\n\n let begin_h = 15.0;\n\n let step = (begin.abs() * 2.0) / (num_split as f32);\n\n let mut vertices: Vec<Pnt2<f32>> = (0 .. num_split + 2).map(|i| Pnt2::new(begin + (i as f32) * step, 0.0)).collect();\n\n let mut indices = Vec::new();\n\n let mut rng: StdRng = SeedableRng::from_seed(&[1, 2, 3, 4][..]);\n\n\n\n for i in (0usize .. num_split) {\n", "file_path": "examples2/mesh.rs", "rank": 6, "score": 164531.1692156622 }, { "content": "fn main() {\n\n /*\n\n * World\n\n */\n\n let mut world = World::new();\n\n world.set_gravity(Vec3::new(0.0, -9.81, 0.0));\n\n\n\n let meshes = Testbed::load_obj(\"media/great_hall.obj\");\n\n\n\n for (vertices, indices) in meshes.into_iter() {\n\n let vertices = vertices.iter().map(|v| *v * 3.0).collect();\n\n let indices = indices.chunks(3).map(|is| Pnt3::new(is[0], is[1], is[2])).collect();\n\n\n\n let mesh: TriMesh3<f32> = TriMesh::new(Arc::new(vertices), Arc::new(indices), None, None);\n\n\n\n world.add_body(RigidBody::new_static(mesh, 0.3, 0.6));\n\n }\n\n\n\n /*\n\n * Set up the testbed.\n\n */\n\n let mut testbed = Testbed::new(world);\n\n\n\n testbed.run();\n\n}\n", "file_path": "examples3/mesh.rs", "rank": 7, "score": 164531.1692156622 }, { "content": "fn main() {\n\n /*\n\n * World\n\n */\n\n let mut world = World::new();\n\n world.set_gravity(Vec2::new(0.0, 9.81));\n\n\n\n /*\n\n * First plane\n\n */\n\n let mut rb = RigidBody::new_static(Plane::new(Vec2::new(-1.0, -1.0)), 0.3, 0.6);\n\n\n\n rb.append_translation(&Vec2::new(0.0, 10.0));\n\n\n\n world.add_body(rb);\n\n\n\n /*\n\n * Second plane\n\n */\n\n let mut rb = RigidBody::new_static(Plane::new(Vec2::new(1.0, -1.0)), 0.3, 0.6);\n", "file_path": "examples2/compound.rs", "rank": 8, "score": 164479.25491688808 }, { "content": "fn main() {\n\n /*\n\n * World\n\n */\n\n let mut 
world = World::new();\n\n world.set_gravity(Vec3::new(0.0, -9.81, 0.0));\n\n\n\n /*\n\n * Planes\n\n */\n\n let rb = RigidBody::new_static(Plane::new(Vec3::new(0.0, 1.0, 0.0)), 0.3, 0.6);\n\n\n\n world.add_body(rb);\n\n\n\n /*\n\n * Cross shaped geometry\n\n */\n\n let delta1 = Iso3::new(Vec3::new(0.0, -5.0, 0.0), na::zero());\n\n let delta2 = Iso3::new(Vec3::new(-5.0, 0.0, 0.0), na::zero());\n\n let delta3 = Iso3::new(Vec3::new(5.0, 0.0, 0.0), na::zero());\n", "file_path": "examples3/compound.rs", "rank": 9, "score": 164479.25491688808 }, { "content": "fn main() {\n\n /*\n\n * World\n\n */\n\n let mut world = World::new();\n\n world.set_gravity(Vec3::new(0.0, -9.81, 0.0));\n\n\n\n /*\n\n * Plane\n\n */\n\n let geom = Plane::new(Vec3::new(0.0, 1.0, 0.0));\n\n\n\n world.add_body(RigidBody::new_static(geom, 0.3, 0.6));\n\n\n\n /*\n\n * Create the convex geometries.\n\n */\n\n let npts = 10usize;\n\n let num = 8;\n\n let shift = 2.0;\n", "file_path": "examples3/convex.rs", "rank": 10, "score": 163937.11309323646 }, { "content": "fn create_the_world() -> World {\n\n let mut world = World::new();\n\n world.set_gravity(Vec2::new(0.0, 9.81));\n\n world\n\n}\n\n\n\n\n", "file_path": "examples2/balls_vee.rs", "rank": 11, "score": 162483.77549743335 }, { "content": "fn main() {\n\n /*\n\n * World\n\n */\n\n let mut world = World::new();\n\n world.set_gravity(Vec2::new(0.0, 9.81));\n\n\n\n /*\n\n * First plane\n\n */\n\n let mut rb = RigidBody::new_static(Plane::new(Vec2::new(-1.0, -1.0)), 0.3, 0.6);\n\n\n\n rb.append_translation(&Vec2::new(0.0, 10.0));\n\n\n\n world.add_body(rb);\n\n\n\n /*\n\n * Second plane\n\n */\n\n let mut rb = RigidBody::new_static(Plane::new(Vec2::new(1.0, -1.0)), 0.3, 0.6);\n", "file_path": "examples2/convex_vee.rs", "rank": 12, "score": 160156.94303301658 }, { "content": "fn main() {\n\n /*\n\n * World\n\n */\n\n let mut world = World::new();\n\n world.set_gravity(Vec3::new(0.0, -9.81, 0.0));\n\n\n\n /*\n\n * Plane\n\n */\n\n let geom = 
Plane::new(Vec3::new(0.0, 1.0, 0.0));\n\n\n\n world.add_body(RigidBody::new_static(geom, 0.3, 0.6));\n\n\n\n /*\n\n * Create the convex geometries.\n\n */\n\n let num = 8;\n\n let shift = 2.0;\n\n let centerx = shift * (num as f32) / 2.0;\n", "file_path": "examples3/known_bug_excentric_convex.rs", "rank": 14, "score": 153280.7172158185 }, { "content": "/// Performs the `find` part of the union-find algorithm.\n\npub fn find(x: usize, sets: &mut [UnionFindSet]) -> usize {\n\n if sets[x].parent != x {\n\n sets[x].parent = find(sets[x].parent, sets);\n\n }\n\n\n\n sets[x].parent\n\n}\n\n \n", "file_path": "src/utils/union_find.rs", "rank": 15, "score": 133247.1814035765 }, { "content": "/// Performs the `union` part of the union-find algorithm.\n\npub fn union(x: usize, y: usize, sets: &mut [UnionFindSet]) {\n\n let x_root = find(x, sets);\n\n let y_root = find(y, sets);\n\n\n\n if x_root == y_root {\n\n return\n\n }\n\n\n\n let rankx = sets[x_root].rank;\n\n let ranky = sets[y_root].rank;\n\n\n\n if rankx < ranky {\n\n sets[x_root].parent = y_root\n\n }\n\n else if rankx > ranky {\n\n sets[y_root].parent = x_root\n\n }\n\n else {\n\n sets[y_root].parent = x_root;\n\n sets[x_root].rank = rankx + 1\n\n }\n\n}\n", "file_path": "src/utils/union_find.rs", "rank": 16, "score": 131917.7210008093 }, { "content": "fn run_simulation(world: World) {\n\n let mut testbed = Testbed::new(world);\n\n\n\n testbed.run();\n\n}\n", "file_path": "examples2/balls_vee.rs", "rank": 17, "score": 128527.17661716237 }, { "content": "fn main() {\n\n /*\n\n * World\n\n */\n\n let mut world = World::new();\n\n world.set_gravity(Vec2::new(0.0, 9.81));\n\n\n\n /*\n\n * First plane\n\n */\n\n let rb = RigidBody::new_static(Plane::new(Vec2::new(0.0, -1.0)), 0.3, 0.6);\n\n\n\n world.add_body(rb);\n\n\n\n /*\n\n * Create the boxes\n\n */\n\n let num = 25;\n\n let rad = 0.5;\n\n let shift = 2.5 * rad;\n", "file_path": "examples2/pyramid.rs", "rank": 18, "score": 124678.39269248547 }, { "content": "fn 
main() {\n\n /*\n\n * World\n\n */\n\n let mut world = World::new();\n\n world.set_gravity(Vec3::new(0.0, -9.81, 0.0));\n\n\n\n /*\n\n * A plane for the ground\n\n */\n\n let ground_geom = Plane::new(Vec3::new(0.0, 1.0, 0.0));\n\n\n\n world.add_body(RigidBody::new_static(ground_geom, 0.3, 0.6));\n\n\n\n /*\n\n * Create the ragdolls\n\n */\n\n let n = 5;\n\n let shift = 10.0;\n\n\n", "file_path": "examples3/ragdoll.rs", "rank": 19, "score": 124678.39269248547 }, { "content": "fn main() {\n\n /*\n\n * World\n\n */\n\n let mut world = World::new();\n\n world.set_gravity(Vec2::new(0.0, 9.81));\n\n\n\n /*\n\n * First plane\n\n */\n\n let rb = RigidBody::new_static(Plane::new(Vec2::new(0.0, -1.0)), 0.3, 0.6);\n\n\n\n world.add_body(rb);\n\n\n\n /*\n\n * Create the boxes\n\n */\n\n let width = 100;\n\n let height = 20;\n\n let rad = 0.5;\n", "file_path": "examples2/wall.rs", "rank": 20, "score": 124678.39269248547 }, { "content": "fn main() {\n\n /*\n\n * World\n\n */\n\n let mut world = World::new();\n\n world.set_gravity(Vec3::new(0.0, -9.81, 0.0));\n\n\n\n /*\n\n * Planes\n\n */\n\n let rb = RigidBody::new_static(Plane::new(Vec3::new(0.0, 1.0, 0.0)), 0.3, 0.6);\n\n\n\n world.add_body(rb);\n\n\n\n /*\n\n * Create the boxes\n\n */\n\n let num = 30;\n\n let rad = 0.5;\n\n let shift = rad * 2.0;\n", "file_path": "examples3/pyramid.rs", "rank": 21, "score": 124678.39269248547 }, { "content": "fn main() {\n\n let mut testbed = Testbed::new_empty();\n\n\n\n /*\n\n * World\n\n */\n\n let mut world = World::new();\n\n world.set_gravity(Vec3::new(0.0, -9.81, 0.0));\n\n\n\n /*\n\n * Planes\n\n */\n\n let geom = Plane::new(Vec3::new(0.0, 1.0, 0.0));\n\n\n\n world.add_body(RigidBody::new_static(geom, 0.3, 0.6));\n\n\n\n let geom = Plane::new(Vec3::new(0.0, -1.0, 0.0));\n\n let mut rb = RigidBody::new_static(geom, 0.3, 0.6);\n\n\n\n rb.append_translation(&Vec3::new(0.0, 50.0, 0.0));\n", "file_path": "examples3/gravity.rs", "rank": 22, "score": 124678.39269248547 }, { "content": "fn 
main() {\n\n let mut testbed = Testbed::new_empty();\n\n\n\n /*\n\n * World\n\n */\n\n let mut world = World::new();\n\n world.set_gravity(Vec2::new(0.0, 9.81));\n\n\n\n /*\n\n * First plane\n\n */\n\n let mut rb = RigidBody::new_static(Plane::new(Vec2::new(0.0, 1.0)), 0.3, 0.6);\n\n\n\n rb.append_translation(&Vec2::new(0.0, -10.0));\n\n\n\n world.add_body(rb);\n\n\n\n /*\n\n * Second plane\n", "file_path": "examples2/gravity.rs", "rank": 23, "score": 124678.39269248547 }, { "content": "fn main() {\n\n /*\n\n * World\n\n */\n\n let mut world = World::new();\n\n world.set_gravity(Vec2::new(0.0, 9.81));\n\n\n\n /*\n\n * First plane\n\n */\n\n let mut rb = RigidBody::new_static(Plane::new(Vec2::new(-1.0, -1.0)), 0.3, 0.6);\n\n\n\n rb.append_translation(&Vec2::new(0.0, 10.0));\n\n\n\n world.add_body(rb);\n\n\n\n /*\n\n * Second plane\n\n */\n\n let mut rb = RigidBody::new_static(Plane::new(Vec2::new(1.0, -1.0)), 0.3, 0.6);\n", "file_path": "examples2/cross.rs", "rank": 24, "score": 124678.39269248547 }, { "content": "fn main() {\n\n /*\n\n * World\n\n */\n\n let mut world = World::new();\n\n world.set_gravity(Vec3::new(0.0, -9.81, 0.0));\n\n\n\n /*\n\n * Planes\n\n */\n\n let rb = RigidBody::new_static(Plane::new(Vec3::new(0.0, 1.0, 0.0)), 0.3, 0.6);\n\n\n\n world.add_body(rb);\n\n\n\n /*\n\n * Create the boxes\n\n */\n\n let width = 50;\n\n let height = 10;\n\n let rad = 1.0;\n", "file_path": "examples3/wall.rs", "rank": 25, "score": 124678.39269248547 }, { "content": "fn main() {\n\n /*\n\n * World\n\n */\n\n let mut world = World::new();\n\n world.set_gravity(Vec2::new(0.0, 9.81));\n\n\n\n /*\n\n * A plane for the ground\n\n */\n\n let ground_geom = Plane::new(Vec2::new(0.0, -1.0));\n\n\n\n world.add_body(RigidBody::new_static(ground_geom, 0.3, 0.6));\n\n\n\n /*\n\n * Create the ragdolls\n\n */\n\n let n = 5;\n\n let shift = 10.0;\n\n\n", "file_path": "examples2/ragdoll.rs", "rank": 26, "score": 124678.39269248547 }, { "content": "fn main() {\n\n /*\n\n * World\n\n 
*/\n\n let mut world = World::new();\n\n world.set_gravity(Vec3::new(0.0, -9.81, 0.0));\n\n\n\n /*\n\n * Planes\n\n */\n\n let rb = RigidBody::new_static(Plane::new(Vec3::new(0.0, 1.0, 0.0)), 0.3, 0.6);\n\n\n\n world.add_body(rb);\n\n\n\n /*\n\n * Create the boxes\n\n */\n\n let num = 8;\n\n let rad = 1.0;\n\n let shift = (rad + 0.08) * 2.0;\n", "file_path": "examples3/primitives.rs", "rank": 27, "score": 124678.39269248547 }, { "content": "fn main() {\n\n /*\n\n * World\n\n */\n\n let mut world = World::new();\n\n world.set_gravity(Vec3::new(0.0, -9.81, 0.0));\n\n\n\n /*\n\n * Planes\n\n */\n\n let rb = RigidBody::new_static(Plane::new(Vec3::new(0.0, 1.0, 0.0)), 0.3, 0.6);\n\n\n\n world.add_body(rb);\n\n\n\n /*\n\n * Cross shaped geometry\n\n */\n\n let mut cross_geoms = Vec::new();\n\n\n\n let edge_x = Box::new(Cuboid::new(Vec3::new(4.96f32, 0.21, 0.21)));\n", "file_path": "examples3/cross.rs", "rank": 28, "score": 124678.39269248547 }, { "content": "pub fn draw_colls(window: &mut graphics::RenderWindow,\n\n physics: &mut World) {\n\n\n\n let mut collisions = Vec::new();\n\n\n\n physics.interferences(&mut collisions);\n\n\n\n for c in collisions.iter() {\n\n match *c {\n\n Constraint::RBRB(_, _, ref c) => {\n\n draw_line(\n\n window,\n\n &c.world1,\n\n &c.world2,\n\n &Color::new_rgb(255, 255, 255));\n\n\n\n let center = na::center(&c.world1, &c.world2);\n\n draw_line(\n\n window,\n\n &center,\n", "file_path": "examples2/nphysics_testbed2d/src/draw_helper.rs", "rank": 29, "score": 122246.79889082463 }, { "content": "fn main() {\n\n /*\n\n * World\n\n */\n\n let mut world = World::new();\n\n world.set_gravity(Vec3::new(0.0, -9.81, 0.0));\n\n\n\n /*\n\n * Planes\n\n */\n\n let normals = [\n\n Vec3::new(-1.0, 1.0, -1.0 ),\n\n Vec3::new(1.0, 1.0, -1.0 ),\n\n Vec3::new(-1.0, 1.0, 1.0 ),\n\n Vec3::new(1.0, 1.0, 1.0 )\n\n ];\n\n for n in normals.iter() {\n\n let rb = RigidBody::new_static(Plane::new(*n), 0.3, 0.6);\n\n\n\n world.add_body(rb);\n", "file_path": 
"examples3/balls_vee.rs", "rank": 30, "score": 122158.37908957365 }, { "content": "fn main() {\n\n /*\n\n * World\n\n */\n\n let mut world = World::new();\n\n world.set_gravity(Vec2::new(0.0, 0.0));\n\n\n\n /*\n\n * Create the box that will be deactivated.\n\n */\n\n let rad = 1.0;\n\n let geom = Cuboid::new(Vec2::new(rad, rad));\n\n let mut rb = RigidBody::new_dynamic(geom, 1.0, 0.3, 0.5);\n\n\n\n rb.set_lin_vel(Vec2::new(0.99, 0.0));\n\n\n\n world.add_body(rb.clone());\n\n\n\n /*\n\n * Create the box that will not be deactivated.\n", "file_path": "examples2/fixed_bug_11.rs", "rank": 31, "score": 122158.37908957365 }, { "content": "fn main() {\n\n let mut world = create_the_world();\n\n create_the_walls(&mut world);\n\n create_the_balls(&mut world);\n\n run_simulation(world);\n\n}\n\n\n\n\n", "file_path": "examples2/balls_vee.rs", "rank": 32, "score": 122158.37908957365 }, { "content": "fn main() {\n\n /*\n\n * World\n\n */\n\n let mut world = World::new();\n\n world.set_gravity(Vec2::new(0.0, 9.81));\n\n\n\n /*\n\n * First plane\n\n */\n\n let mut rb = RigidBody::new_static(Plane::new(Vec2::new(-1.0, -1.0)), 0.3, 0.6);\n\n\n\n rb.append_translation(&Vec2::new(0.0, 10.0));\n\n\n\n world.add_body(rb);\n\n\n\n /*\n\n * Second plane\n\n */\n\n let mut rb = RigidBody::new_static(Plane::new(Vec2::new(1.0, -1.0)), 0.3, 0.6);\n", "file_path": "examples2/boxes_vee.rs", "rank": 33, "score": 122158.37908957365 }, { "content": "fn main() {\n\n /*\n\n * World\n\n */\n\n let mut world = World::new();\n\n world.set_gravity(Vec3::new(0.0, -9.81, 0.0));\n\n\n\n /*\n\n * Plane\n\n */\n\n let geom = Plane::new(Vec3::new(0.0, 1.0, 0.0));\n\n\n\n world.add_body(RigidBody::new_static(geom, 0.3, 0.6));\n\n\n\n /*\n\n * Create the boxes\n\n */\n\n let num = 8;\n\n let rad = 1.0;\n\n let shift = rad * 2.0;\n", "file_path": "examples3/boxes_vee.rs", "rank": 34, "score": 122158.37908957365 }, { "content": "fn usage(exe_name: &str) {\n\n println!(\"Usage: {} [OPTION] \", exe_name);\n\n 
println!(\"\");\n\n println!(\"Options:\");\n\n println!(\" --help - prints this help message and exits.\");\n\n println!(\" --pause - do not start the simulation right away.\");\n\n println!(\"\");\n\n println!(\"The following keyboard commands are supported:\");\n\n println!(\" t - pause/continue the simulation.\");\n\n println!(\" s - pause then execute only one simulation step.\");\n\n println!(\" 1 - launch a ball.\");\n\n println!(\" 2 - launch a cube.\");\n\n println!(\" 3 - launch a fast cube using continuous collision detection.\");\n\n println!(\" TAB - switch camera mode (first-person or arc-ball).\");\n\n println!(\" SHIFT + right click - launch a fast cube using continuous collision detection.\");\n\n println!(\" CTRL + left click + drag - select and drag an object using a ball-in-socket joint.\");\n\n println!(\" SHIFT + left click - remove an object.\");\n\n println!(\" arrows - move around when in first-person camera mode.\");\n\n println!(\" space - switch wireframe mode. 
When ON, the contacts points and normals are displayed.\");\n\n println!(\" b - draw the bounding boxes.\");\n", "file_path": "examples3/nphysics_testbed3d/src/testbed.rs", "rank": 35, "score": 112771.52153747866 }, { "content": "fn usage(exe_name: &str) {\n\n println!(\"Usage: {} [OPTION] \", exe_name);\n\n println!(\"\");\n\n println!(\"Options:\");\n\n println!(\" --help - prints this help message and exits.\");\n\n println!(\" --pause - do not start the simulation right away.\");\n\n println!(\"\");\n\n println!(\"The following keyboard commands are supported:\");\n\n println!(\" t - pause/continue the simulation.\");\n\n println!(\" s - pause then execute only one simulation step.\");\n\n println!(\" space - display/hide contacts.\");\n\n}\n\n\n\n\n", "file_path": "examples2/nphysics_testbed2d/src/testbed.rs", "rank": 36, "score": 112771.52153747866 }, { "content": "fn main() {\n\n /*\n\n * World\n\n */\n\n let mut world = World::new();\n\n world.set_gravity(Vec3::new(0.0, -9.81, 0.0));\n\n\n\n /*\n\n * Planes\n\n */\n\n let rb = RigidBody::new_static(Plane::new(Vec3::new(0.0, 1.0, 0.0)), 0.3, 0.6);\n\n\n\n world.add_body(rb);\n\n\n\n /*\n\n * First cuboid.\n\n */\n\n let rad = 5.0;\n\n let shift = (rad + 0.08) * 2.0;\n\n let x = shift / 2.0;\n", "file_path": "examples3/fixed_bug_minkowski_sampling_fails_on_touching_objects.rs", "rank": 37, "score": 111649.38619232432 }, { "content": "fn main() {\n\n /*\n\n * World\n\n */\n\n let mut world = World::new();\n\n world.set_gravity(Vec3::new(0.0, -9.81, 0.0));\n\n\n\n /*\n\n * Plane\n\n */\n\n let geom = Plane::new(Vec3::new(0.0, 1.0, 0.0));\n\n\n\n world.add_body(RigidBody::new_static(geom, 0.3, 0.6));\n\n\n\n /*\n\n * Create the boxes\n\n */\n\n let rad = 1.0;\n\n let x = rad;\n\n let y = rad + 10.0;\n", "file_path": "examples3/fixed_bug_long_thin_box_one_shot_manifold.rs", "rank": 38, "score": 109887.0085014813 }, { "content": "/// The center of mass of the convex hull of a set of points.\n\npub fn 
convex_hull_center_of_mass<P>(dim: usize, points: &[P]) -> P\n\n where P: Point,\n\n P::Vect: Outer,\n\n <P::Vect as Outer>::OuterProductType: EigenQR<<P::Vect as Vect>::Scalar, P::Vect> +\n\n Mul<P, Output = P> +\n\n Add<<P::Vect as Outer>::OuterProductType, Output = <P::Vect as Outer>::OuterProductType> +\n\n Zero + Copy {\n\n assert!(dim == 2 || dim == 3);\n\n\n\n match dim {\n\n 2 => {\n\n unimplemented!()\n\n }\n\n 3 => {\n\n let convex_mesh = transformation::convex_hull3(points);\n\n unsafe { convex_mesh_volume_and_center_of_mass(&convex_mesh).1 }\n\n }\n\n _ => {\n\n unimplemented!()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/volumetric/volumetric_convex.rs", "rank": 39, "score": 107288.87349878605 }, { "content": "#[inline(always)]\n\nfn solve_velocity_constraint(c: &mut VelocityConstraint, mj_lambda: &mut [Velocities]) {\n\n let id1 = c.id1;\n\n let id2 = c.id2;\n\n\n\n let mut d_lambda_i = c.objective.clone();\n\n\n\n if id1 >= 0 {\n\n d_lambda_i = d_lambda_i + na::dot(&c.normal, &mj_lambda[id1 as usize].lv)\n\n - na::dot(&c.rot_axis1, &mj_lambda[id1 as usize].av);\n\n }\n\n\n\n if id2 >= 0 {\n\n d_lambda_i = d_lambda_i - na::dot(&c.normal, &mj_lambda[id2 as usize].lv)\n\n - na::dot(&c.rot_axis2, &mj_lambda[id2 as usize].av);\n\n }\n\n\n\n d_lambda_i = d_lambda_i * c.inv_projected_mass;\n\n\n\n // clamp the value such that: lambda- <= lambda <= lambda+\n\n // (this is the ``projected'' flavour of Gauss-Seidel\n", "file_path": "src/resolution/constraint/projected_gauss_seidel_solver.rs", "rank": 40, "score": 106735.42956863364 }, { "content": "pub fn fill_constraint_geometry(normal: Vect,\n\n rot_axis1: Orientation,\n\n rot_axis2: Orientation,\n\n rb1: &Option<&RigidBody>,\n\n rb2: &Option<&RigidBody>,\n\n constraint: &mut VelocityConstraint) {\n\n constraint.normal = normal;\n\n constraint.inv_projected_mass = na::zero();\n\n\n\n match *rb1 {\n\n Some(ref rb) => {\n\n // rotation axis\n\n constraint.weighted_normal1 = constraint.normal * 
rb.inv_mass();\n\n constraint.rot_axis1 = rot_axis1;\n\n\n\n constraint.weighted_rot_axis1 = rb.inv_inertia().apply(&constraint.rot_axis1);\n\n\n\n constraint.inv_projected_mass = constraint.inv_projected_mass +\n\n na::dot(&constraint.normal, &constraint.weighted_normal1) +\n\n na::dot(&constraint.rot_axis1, &constraint.weighted_rot_axis1);\n", "file_path": "src/resolution/constraint/contact_equation.rs", "rank": 41, "score": 103498.58950170617 }, { "content": "#[inline]\n\npub fn write_anchor_id<'a, P>(anchor: &'a Anchor<P>, id: &mut isize) -> Option<Ref<'a, RigidBody>> {\n\n match anchor.body {\n\n Some(ref b) => {\n\n let rb = b.borrow();\n\n let can_move;\n\n let rid;\n\n\n\n {\n\n can_move = rb.can_move();\n\n rid = rb.index();\n\n }\n\n\n\n if can_move {\n\n *id = rid;\n\n\n\n Some(rb)\n\n }\n\n else {\n\n *id = -1;\n\n\n\n None\n\n }\n\n },\n\n None => { *id = -1; None }\n\n }\n\n}\n", "file_path": "src/resolution/constraint/ball_in_socket_equation.rs", "rank": 42, "score": 100134.42715632087 }, { "content": "pub fn draw_line(window: &mut graphics::RenderWindow, v1: &Pnt2<f32>, v2: &Pnt2<f32>, color: &Color) {\n\n let mut vertices = VertexArray::new().unwrap();\n\n\n\n vertices.append(&Vertex::new(\n\n &Vector2f::new(v1.x * DRAW_SCALE, v1.y * DRAW_SCALE),\n\n color,\n\n &Vector2f::new(0.0, 0.0)));\n\n\n\n vertices.append(&Vertex::new(\n\n &Vector2f::new(v2.x * DRAW_SCALE, v2.y * DRAW_SCALE),\n\n color,\n\n &Vector2f::new(0.0, 0.0)));\n\n\n\n vertices.set_primitive_type(graphics::Lines);\n\n\n\n window.draw(&vertices);\n\n}\n", "file_path": "examples2/nphysics_testbed2d/src/draw_helper.rs", "rank": 43, "score": 98644.32906705678 }, { "content": "/// Solve a set of velocity constraints using the projected gauss seidel solver.\n\n///\n\n/// # Arguments:\n\n/// * `restitution` - constraints to simulate the restitution.\n\n/// * `friction` - constraints to simulate friction.\n\n/// * `result` - vector which will contain the result afterward. 
Must have the size\n\n/// `num_bodies`.\n\n/// * `num_bodies` - the size of `result`.\n\n/// * `num_iterations` - the number of iterations to perform.\n\n/// * `is_lambda_zero` - indicates whether or not the every element of `result` has been\n\n/// reinitialized. Set this to `false` if the `result` comes from a previous execution of\n\n/// `projected_gauss_seidel_solve`: this will perform warm-starting.\n\npub fn projected_gauss_seidel_solve(restitution: &mut [VelocityConstraint],\n\n friction: &mut [VelocityConstraint],\n\n result: &mut [Velocities],\n\n num_bodies: usize,\n\n num_iterations: usize,\n\n is_lambda_zero: bool) {\n\n // initialize the solution with zeros...\n\n // mj_lambda is result\n\n assert!(result.len() == num_bodies);\n\n\n\n for v in result.iter_mut() {\n\n v.reset();\n\n }\n\n\n\n // ... and warm start if possible\n\n if !is_lambda_zero {\n\n for c in restitution.iter() {\n\n setup_warmstart_for_constraint(c, result);\n\n }\n\n\n", "file_path": "src/resolution/constraint/projected_gauss_seidel_solver.rs", "rank": 44, "score": 93392.6671731621 }, { "content": "#[inline(always)]\n\nfn setup_warmstart_for_constraint(c: &VelocityConstraint, mj_lambda: &mut [Velocities]) {\n\n let id1 = c.id1;\n\n let id2 = c.id2;\n\n\n\n if id1 >= 0 {\n\n mj_lambda[id1 as usize].lv = mj_lambda[id1 as usize].lv - c.weighted_normal1 * c.impulse;\n\n mj_lambda[id1 as usize].av = mj_lambda[id1 as usize].av + c.weighted_rot_axis1 * c.impulse;\n\n }\n\n\n\n if id2 >= 0 {\n\n mj_lambda[id2 as usize].lv = mj_lambda[id2 as usize].lv + c.weighted_normal2 * c.impulse;\n\n mj_lambda[id2 as usize].av = mj_lambda[id2 as usize].av + c.weighted_rot_axis2 * c.impulse;\n\n }\n\n}\n\n\n", "file_path": "src/resolution/constraint/projected_gauss_seidel_solver.rs", "rank": 45, "score": 91659.1652929175 }, { "content": "fn resize_buffer<A: Clone>(buff: &mut Vec<A>, size: usize, val: A) {\n\n if buff.len() < size {\n\n let diff = size - buff.len();\n\n 
buff.extend(iter::repeat(val).take(diff));\n\n }\n\n else {\n\n buff.truncate(size)\n\n }\n\n}\n", "file_path": "src/resolution/constraint/accumulated_impulse_solver.rs", "rank": 46, "score": 86250.49678072734 }, { "content": "/// Computes the transformation matrix required to move an object with a `lin_vel` linear velocity,\n\n/// a `ang_vil` angular velocity, and a center of mass `center_of_mass`, during the time step `dt`.\n\npub fn displacement(dt: Scalar, _: &Matrix, center_of_mass: &Point, lin_vel: &Vect, ang_vel: &Orientation) -> Matrix {\n\n let mut res: Matrix = na::one();\n\n res.append_rotation_wrt_point_mut(&(*ang_vel * dt), center_of_mass.as_vec());\n\n\n\n res.append_translation_mut(&(*lin_vel * dt));\n\n\n\n res\n\n}\n", "file_path": "src/integration/euler.rs", "rank": 47, "score": 81159.39043338227 }, { "content": "/// The angular inertia of the convex hull of a set of points.\n\npub fn convex_hull_unit_angular_inertia<P, I>(dim: usize, points: &[P]) -> I\n\n where P: Point,\n\n I: Zero +\n\n Add<I, Output = I> +\n\n Mul<<P::Vect as Vect>::Scalar, Output = I> +\n\n IndexMut<(usize, usize), Output = <P::Vect as Vect>::Scalar>,\n\n P::Vect: Outer,\n\n <P::Vect as Outer>::OuterProductType: EigenQR<<P::Vect as Vect>::Scalar, P::Vect> +\n\n Mul<P, Output = P> +\n\n Add<<P::Vect as Outer>::OuterProductType, Output = <P::Vect as Outer>::OuterProductType> +\n\n Zero + Copy {\n\n assert!(dim == 2 || dim == 3);\n\n\n\n match dim {\n\n 2 => {\n\n let convex_mesh = transformation::convex_hull2(points);\n\n let (area, _, i): (_, _, <P::Vect as Vect>::Scalar) =\n\n unsafe { convex_mesh_mass_properties2(&convex_mesh, na::one()) };\n\n let mut tensor: I = na::zero();\n\n tensor[(0, 0)] = i * (na::one::<<P::Vect as Vect>::Scalar>() / area);\n", "file_path": "src/volumetric/volumetric_convex.rs", "rank": 58, "score": 78248.85678188773 }, { "content": "struct TestbedState<'a> {\n\n running: RunMode,\n\n draw_colls: bool,\n\n camera: Camera,\n\n fps: Fps<'a>,\n\n 
grabbed_object: Option<Rc<RefCell<RigidBody>>>,\n\n grabbed_object_joint: Option<Rc<RefCell<Fixed>>>,\n\n}\n\n\n\nimpl<'a> TestbedState<'a> {\n\n fn new(fnt: &'a Font) -> TestbedState<'a> {\n\n TestbedState{\n\n running: RunMode::Running,\n\n draw_colls: false,\n\n camera: Camera::new(),\n\n fps: Fps::new(&fnt),\n\n grabbed_object: None,\n\n grabbed_object_joint: None,\n\n }\n\n }\n", "file_path": "examples2/nphysics_testbed2d/src/testbed.rs", "rank": 59, "score": 78140.24737068293 }, { "content": "#[inline]\n\npub fn cylinder_center_of_mass<P: Orig>() -> P {\n\n na::orig()\n\n}\n\n\n\n/// The unit angular inertia of a cylinder.\n", "file_path": "src/volumetric/volumetric_cylinder.rs", "rank": 60, "score": 76436.38216326323 }, { "content": "#[inline]\n\npub fn ball_center_of_mass<P: Orig>() -> P {\n\n na::orig()\n\n}\n\n\n\n/// The unit angular inertia of a ball.\n", "file_path": "src/volumetric/volumetric_ball.rs", "rank": 61, "score": 76436.38216326323 }, { "content": "#[inline]\n\npub fn cuboid_center_of_mass<P: Orig>() -> P {\n\n na::orig()\n\n}\n\n\n\n/// The unit angular inertia of a cuboid.\n", "file_path": "src/volumetric/volumetric_cuboid.rs", "rank": 62, "score": 76436.38216326323 }, { "content": "pub fn relative_velocity(rb1: &Option<&RigidBody>,\n\n rb2: &Option<&RigidBody>,\n\n normal: &Vect,\n\n rot_axis1: &Orientation,\n\n rot_axis2: &Orientation,\n\n dt: &Scalar)\n\n -> Scalar {\n\n let mut dvel: Scalar = na::zero();\n\n\n\n match *rb1 {\n\n Some(ref rb) => {\n\n dvel = dvel - na::dot(&(rb.lin_vel() + rb.lin_acc() * *dt), normal)\n\n + na::dot(&(rb.ang_vel() + rb.ang_acc() * *dt), rot_axis1);\n\n },\n\n None => { }\n\n }\n\n\n\n match *rb2 {\n\n Some(ref rb) => {\n\n dvel = dvel + na::dot(&(rb.lin_vel() + rb.lin_acc() * *dt), normal)\n\n + na::dot(&(rb.ang_vel() + rb.ang_acc() * *dt), rot_axis2);\n\n },\n\n None => { }\n\n }\n\n\n\n dvel\n\n}\n", "file_path": "src/resolution/constraint/contact_equation.rs", "rank": 63, "score": 74520.28538407512 }, 
{ "content": "/// The volume of the convex hull of a set of points.\n\npub fn convex_hull_volume<P>(dim: usize, points: &[P]) -> <P::Vect as Vect>::Scalar\n\n where P: Point,\n\n P::Vect: Outer,\n\n <P::Vect as Outer>::OuterProductType: EigenQR<<P::Vect as Vect>::Scalar, P::Vect> +\n\n Mul<P, Output = P> +\n\n Add<<P::Vect as Outer>::OuterProductType, Output = <P::Vect as Outer>::OuterProductType> +\n\n Zero + Copy {\n\n assert!(dim == 2 || dim == 3);\n\n\n\n match dim {\n\n 2 => {\n\n unimplemented!()\n\n }\n\n 3 => {\n\n let convex_mesh = transformation::convex_hull3(points);\n\n unsafe { convex_mesh_volume_and_center_of_mass(&convex_mesh).0 }\n\n }\n\n _ => {\n\n unimplemented!()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/volumetric/volumetric_convex.rs", "rank": 64, "score": 74512.51520842084 }, { "content": "/// The surface of a convex hull.\n\npub fn convex_hull_surface<P>(dim: usize, points: &[P]) -> <P::Vect as Vect>::Scalar\n\n where P: Point,\n\n P::Vect: Outer,\n\n <P::Vect as Outer>::OuterProductType: EigenQR<<P::Vect as Vect>::Scalar, P::Vect> +\n\n Mul<P, Output = P> +\n\n Add<<P::Vect as Outer>::OuterProductType, Output = <P::Vect as Outer>::OuterProductType> +\n\n Zero + Copy {\n\n assert!(dim == 2 || dim == 3);\n\n\n\n match dim {\n\n 2 => {\n\n unimplemented!()\n\n }\n\n 3 => {\n\n let convex_mesh = transformation::convex_hull3(points);\n\n unsafe { convex_mesh_surface(&convex_mesh) }\n\n }\n\n _ => {\n\n unimplemented!()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/volumetric/volumetric_convex.rs", "rank": 65, "score": 74508.36592599895 }, { "content": "use std::env;\n\nuse std::rc::Rc;\n\nuse std::cell::RefCell;\n\nuse std::path::Path;\n\nuse time;\n\nuse glfw;\n\nuse glfw::{MouseButton, Key, Action, WindowEvent};\n\nuse na::{Pnt2, Pnt3, Vec3, Translation, Translate, Iso3, Bounded};\n\nuse na;\n\nuse kiss3d::window::Window;\n\nuse kiss3d::light::Light;\n\nuse kiss3d::text::Font;\n\nuse kiss3d::loader::obj;\n\nuse ncollide::shape::{Cuboid, 
Ball};\n\nuse ncollide::ray;\n\nuse ncollide::ray::Ray;\n\nuse nphysics::detection::Detector;\n\nuse nphysics::detection::constraint::Constraint;\n\nuse nphysics::detection::joint::{Anchor, Fixed, Joint};\n\nuse nphysics::object::RigidBody;\n\nuse nphysics::world::World;\n\nuse engine::GraphicsManager;\n\n\n\n\n", "file_path": "examples3/nphysics_testbed3d/src/testbed.rs", "rank": 66, "score": 72788.87922408701 }, { "content": "\n\n rb.set_lin_vel(front * 40.0f32);\n\n\n\n let body = self.world.add_body(rb);\n\n self.graphics.add(&mut self.window, body.clone());\n\n },\n\n WindowEvent::Key(Key::Num2, _, Action::Press, _) => {\n\n let geom = Cuboid::new(Vec3::new(0.5f32, 0.5, 0.5));\n\n let mut rb = RigidBody::new_dynamic(geom, 4.0f32, 0.3, 0.6);\n\n\n\n let cam_transform;\n\n\n\n {\n\n let cam = self.graphics.camera();\n\n cam_transform = cam.view_transform();\n\n }\n\n\n\n rb.append_translation(&na::translation(&cam_transform));\n\n\n\n let front = na::rotate(&cam_transform, &Vec3::z());\n", "file_path": "examples3/nphysics_testbed3d/src/testbed.rs", "rank": 67, "score": 72787.22942627285 }, { "content": " self.window.set_framerate_limit(Some(60));\n\n self.window.set_light(Light::StickToCamera);\n\n\n\n\n\n while !self.window.should_close() {\n\n for mut event in self.window.events().iter() {\n\n match event.value {\n\n WindowEvent::MouseButton(MouseButton::Button2, Action::Press, glfw::Control) => {\n\n let geom = Cuboid::new(Vec3::new(0.5f32, 0.5f32, 0.5f32));\n\n let mut rb = RigidBody::new_dynamic(geom, 4.0f32, 0.3, 0.6);\n\n\n\n let size = self.window.size();\n\n let (pos, dir) = self.graphics.camera().unproject(&cursor_pos, &size);\n\n\n\n rb.set_translation(pos.to_vec());\n\n rb.set_lin_vel(dir * 1000.0f32);\n\n\n\n let body = self.world.add_body(rb);\n\n self.world.add_ccd_to(&body, 1.0);\n\n self.graphics.add(&mut self.window, body);\n", "file_path": "examples3/nphysics_testbed3d/src/testbed.rs", "rank": 68, "score": 72785.09693706369 }, { "content": " 
}\n\n else {\n\n self.window.scene_mut().set_lines_width(0.0);\n\n self.window.scene_mut().set_surface_rendering_activation(true);\n\n }\n\n },\n\n WindowEvent::Key(Key::Num1, _, Action::Press, _) => {\n\n let geom = Ball::new(0.5f32);\n\n let mut rb = RigidBody::new_dynamic(geom, 4.0f32, 0.3, 0.6);\n\n\n\n let cam_transfom;\n\n\n\n {\n\n let cam = self.graphics.camera();\n\n cam_transfom = cam.view_transform();\n\n }\n\n\n\n rb.append_translation(&na::translation(&cam_transfom));\n\n\n\n let front = na::rotate(&cam_transfom, &Vec3::z());\n", "file_path": "examples3/nphysics_testbed3d/src/testbed.rs", "rank": 69, "score": 72783.24140920432 }, { "content": " }\n\n\n\n pub fn set_color(&mut self, rb: &Rc<RefCell<RigidBody>>, color: Pnt3<f32>) {\n\n self.graphics.set_color(rb, color);\n\n }\n\n\n\n pub fn load_obj(path: &str) -> Vec<(Vec<Pnt3<f32>>, Vec<usize>)> {\n\n let path = Path::new(path);\n\n let empty = Path::new(\"_some_non_existant_folder\"); // dont bother loading mtl files correctly\n\n let objects = obj::parse_file(&path, &empty, \"\").ok().expect(\"Unable to open the obj file.\");\n\n\n\n let mut res = Vec::new();\n\n\n\n for (_, m, _) in objects.into_iter() {\n\n let vertices = m.coords().read().unwrap().to_owned().unwrap();\n\n let indices = m.faces().read().unwrap().to_owned().unwrap();\n\n\n\n let mut flat_indices = Vec::new();\n\n\n\n for i in indices.into_iter() {\n", "file_path": "examples3/nphysics_testbed3d/src/testbed.rs", "rank": 70, "score": 72780.08749161646 }, { "content": " world: World::new(),\n\n window: window,\n\n graphics: graphics\n\n }\n\n }\n\n\n\n pub fn new(world: World) -> Testbed<'a> {\n\n let mut res = Testbed::new_empty();\n\n\n\n res.set_world(world);\n\n\n\n res\n\n }\n\n\n\n pub fn set_world(&mut self, world: World) {\n\n self.world = world;\n\n self.graphics.clear();\n\n\n\n for rb in self.world.bodies() {\n\n self.graphics.add(rb.clone());\n", "file_path": "examples2/nphysics_testbed2d/src/testbed.rs", "rank": 71, 
"score": 72779.65600592185 }, { "content": " pub fn new(world: World) -> Testbed {\n\n let mut res = Testbed::new_empty();\n\n\n\n res.set_world(world);\n\n\n\n res\n\n }\n\n\n\n pub fn set_world(&mut self, world: World) {\n\n self.world = world;\n\n\n\n self.graphics.clear(&mut self.window);\n\n\n\n for rb in self.world.bodies() {\n\n self.graphics.add(&mut self.window, rb.clone());\n\n }\n\n }\n\n\n\n pub fn look_at(&mut self, eye: Pnt3<f32>, at: Pnt3<f32>) {\n\n self.graphics.look_at(eye, at);\n", "file_path": "examples3/nphysics_testbed3d/src/testbed.rs", "rank": 72, "score": 72779.61994854781 }, { "content": "use std::env;\n\nuse std::rc::Rc;\n\nuse std::cell::RefCell;\n\nuse sfml::graphics::{RenderWindow, RenderTarget, Font};\n\nuse sfml::window::{ContextSettings, VideoMode, Close};\n\nuse sfml::window::event;\n\nuse sfml::window::keyboard::Key;\n\nuse sfml::window::mouse::MouseButton;\n\nuse sfml::graphics::Color;\n\nuse sfml::system::vector2::Vector2i;\n\nuse na::{Pnt2, Pnt3, Iso2};\n\nuse na;\n\nuse nphysics::world::World;\n\nuse nphysics::object::RigidBody;\n\nuse nphysics::detection::joint::{Fixed, Anchor};\n\nuse camera::Camera;\n\nuse fps::Fps;\n\nuse engine::GraphicsManager;\n\nuse draw_helper;\n\n\n", "file_path": "examples2/nphysics_testbed2d/src/testbed.rs", "rank": 73, "score": 72775.32931350032 }, { "content": " }\n\n\n\n match grabbed_object {\n\n Some(ref b) => {\n\n for sn in self.graphics.body_to_scene_node(b).unwrap().iter_mut() {\n\n match grabbed_object_joint {\n\n Some(ref j) => self.world.remove_fixed(j),\n\n None => { }\n\n }\n\n\n\n let _1: Iso3<f32> = na::one();\n\n let attach2 = na::append_translation(&_1, (ray.orig + ray.dir * mintoi).as_vec());\n\n let attach1 = na::inv(&na::transformation(b.borrow().position())).unwrap() * attach2;\n\n let anchor1 = Anchor::new(Some(minb.as_ref().unwrap().clone()), attach1);\n\n let anchor2 = Anchor::new(None, attach2);\n\n let joint = Fixed::new(anchor1, anchor2);\n\n grabbed_object_plane = 
(attach2.translate(&na::orig()), -ray.dir);\n\n grabbed_object_joint = Some(self.world.add_fixed(joint));\n\n // add a joint\n\n sn.select()\n", "file_path": "examples3/nphysics_testbed3d/src/testbed.rs", "rank": 74, "score": 72775.23017757965 }, { "content": "\n\n fn process_mouse_moved(&mut self, state: &mut TestbedState, x: i32, y: i32) {\n\n let mapped_coords = state.camera.map_pixel_to_coords(Vector2i::new(x, y));\n\n let mapped_point = Pnt2::new(mapped_coords.x, mapped_coords.y);\n\n let _1: Iso2<f32> = na::one();\n\n let attach2 = na::append_translation(&_1, (mapped_point).as_vec());\n\n match state.grabbed_object {\n\n Some(_) => {\n\n let joint = state.grabbed_object_joint.as_ref().unwrap();\n\n joint.borrow_mut().set_local2(attach2);\n\n },\n\n None => state.camera.handle_event(&event::MouseMoved{x: x, y: y})\n\n };\n\n }\n\n\n\n fn progress_world(&mut self, state: &mut TestbedState) {\n\n if state.running != RunMode::Stop {\n\n self.world.step(0.016);\n\n }\n\n\n", "file_path": "examples2/nphysics_testbed2d/src/testbed.rs", "rank": 75, "score": 72774.40733879019 }, { "content": "\n\n rb.set_lin_vel(front * 40.0f32);\n\n\n\n let body = self.world.add_body(rb);\n\n self.graphics.add(&mut self.window, body.clone());\n\n }\n\n _ => { }\n\n }\n\n }\n\n\n\n let dt;\n\n\n\n if running != RunMode::Stop {\n\n let before = time::precise_time_s();\n\n self.world.step(0.016);\n\n dt = time::precise_time_s() - before;\n\n\n\n self.graphics.draw();\n\n }\n\n else {\n", "file_path": "examples3/nphysics_testbed3d/src/testbed.rs", "rank": 76, "score": 72774.24654575837 }, { "content": " let b = minb.as_ref().unwrap();\n\n if b.borrow().can_move() {\n\n self.world.remove_body(b);\n\n self.graphics.remove(&mut self.window, b);\n\n }\n\n }\n\n\n\n event.inhibited = true;\n\n }\n\n else if modifier.contains(glfw::Control) {\n\n match grabbed_object {\n\n Some(ref rb) => {\n\n for sn in self.graphics.body_to_scene_node(rb).unwrap().iter_mut() {\n\n sn.unselect()\n\n }\n\n 
},\n\n None => { }\n\n }\n\n\n\n // XXX: huge and uggly code duplication\n", "file_path": "examples3/nphysics_testbed3d/src/testbed.rs", "rank": 77, "score": 72772.82459360694 }, { "content": " if &arg[..] == \"--help\" || &arg[..] == \"-h\" {\n\n usage(&exname[..]);\n\n return;\n\n }\n\n else if &arg[..] == \"--pause\" {\n\n running = RunMode::Stop;\n\n }\n\n }\n\n }\n\n\n\n let font_mem = include_bytes!(\"Inconsolata.otf\");\n\n let font = Font::from_memory(font_mem, 60);\n\n let mut draw_colls = false;\n\n\n\n let mut cursor_pos = Pnt2::new(0.0f32, 0.0);\n\n let mut grabbed_object: Option<Rc<RefCell<RigidBody>>> = None;\n\n let mut grabbed_object_joint: Option<Rc<RefCell<Fixed>>> = None;\n\n let mut grabbed_object_plane: (Pnt3<f32>, Vec3<f32>) = (na::orig(), na::zero());\n\n\n\n\n", "file_path": "examples3/nphysics_testbed3d/src/testbed.rs", "rank": 78, "score": 72770.81909511476 }, { "content": " state.grabbed_object_joint = Some(self.world.add_fixed(joint));\n\n\n\n for node in self.graphics.body_to_scene_node(b).unwrap().iter_mut() {\n\n node.select()\n\n }\n\n },\n\n None => { }\n\n }\n\n },\n\n _ => {\n\n state.camera.handle_event(&event::MouseButtonPressed{ button: button, x: x, y: y })\n\n }\n\n }\n\n }\n\n\n\n fn process_mouse_release(&mut self, state: &mut TestbedState, button: MouseButton, x: i32, y: i32) {\n\n match button {\n\n MouseButton::MouseLeft => {\n\n match state.grabbed_object {\n\n Some(ref b) => {\n", "file_path": "examples2/nphysics_testbed2d/src/testbed.rs", "rank": 79, "score": 72770.30552867577 }, { "content": " }\n\n }\n\n\n\n pub fn set_color(&mut self, body: &Rc<RefCell<RigidBody>>, color: Pnt3<f32>) {\n\n let color = Pnt3::new(\n\n (color.x * 255.0) as u8,\n\n (color.y * 255.0) as u8,\n\n (color.z * 255.0) as u8\n\n );\n\n\n\n self.graphics.set_color(body, color);\n\n }\n\n\n\n pub fn run(&mut self) {\n\n let font_mem = include_bytes!(\"Inconsolata.otf\");\n\n let fnt = Font::new_from_memory(font_mem).unwrap();\n\n\n\n let mut 
state = TestbedState::new(&fnt);\n\n\n\n let mut args = env::args();\n", "file_path": "examples2/nphysics_testbed2d/src/testbed.rs", "rank": 80, "score": 72770.27149572162 }, { "content": " else {\n\n running = RunMode::Stop;\n\n }\n\n },\n\n WindowEvent::Key(Key::S, _, Action::Release, _) => running = RunMode::Step,\n\n WindowEvent::Key(Key::B, _, Action::Release, _) => {\n\n // XXX: there is a bug on kiss3d with the removal of objects.\n\n // draw_aabbs = !draw_aabbs;\n\n // if draw_aabbs {\n\n // graphics.enable_aabb_draw(&mut self.window);\n\n // }\n\n // else {\n\n // graphics.disable_aabb_draw(&mut self.window);\n\n // }\n\n },\n\n WindowEvent::Key(Key::Space, _, Action::Release, _) => {\n\n draw_colls = !draw_colls;\n\n if draw_colls {\n\n self.window.scene_mut().set_lines_width(1.0);\n\n self.window.scene_mut().set_surface_rendering_activation(false);\n", "file_path": "examples3/nphysics_testbed3d/src/testbed.rs", "rank": 81, "score": 72770.24309403414 }, { "content": "}\n\n\n\nimpl<'a> Testbed<'a> {\n\n pub fn new_empty() -> Testbed<'a> {\n\n let mode = VideoMode::new_init(800, 600, 32);\n\n let setting = ContextSettings {\n\n depth_bits: 10,\n\n stencil_bits: 10,\n\n antialiasing_level: 2,\n\n major_version: 0,\n\n minor_version: 1\n\n };\n\n let window =\n\n match RenderWindow::new(mode, \"nphysics 2d demo\", Close, &setting) {\n\n Some(rwindow) => rwindow,\n\n None => panic!(\"Error on creating the sfml window.\")\n\n };\n\n let graphics = GraphicsManager::new();\n\n\n\n Testbed {\n", "file_path": "examples2/nphysics_testbed2d/src/testbed.rs", "rank": 82, "score": 72770.2100735038 }, { "content": "}\n\n\n\npub struct Testbed {\n\n world: World,\n\n window: Window,\n\n graphics: GraphicsManager,\n\n}\n\n\n\nimpl Testbed {\n\n pub fn new_empty() -> Testbed {\n\n let graphics = GraphicsManager::new();\n\n let window = Window::new(\"nphysics: 3d demo\");\n\n\n\n Testbed {\n\n world: World::new(),\n\n window: window,\n\n graphics: graphics\n\n }\n\n 
}\n\n\n", "file_path": "examples3/nphysics_testbed3d/src/testbed.rs", "rank": 83, "score": 72768.62371409657 }, { "content": "\n\n fn run_loop(&mut self, mut state: TestbedState) {\n\n while self.window.is_open() {\n\n self.process_events(&mut state);\n\n\n\n self.window.clear(&Color::black());\n\n\n\n state.fps.reset();\n\n\n\n self.progress_world(&mut state);\n\n\n\n state.fps.register_delta();\n\n\n\n self.graphics.draw(&mut self.window, &state.camera);\n\n\n\n state.camera.activate_scene(&mut self.window);\n\n self.draw_collisions(&mut state);\n\n\n\n state.camera.activate_ui(&mut self.window);\n\n state.fps.draw_registered(&mut self.window);\n", "file_path": "examples2/nphysics_testbed2d/src/testbed.rs", "rank": 84, "score": 72768.49634148588 }, { "content": " j.borrow_mut().set_local2(na::append_translation(&_1, (pos + dir * inter).as_vec()))\n\n },\n\n None => { }\n\n }\n\n\n\n },\n\n None => { }\n\n }\n\n\n\n event.inhibited =\n\n self.window.glfw_window().get_key(Key::RightShift) != Action::Release ||\n\n self.window.glfw_window().get_key(Key::LeftShift) != Action::Release ||\n\n self.window.glfw_window().get_key(Key::RightControl) != Action::Release ||\n\n self.window.glfw_window().get_key(Key::LeftControl) != Action::Release;\n\n },\n\n WindowEvent::Key(Key::Tab, _, Action::Release, _) => self.graphics.switch_cameras(),\n\n WindowEvent::Key(Key::T, _, Action::Release, _) => {\n\n if running == RunMode::Stop {\n\n running = RunMode::Running;\n\n }\n", "file_path": "examples3/nphysics_testbed3d/src/testbed.rs", "rank": 85, "score": 72767.91948754215 }, { "content": " }\n\n },\n\n None => { }\n\n }\n\n\n\n event.inhibited = true;\n\n }\n\n },\n\n WindowEvent::MouseButton(_, Action::Release, _) => {\n\n match grabbed_object {\n\n Some(ref b) => {\n\n for sn in self.graphics.body_to_scene_node(b).unwrap().iter_mut() {\n\n sn.unselect()\n\n }\n\n },\n\n None => { }\n\n }\n\n\n\n match grabbed_object_joint {\n\n Some(ref j) => self.world.remove_fixed(j),\n", 
"file_path": "examples3/nphysics_testbed3d/src/testbed.rs", "rank": 86, "score": 72767.8942324729 }, { "content": " for node in self.graphics.body_to_scene_node(b).unwrap().iter_mut() {\n\n node.unselect()\n\n }\n\n },\n\n None => { }\n\n }\n\n\n\n match state.grabbed_object_joint {\n\n Some(ref j) => self.world.remove_fixed(j),\n\n None => { }\n\n }\n\n\n\n state.grabbed_object = None;\n\n state.grabbed_object_joint = None;\n\n },\n\n _ => {\n\n state.camera.handle_event(&event::MouseButtonReleased{ button: button, x: x, y: y })\n\n }\n\n }\n\n }\n", "file_path": "examples2/nphysics_testbed2d/src/testbed.rs", "rank": 87, "score": 72767.19492817084 }, { "content": " },\n\n WindowEvent::MouseButton(MouseButton::Button1, Action::Press, modifier) => {\n\n if modifier.contains(glfw::Shift) {\n\n // XXX: huge and uggly code duplication\n\n let size = self.window.size();\n\n let (pos, dir) = self.graphics.camera().unproject(&cursor_pos, &size);\n\n let ray = Ray::new(pos, dir);\n\n\n\n // cast the ray\n\n let mut mintoi = Bounded::max_value();\n\n let mut minb = None;\n\n\n\n self.world.interferences_with_ray(&ray, |b, inter| {\n\n if inter.toi < mintoi {\n\n mintoi = inter.toi;\n\n minb = Some(b.clone());\n\n }\n\n });\n\n\n\n if minb.is_some() {\n", "file_path": "examples3/nphysics_testbed3d/src/testbed.rs", "rank": 88, "score": 72766.98292723321 }, { "content": " if state.running == RunMode::Step {\n\n state.running = RunMode::Stop;\n\n }\n\n }\n\n\n\n fn draw_collisions(&mut self, state: &mut TestbedState) {\n\n if state.draw_colls {\n\n draw_helper::draw_colls(&mut self.window, &mut self.world);\n\n }\n\n }\n\n}\n", "file_path": "examples2/nphysics_testbed2d/src/testbed.rs", "rank": 89, "score": 72766.85845505339 }, { "content": " dt = 0.0;\n\n }\n\n\n\n if running == RunMode::Step {\n\n running = RunMode::Stop;\n\n }\n\n\n\n if draw_colls {\n\n self.graphics.draw_positions(&mut self.window);\n\n draw_collisions(&mut self.window, &mut self.world);\n\n }\n\n\n\n let 
color = Pnt3::new(1.0, 1.0, 1.0);\n\n\n\n if running != RunMode::Stop {\n\n self.window.draw_text(&dt.to_string()[..], &na::orig(), &font, &color);\n\n }\n\n else {\n\n self.window.draw_text(\"Paused\", &na::orig(), &font, &color);\n\n }\n\n\n\n self.window.render_with_camera(self.graphics.camera());\n\n }\n\n }\n\n}\n\n\n", "file_path": "examples3/nphysics_testbed3d/src/testbed.rs", "rank": 90, "score": 72765.91186437458 }, { "content": " let mapped_point = Pnt2::new(mapped_coords.x, mapped_coords.y);\n\n self.world.interferences_with_point(&mapped_point, |b| {\n\n if b.borrow().can_move() {\n\n state.grabbed_object = Some(b.clone())\n\n }\n\n });\n\n\n\n match state.grabbed_object {\n\n Some(ref b) => {\n\n match state.grabbed_object_joint {\n\n Some(ref j) => self.world.remove_fixed(j),\n\n None => { }\n\n }\n\n\n\n let _1: Iso2<f32> = na::one();\n\n let attach2 = na::append_translation(&_1, mapped_point.as_vec());\n\n let attach1 = na::inv(&na::transformation(b.borrow().position())).unwrap() * attach2;\n\n let anchor1 = Anchor::new(Some(state.grabbed_object.as_ref().unwrap().clone()), attach1);\n\n let anchor2 = Anchor::new(None, attach2);\n\n let joint = Fixed::new(anchor1, anchor2);\n", "file_path": "examples2/nphysics_testbed2d/src/testbed.rs", "rank": 91, "score": 72765.37938664561 }, { "content": " let size = self.window.size();\n\n let (pos, dir) = self.graphics.camera().unproject(&cursor_pos, &size);\n\n let ray = Ray::new(pos, dir);\n\n\n\n // cast the ray\n\n let mut mintoi = Bounded::max_value();\n\n let mut minb = None;\n\n\n\n self.world.interferences_with_ray(&ray, |b, inter| {\n\n if inter.toi < mintoi {\n\n mintoi = inter.toi;\n\n minb = Some(b.clone());\n\n }\n\n });\n\n\n\n if minb.is_some() {\n\n let b = minb.as_ref().unwrap();\n\n if b.borrow().can_move() {\n\n grabbed_object = Some(b.clone())\n\n }\n", "file_path": "examples3/nphysics_testbed3d/src/testbed.rs", "rank": 92, "score": 72764.81361783354 }, { "content": "\n\n 
self.window.display();\n\n }\n\n }\n\n\n\n fn process_events(&mut self, mut state: &mut TestbedState) {\n\n loop {\n\n match self.window.poll_event() {\n\n event::KeyPressed{code, ..} => self.process_key_press(&mut state, code),\n\n event::MouseButtonPressed{button, x, y} => self.process_mouse_press(&mut state, button, x, y),\n\n event::MouseButtonReleased{button, x, y} => self.process_mouse_release(&mut state, button, x, y),\n\n event::MouseMoved{x, y} => self.process_mouse_moved(&mut state, x, y),\n\n event::Closed => self.window.close(),\n\n event::NoEvent => break,\n\n e => state.camera.handle_event(&e)\n\n }\n\n }\n\n }\n\n\n\n fn process_key_press(&mut self, state: &mut TestbedState, code: Key) {\n", "file_path": "examples2/nphysics_testbed2d/src/testbed.rs", "rank": 93, "score": 72763.5657066528 }, { "content": " None => { }\n\n }\n\n\n\n grabbed_object = None;\n\n grabbed_object_joint = None;\n\n },\n\n WindowEvent::CursorPos(x, y) => {\n\n cursor_pos.x = x as f32;\n\n cursor_pos.y = y as f32;\n\n\n\n // update the joint\n\n match grabbed_object_joint {\n\n Some(ref j) => {\n\n let size = self.window.size();\n\n let (pos, dir) = self.graphics.camera().unproject(&cursor_pos, &size);\n\n let (ref ppos, ref pdir) = grabbed_object_plane;\n\n\n\n match ray::plane_toi_with_ray(ppos, pdir, &Ray::new(pos, dir)) {\n\n Some(inter) => {\n\n let _1: Iso3<f32> = na::one();\n", "file_path": "examples3/nphysics_testbed3d/src/testbed.rs", "rank": 94, "score": 72761.7652461097 }, { "content": " match code {\n\n Key::Escape => self.window.close(),\n\n Key::S => state.running = RunMode::Step,\n\n Key::Space => state.draw_colls = !state.draw_colls,\n\n Key::T => {\n\n if state.running == RunMode::Stop {\n\n state.running = RunMode::Running;\n\n }\n\n else {\n\n state.running = RunMode::Stop;\n\n }\n\n },\n\n _ => { }\n\n }\n\n }\n\n\n\n fn process_mouse_press(&mut self, state: &mut TestbedState, button: MouseButton, x: i32, y: i32) {\n\n match button {\n\n 
MouseButton::MouseLeft => {\n\n let mapped_coords = state.camera.map_pixel_to_coords(Vector2i::new(x, y));\n", "file_path": "examples2/nphysics_testbed2d/src/testbed.rs", "rank": 95, "score": 72761.30609040317 }, { "content": " flat_indices.push(i.x as usize);\n\n flat_indices.push(i.y as usize);\n\n flat_indices.push(i.z as usize);\n\n }\n\n\n\n let m = (vertices, flat_indices);\n\n\n\n res.push(m);\n\n }\n\n\n\n res\n\n }\n\n\n\n pub fn run(&mut self) {\n\n let mut args = env::args();\n\n let mut running = RunMode::Running;\n\n\n\n if args.len() > 1 {\n\n let exname = args.next().unwrap();\n\n for arg in args {\n", "file_path": "examples3/nphysics_testbed3d/src/testbed.rs", "rank": 96, "score": 72759.64564178252 }, { "content": "\n\n if args.len() > 1 {\n\n let exname = args.next().unwrap();\n\n for arg in args {\n\n if &arg[..] == \"--help\" || &arg[..] == \"-h\" {\n\n usage(&exname[..]);\n\n return;\n\n }\n\n else if &arg[..] == \"--pause\" {\n\n state.running = RunMode::Stop;\n\n }\n\n }\n\n }\n\n\n\n self.window.set_framerate_limit(60);\n\n\n\n self.run_loop(state);\n\n\n\n self.window.close();\n\n }\n", "file_path": "examples2/nphysics_testbed2d/src/testbed.rs", "rank": 97, "score": 72758.78635088053 }, { "content": "use std::rc::Rc;\n\nuse std::cell::RefCell;\n\nuse na;\n\nuse kiss3d::window;\n\nuse kiss3d::scene::SceneNode;\n\nuse na::{Pnt3, Vec3};\n\nuse nphysics::object::RigidBody;\n\n\n\npub struct Plane {\n\n gfx: SceneNode,\n\n body: Rc<RefCell<RigidBody>>\n\n}\n\n\n\nimpl Plane {\n\n pub fn new(body: Rc<RefCell<RigidBody>>,\n\n world_pos: &Pnt3<f32>,\n\n world_normal: &Vec3<f32>,\n\n color: Pnt3<f32>,\n\n window: &mut window::Window) -> Plane {\n\n let mut res = Plane {\n", "file_path": "examples3/nphysics_testbed3d/src/objects/plane.rs", "rank": 98, "score": 71017.90574334418 }, { "content": " gfx: window.add_quad(100.0, 100.0, 10, 10),\n\n body: body\n\n };\n\n\n\n res.gfx.set_color(color.x, color.y, color.z);\n\n\n\n let up;\n\n\n\n if 
na::is_zero(&world_normal.z) && na::is_zero(&world_normal.y) {\n\n up = Vec3::z();\n\n }\n\n else {\n\n up = Vec3::x();\n\n }\n\n\n\n res.gfx.look_at_z(world_pos, &(*world_pos + *world_normal), &up);\n\n\n\n res.update();\n\n\n\n res\n", "file_path": "examples3/nphysics_testbed3d/src/objects/plane.rs", "rank": 99, "score": 71006.52843579637 } ]
Rust
src/content/content_encoding.rs
felippemr/http-types
f77c653d6703192430b4ba8fb016fe17ba8d457f
use crate::content::{Encoding, EncodingProposal}; use crate::headers::{HeaderName, HeaderValue, Headers, ToHeaderValues, CONTENT_ENCODING}; use std::fmt::{self, Debug}; use std::ops::{Deref, DerefMut}; use std::option; pub struct ContentEncoding { inner: Encoding, } impl ContentEncoding { pub fn new(encoding: Encoding) -> Self { Self { inner: encoding } } pub fn from_headers(headers: impl AsRef<Headers>) -> crate::Result<Option<Self>> { let headers = match headers.as_ref().get(CONTENT_ENCODING) { Some(headers) => headers, None => return Ok(None), }; let mut inner = None; for value in headers { if let Some(entry) = Encoding::from_str(value.as_str()) { inner = Some(entry); } } let inner = inner.expect("Headers instance with no entries found"); Ok(Some(Self { inner })) } pub fn apply(&self, mut headers: impl AsMut<Headers>) { headers.as_mut().insert(CONTENT_ENCODING, self.value()); } pub fn name(&self) -> HeaderName { CONTENT_ENCODING } pub fn value(&self) -> HeaderValue { self.inner.into() } pub fn encoding(&self) -> Encoding { self.inner } } impl ToHeaderValues for ContentEncoding { type Iter = option::IntoIter<HeaderValue>; fn to_header_values(&self) -> crate::Result<Self::Iter> { Ok(self.value().to_header_values().unwrap()) } } impl Deref for ContentEncoding { type Target = Encoding; fn deref(&self) -> &Self::Target { &self.inner } } impl DerefMut for ContentEncoding { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.inner } } impl PartialEq<Encoding> for ContentEncoding { fn eq(&self, other: &Encoding) -> bool { &self.inner == other } } impl PartialEq<&Encoding> for ContentEncoding { fn eq(&self, other: &&Encoding) -> bool { &&self.inner == other } } impl From<Encoding> for ContentEncoding { fn from(encoding: Encoding) -> Self { Self { inner: encoding } } } impl From<&Encoding> for ContentEncoding { fn from(encoding: &Encoding) -> Self { Self { inner: *encoding } } } impl From<EncodingProposal> for ContentEncoding { fn from(encoding: EncodingProposal) -> 
Self { Self { inner: encoding.encoding, } } } impl From<&EncodingProposal> for ContentEncoding { fn from(encoding: &EncodingProposal) -> Self { Self { inner: encoding.encoding, } } } impl Debug for ContentEncoding { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.inner.fmt(f) } }
use crate::content::{Encoding, EncodingProposal}; use crate::headers::{HeaderName, HeaderValue, Headers, ToHeaderValues, CONTENT_ENCODING}; use std::fmt::{self, Debug}; use std::ops::{Deref, DerefMut}; use std::option; pub struct ContentEncoding { inner: Encoding, } impl ContentEncoding { pub fn new(encoding: Encoding) -> Self { Self { inner: encoding } } pub fn from_headers(headers: impl AsRef<Headers>) -> crate::Result<Option<Self>> { let headers = match headers.as_ref().get(CONTENT_ENCODING) { Some(headers) => headers, None => return Ok(None), }; let mut inner = None; for value in headers { if let Some(entry) = Encoding::from_str(value.as_str()) { inner = Some(entry); } } let inn
TENT_ENCODING } pub fn value(&self) -> HeaderValue { self.inner.into() } pub fn encoding(&self) -> Encoding { self.inner } } impl ToHeaderValues for ContentEncoding { type Iter = option::IntoIter<HeaderValue>; fn to_header_values(&self) -> crate::Result<Self::Iter> { Ok(self.value().to_header_values().unwrap()) } } impl Deref for ContentEncoding { type Target = Encoding; fn deref(&self) -> &Self::Target { &self.inner } } impl DerefMut for ContentEncoding { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.inner } } impl PartialEq<Encoding> for ContentEncoding { fn eq(&self, other: &Encoding) -> bool { &self.inner == other } } impl PartialEq<&Encoding> for ContentEncoding { fn eq(&self, other: &&Encoding) -> bool { &&self.inner == other } } impl From<Encoding> for ContentEncoding { fn from(encoding: Encoding) -> Self { Self { inner: encoding } } } impl From<&Encoding> for ContentEncoding { fn from(encoding: &Encoding) -> Self { Self { inner: *encoding } } } impl From<EncodingProposal> for ContentEncoding { fn from(encoding: EncodingProposal) -> Self { Self { inner: encoding.encoding, } } } impl From<&EncodingProposal> for ContentEncoding { fn from(encoding: &EncodingProposal) -> Self { Self { inner: encoding.encoding, } } } impl Debug for ContentEncoding { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { self.inner.fmt(f) } }
er = inner.expect("Headers instance with no entries found"); Ok(Some(Self { inner })) } pub fn apply(&self, mut headers: impl AsMut<Headers>) { headers.as_mut().insert(CONTENT_ENCODING, self.value()); } pub fn name(&self) -> HeaderName { CON
random
[ { "content": "#[inline]\n\npub fn powered_by(mut headers: impl AsMut<Headers>, value: Option<HeaderValue>) {\n\n let name = HeaderName::from_lowercase_str(\"X-Powered-By\");\n\n match value {\n\n Some(value) => {\n\n headers.as_mut().insert(name, value);\n\n }\n\n None => {\n\n headers.as_mut().remove(name);\n\n }\n\n };\n\n}\n\n\n\n/// Sets the `Strict-Transport-Security` header to keep your users on `HTTPS`.\n\n///\n\n/// Note that the header won’t tell users on HTTP to switch to HTTPS, it will tell HTTPS users to\n\n/// stick around. Defaults to 60 days.\n\n///\n\n/// [read more](https://helmetjs.github.io/docs/hsts/)\n\n///\n\n// /// ## Examples\n\n// /// ```\n\n// /// use http_types::Response;\n\n// ///\n\n// /// let mut res = Response::new(StatusCode::Ok);\n\n// /// http_types::security::hsts(&mut headers);\n\n// /// assert_eq!(headers[\"Strict-Transport-Security\"], \"max-age=5184000\");\n\n// /// ```\n", "file_path": "src/security/mod.rs", "rank": 0, "score": 224425.76250188696 }, { "content": "/// Apply a set of default protections.\n\n///\n\n// /// ## Examples\n\n// /// ```\n\n// /// use http_types::Response;\n\n// ///\n\n// /// let mut res = Response::new(StatusCode::Ok);\n\n// /// http_types::security::default(&mut headers);\n\n// /// assert_eq!(headers[\"X-Content-Type-Options\"], \"nosniff\");\n\n// /// assert_eq!(headers[\"X-XSS-Protection\"], \"1; mode=block\");\n\n// /// ```\n\npub fn default(mut headers: impl AsMut<Headers>) {\n\n dns_prefetch_control(&mut headers);\n\n nosniff(&mut headers);\n\n frameguard(&mut headers, None);\n\n powered_by(&mut headers, None);\n\n hsts(&mut headers);\n\n xss_filter(&mut headers);\n\n}\n\n\n\n/// Disable browsers’ DNS prefetching by setting the `X-DNS-Prefetch-Control` header.\n\n///\n\n/// [read more](https://helmetjs.github.io/docs/dns-prefetch-control/)\n\n///\n\n// /// ## Examples\n\n// /// ```\n\n// /// use http_types::Response;\n\n// ///\n\n// /// let mut res = Response::new(StatusCode::Ok);\n\n// /// 
http_types::security::dns_prefetch_control(&mut headers);\n\n// /// assert_eq!(headers[\"X-DNS-Prefetch-Control\"], \"on\");\n\n// /// ```\n", "file_path": "src/security/mod.rs", "rank": 1, "score": 211198.3641530694 }, { "content": "#[inline]\n\npub fn nosniff(mut headers: impl AsMut<Headers>) {\n\n headers.as_mut().insert(\"X-Content-Type-Options\", \"nosniff\");\n\n}\n\n\n\n/// Sets the `X-XSS-Protection` header to prevent reflected XSS attacks.\n\n///\n\n/// [read more](https://helmetjs.github.io/docs/xss-filter/)\n\n///\n\n// /// ## Examples\n\n// /// ```\n\n// /// use http_types::Response;\n\n// ///\n\n// /// let mut res = Response::new(StatusCode::Ok);\n\n// /// http_types::security::xss_filter(&mut headers);\n\n// /// assert_eq!(headers[\"X-XSS-Protection\"], \"1; mode=block\");\n\n// /// ```\n", "file_path": "src/security/mod.rs", "rank": 2, "score": 211183.72796423663 }, { "content": "#[inline]\n\npub fn hsts(mut headers: impl AsMut<Headers>) {\n\n headers\n\n .as_mut()\n\n .insert(\"Strict-Transport-Security\", \"max-age=5184000\");\n\n}\n\n\n\n/// Prevent browsers from trying to guess (“sniff”) the MIME type, which can have security\n\n/// implications.\n\n///\n\n/// [read more](https://helmetjs.github.io/docs/dont-sniff-mimetype/)\n\n///\n\n// /// ## Examples\n\n// /// ```\n\n// /// use http_types::Response;\n\n// ///\n\n// /// let mut res = Response::new(StatusCode::Ok);\n\n// /// http_types::security::nosniff(&mut headers);\n\n// /// assert_eq!(headers[\"X-Content-Type-Options\"], \"nosniff\");\n\n// /// ```\n", "file_path": "src/security/mod.rs", "rank": 3, "score": 211183.72796423663 }, { "content": "#[inline]\n\npub fn xss_filter(mut headers: impl AsMut<Headers>) {\n\n headers.as_mut().insert(\"X-XSS-Protection\", \"1; mode=block\");\n\n}\n\n\n\n/// Set the Referrer-Policy level\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub enum ReferrerOptions {\n\n /// Set to \"no-referrer\"\n\n NoReferrer,\n\n /// Set to 
\"no-referrer-when-downgrade\" the default\n\n NoReferrerDowngrade,\n\n /// Set to \"same-origin\"\n\n SameOrigin,\n\n /// Set to \"origin\"\n\n Origin,\n\n /// Set to \"strict-origin\"\n\n StrictOrigin,\n\n /// Set to \"origin-when-cross-origin\"\n\n CrossOrigin,\n\n /// Set to \"strict-origin-when-cross-origin\"\n", "file_path": "src/security/mod.rs", "rank": 4, "score": 206074.46902002508 }, { "content": "#[inline]\n\npub fn dns_prefetch_control(mut headers: impl AsMut<Headers>) {\n\n headers.as_mut().insert(\"X-DNS-Prefetch-Control\", \"on\");\n\n}\n\n\n\n/// Set the frameguard level.\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub enum FrameOptions {\n\n /// Set to `sameorigin`\n\n SameOrigin,\n\n /// Set to `deny`\n\n Deny,\n\n}\n\n\n\n/// Mitigates clickjacking attacks by setting the `X-Frame-Options` header.\n\n///\n\n/// [read more](https://helmetjs.github.io/docs/frameguard/)\n\n///\n\n// /// ## Examples\n\n// /// ```\n\n// /// use http_types::Response;\n\n// ///\n\n// /// let mut res = Response::new(StatusCode::Ok);\n\n// /// http_types::security::frameguard(&mut headers, None);\n\n// /// assert_eq!(headers[\"X-Frame-Options\"], \"sameorigin\");\n\n// /// ```\n", "file_path": "src/security/mod.rs", "rank": 5, "score": 201289.54704012774 }, { "content": "#[inline]\n\npub fn frameguard(mut headers: impl AsMut<Headers>, guard: Option<FrameOptions>) {\n\n let kind = match guard {\n\n None | Some(FrameOptions::SameOrigin) => \"sameorigin\",\n\n Some(FrameOptions::Deny) => \"deny\",\n\n };\n\n headers.as_mut().insert(\"X-Frame-Options\", kind);\n\n}\n\n\n\n/// Removes the `X-Powered-By` header to make it slightly harder for attackers to see what\n\n/// potentially-vulnerable technology powers your site.\n\n///\n\n/// [read more](https://helmetjs.github.io/docs/hide-powered-by/)\n\n///\n\n// /// ## Examples\n\n// /// ```\n\n// /// use http_types::Response;\n\n// ///\n\n// /// let mut res = Response::new(StatusCode::Ok);\n\n// /// 
headers.as_mut().insert(\"X-Powered-By\", \"Tide/Rust\".parse());\n\n// /// http_types::security::hide_powered_by(&mut headers);\n\n// /// assert_eq!(headers.get(\"X-Powered-By\"), None);\n\n// /// ```\n", "file_path": "src/security/mod.rs", "rank": 6, "score": 182915.4718748378 }, { "content": "#[inline]\n\npub fn referrer_policy(mut headers: impl AsMut<Headers>, referrer: Option<ReferrerOptions>) {\n\n let policy = match referrer {\n\n None | Some(ReferrerOptions::NoReferrer) => \"no-referrer\",\n\n Some(ReferrerOptions::NoReferrerDowngrade) => \"no-referrer-when-downgrade\",\n\n Some(ReferrerOptions::SameOrigin) => \"same-origin\",\n\n Some(ReferrerOptions::Origin) => \"origin\",\n\n Some(ReferrerOptions::StrictOrigin) => \"strict-origin\",\n\n Some(ReferrerOptions::CrossOrigin) => \"origin-when-cross-origin\",\n\n Some(ReferrerOptions::StrictCrossOrigin) => \"strict-origin-when-cross-origin\",\n\n Some(ReferrerOptions::UnsafeUrl) => \"unsafe-url\",\n\n };\n\n\n\n // We MUST allow for multiple Referrer-Policy headers to be set.\n\n // See: https://w3c.github.io/webappsec-referrer-policy/#unknown-policy-values example #13\n\n headers.as_mut().append(\"Referrer-Policy\", policy);\n\n}\n", "file_path": "src/security/mod.rs", "rank": 7, "score": 178934.60621058955 }, { "content": "/// A trait for objects which can be converted or resolved to one or more `HeaderValue`s.\n\npub trait ToHeaderValues {\n\n /// Returned iterator over header values which this type may correspond to.\n\n type Iter: Iterator<Item = HeaderValue>;\n\n\n\n /// Converts this object to an iterator of resolved `HeaderValues`.\n\n fn to_header_values(&self) -> crate::Result<Self::Iter>;\n\n}\n\n\n\nimpl ToHeaderValues for HeaderValue {\n\n type Iter = option::IntoIter<HeaderValue>;\n\n\n\n fn to_header_values(&self) -> crate::Result<Self::Iter> {\n\n Ok(Some(self.clone()).into_iter())\n\n }\n\n}\n\n\n\nimpl<'a> ToHeaderValues for &'a HeaderValues {\n\n type Iter = iter::Cloned<Values<'a>>;\n\n\n\n 
fn to_header_values(&self) -> crate::Result<Self::Iter> {\n", "file_path": "src/headers/to_header_values.rs", "rank": 8, "score": 142683.399136928 }, { "content": "fn headers_to_hyperium_headers(headers: &mut Headers, hyperium_headers: &mut http::HeaderMap) {\n\n for (name, values) in headers {\n\n let name = format!(\"{}\", name).into_bytes();\n\n let name = http::header::HeaderName::from_bytes(&name).unwrap();\n\n\n\n for value in values.iter() {\n\n let value = format!(\"{}\", value).into_bytes();\n\n let value = http::header::HeaderValue::from_bytes(&value).unwrap();\n\n hyperium_headers.append(&name, value);\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/hyperium_http.rs", "rank": 9, "score": 137282.00371343637 }, { "content": "fn hyperium_headers_to_headers(hyperium_headers: http::HeaderMap, headers: &mut Headers) {\n\n for (name, value) in hyperium_headers {\n\n let value = value.as_bytes().to_owned();\n\n let value = unsafe { HeaderValue::from_bytes_unchecked(value) };\n\n if let Some(name) = name {\n\n let name = name.as_str().as_bytes().to_owned();\n\n let name = unsafe { HeaderName::from_bytes_unchecked(name) };\n\n headers.insert(name, value).unwrap();\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/hyperium_http.rs", "rank": 10, "score": 122961.49884438163 }, { "content": "struct EscapeMimeValue {\n\n state: EscapeMimeValueState,\n\n}\n\n\n", "file_path": "src/mime/parse.rs", "rank": 11, "score": 89996.02190714372 }, { "content": "use crate::headers::{HeaderValue, Values};\n\n\n\nuse std::fmt::{self, Debug, Display};\n\nuse std::iter::FromIterator;\n\nuse std::ops::{Deref, DerefMut, Index};\n\nuse std::slice::SliceIndex;\n\n\n\n/// A list of `HeaderValue`s.\n\n///\n\n/// This always contains at least one header value.\n\n#[derive(Clone)]\n\npub struct HeaderValues {\n\n pub(crate) inner: Vec<HeaderValue>,\n\n}\n\n\n\nimpl HeaderValues {\n\n /// Move all values from `other` into `self`, leaving `other` empty.\n\n pub fn append(&mut self, other: &mut Self) {\n\n 
self.inner.append(&mut other.inner)\n\n }\n", "file_path": "src/headers/header_values.rs", "rank": 12, "score": 81768.08776432044 }, { "content": " Ok(Self {\n\n inner: String::from(s),\n\n })\n\n }\n\n}\n\n\n\nimpl<'a> TryFrom<&'a str> for HeaderValue {\n\n type Error = Error;\n\n\n\n fn try_from(value: &'a str) -> Result<Self, Self::Error> {\n\n Self::from_str(value)\n\n }\n\n}\n\n\n\nimpl Debug for HeaderValue {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{:?}\", self.inner)\n\n }\n\n}\n\n\n", "file_path": "src/headers/header_value.rs", "rank": 13, "score": 81762.34385741808 }, { "content": "\n\n /// Returns a reference or a value depending on the type of index.\n\n pub fn get(&self, index: usize) -> Option<&HeaderValue> {\n\n self.inner.get(index)\n\n }\n\n\n\n /// Returns a mutable reference or a value depending on the type of index.\n\n pub fn get_mut(&mut self, index: usize) -> Option<&mut HeaderValue> {\n\n self.inner.get_mut(index)\n\n }\n\n\n\n /// Returns `true` if there is a value corresponding to the specified `HeaderValue` in the list,\n\n /// `false` otherwise.\n\n pub fn contains(&self, value: &HeaderValue) -> bool {\n\n self.inner.contains(value)\n\n }\n\n\n\n /// Returns the last `HeaderValue`.\n\n pub fn last(&self) -> &HeaderValue {\n\n self.inner\n", "file_path": "src/headers/header_values.rs", "rank": 14, "score": 81761.34552499725 }, { "content": " }\n\n\n\n /// Get the header value as a `&str`\n\n pub fn as_str(&self) -> &str {\n\n &self.inner\n\n }\n\n}\n\n\n\nimpl From<Mime> for HeaderValue {\n\n fn from(mime: Mime) -> Self {\n\n HeaderValue {\n\n inner: format!(\"{}\", mime),\n\n }\n\n }\n\n}\n\n\n\nimpl From<Cookie<'_>> for HeaderValue {\n\n fn from(cookie: Cookie<'_>) -> Self {\n\n HeaderValue {\n\n inner: cookie.to_string(),\n", "file_path": "src/headers/header_value.rs", "rank": 15, "score": 81759.91217232282 }, { "content": " &self.inner == other\n\n }\n\n}\n\n\n\nimpl<'a> PartialEq<&String> for 
HeaderValue {\n\n fn eq(&self, other: &&String) -> bool {\n\n &&self.inner == other\n\n }\n\n}\n\n\n\nimpl From<HeaderValues> for HeaderValue {\n\n fn from(mut other: HeaderValues) -> Self {\n\n other.inner.reverse();\n\n other\n\n .inner\n\n .pop()\n\n .expect(\"HeaderValues should contain at least one value\")\n\n }\n\n}\n\n\n", "file_path": "src/headers/header_value.rs", "rank": 16, "score": 81759.38211792886 }, { "content": "}\n\n\n\nimpl<'a> IntoIterator for &'a HeaderValues {\n\n type Item = &'a HeaderValue;\n\n type IntoIter = Values<'a>;\n\n\n\n #[inline]\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.iter()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_debug_single() {\n\n let header_values = HeaderValues {\n\n inner: vec![\"foo0\".parse().unwrap()],\n", "file_path": "src/headers/header_values.rs", "rank": 17, "score": 81759.36754328592 }, { "content": "\n\nimpl Debug for HeaderValues {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n if self.inner.len() == 1 {\n\n write!(f, \"{:?}\", self.inner[0])\n\n } else {\n\n f.debug_list().entries(self.inner.iter()).finish()\n\n }\n\n }\n\n}\n\n\n\nimpl Display for HeaderValues {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let mut list = f.debug_list();\n\n for v in &self.inner {\n\n list.entry(&v);\n\n }\n\n list.finish()\n\n }\n\n}\n", "file_path": "src/headers/header_values.rs", "rank": 18, "score": 81759.24779640499 }, { "content": " }\n\n}\n\n\n\nimpl AsMut<HeaderValue> for HeaderValues {\n\n fn as_mut(&mut self) -> &mut HeaderValue {\n\n &mut self.inner[0]\n\n }\n\n}\n\nimpl Deref for HeaderValues {\n\n type Target = HeaderValue;\n\n\n\n fn deref(&self) -> &HeaderValue {\n\n &self.inner[0]\n\n }\n\n}\n\n\n\nimpl DerefMut for HeaderValues {\n\n fn deref_mut(&mut self) -> &mut HeaderValue {\n\n &mut self.inner[0]\n\n }\n", "file_path": "src/headers/header_values.rs", "rank": 19, "score": 81758.5182819858 }, { "content": 
"use std::convert::TryFrom;\n\nuse std::fmt::{self, Debug, Display};\n\nuse std::str::FromStr;\n\n\n\nuse crate::headers::HeaderValues;\n\nuse crate::Error;\n\nuse crate::{Cookie, Mime};\n\n\n\n/// A header value.\n\n#[derive(Clone, Eq, PartialEq, Hash)]\n\npub struct HeaderValue {\n\n inner: String,\n\n}\n\n\n\nimpl HeaderValue {\n\n /// Create a new `HeaderValue` from a Vec of ASCII bytes.\n\n ///\n\n /// # Error\n\n ///\n\n /// This function will error if the bytes is not valid ASCII.\n", "file_path": "src/headers/header_value.rs", "rank": 20, "score": 81758.46948911036 }, { "content": " fn eq(&self, other: &String) -> bool {\n\n self.inner.len() == 1 && self.inner[0] == *other\n\n }\n\n}\n\n\n\nimpl<'a> PartialEq<&String> for HeaderValues {\n\n fn eq(&self, other: &&String) -> bool {\n\n self.inner.len() == 1 && self.inner[0] == **other\n\n }\n\n}\n\n\n\nimpl From<HeaderValue> for HeaderValues {\n\n fn from(other: HeaderValue) -> Self {\n\n Self { inner: vec![other] }\n\n }\n\n}\n\n\n\nimpl AsRef<HeaderValue> for HeaderValues {\n\n fn as_ref(&self) -> &HeaderValue {\n\n &self.inner[0]\n", "file_path": "src/headers/header_values.rs", "rank": 21, "score": 81757.71003270795 }, { "content": " .last()\n\n .expect(\"HeaderValues must always contain at least one value\")\n\n }\n\n\n\n /// An iterator visiting all header values in arbitrary order.\n\n pub fn iter(&self) -> Values<'_> {\n\n Values::new_values(&self)\n\n }\n\n\n\n // /// An iterator visiting all header values in arbitrary order, with mutable\n\n // /// references to the values.\n\n // pub fn iter_mut(&mut self) -> ValuesMut<'_> {\n\n // ValuesMut {\n\n // inner: self.headers.iter_mut(),\n\n // }\n\n // }\n\n}\n\n\n\nimpl<I: SliceIndex<[HeaderValue]>> Index<I> for HeaderValues {\n\n type Output = I::Output;\n", "file_path": "src/headers/header_values.rs", "rank": 22, "score": 81757.2083207916 }, { "content": "\n\n #[inline]\n\n fn index(&self, index: I) -> &Self::Output {\n\n Index::index(&self.inner, 
index)\n\n }\n\n}\n\n\n\nimpl FromIterator<HeaderValue> for HeaderValues {\n\n fn from_iter<I>(iter: I) -> HeaderValues\n\n where\n\n I: IntoIterator<Item = HeaderValue>,\n\n {\n\n let iter = iter.into_iter();\n\n let mut output = Vec::with_capacity(iter.size_hint().0);\n\n for v in iter {\n\n output.push(v);\n\n }\n\n HeaderValues { inner: output }\n\n }\n\n}\n", "file_path": "src/headers/header_values.rs", "rank": 23, "score": 81756.08297418698 }, { "content": "impl Display for HeaderValue {\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"{}\", self.inner)\n\n }\n\n}\n\n\n\nimpl PartialEq<str> for HeaderValue {\n\n fn eq(&self, other: &str) -> bool {\n\n self.inner == other\n\n }\n\n}\n\n\n\nimpl<'a> PartialEq<&'a str> for HeaderValue {\n\n fn eq(&self, other: &&'a str) -> bool {\n\n &self.inner == other\n\n }\n\n}\n\n\n\nimpl PartialEq<String> for HeaderValue {\n\n fn eq(&self, other: &String) -> bool {\n", "file_path": "src/headers/header_value.rs", "rank": 24, "score": 81754.93577429275 }, { "content": " }\n\n }\n\n}\n\n\n\nimpl From<&Mime> for HeaderValue {\n\n fn from(mime: &Mime) -> Self {\n\n HeaderValue {\n\n inner: format!(\"{}\", mime),\n\n }\n\n }\n\n}\n\n\n\nimpl FromStr for HeaderValue {\n\n type Err = Error;\n\n\n\n /// Create a new `HeaderValue`.\n\n ///\n\n /// This checks it's valid ASCII.\n\n fn from_str(s: &str) -> Result<Self, Self::Err> {\n\n crate::ensure!(s.is_ascii(), \"String slice should be valid ASCII\");\n", "file_path": "src/headers/header_value.rs", "rank": 25, "score": 81754.54464606568 }, { "content": "\n\nimpl PartialEq<str> for HeaderValues {\n\n fn eq(&self, other: &str) -> bool {\n\n self.inner.len() == 1 && self.inner[0] == other\n\n }\n\n}\n\n\n\nimpl<'a> PartialEq<&'a str> for HeaderValues {\n\n fn eq(&self, other: &&'a str) -> bool {\n\n self.inner.len() == 1 && &self.inner[0] == other\n\n }\n\n}\n\n\n\nimpl<'a> PartialEq<[&'a str]> for HeaderValues {\n\n fn eq(&self, other: &[&'a str]) -> 
bool {\n\n self.inner.iter().eq(other.iter())\n\n }\n\n}\n\n\n\nimpl PartialEq<String> for HeaderValues {\n", "file_path": "src/headers/header_values.rs", "rank": 26, "score": 81752.51244480355 }, { "content": " }\n\n}\n\n\n\nimpl ToHeaderValues for String {\n\n type Iter = option::IntoIter<HeaderValue>;\n\n\n\n fn to_header_values(&self) -> crate::Result<Self::Iter> {\n\n self.as_str().to_header_values()\n\n }\n\n}\n\n\n\nimpl ToHeaderValues for &String {\n\n type Iter = option::IntoIter<HeaderValue>;\n\n\n\n fn to_header_values(&self) -> crate::Result<Self::Iter> {\n\n self.as_str().to_header_values()\n\n }\n\n}\n\n\n\nimpl ToHeaderValues for Cow<'_, str> {\n\n type Iter = option::IntoIter<HeaderValue>;\n\n\n\n fn to_header_values(&self) -> crate::Result<Self::Iter> {\n\n self.as_ref().to_header_values()\n\n }\n\n}\n", "file_path": "src/headers/to_header_values.rs", "rank": 27, "score": 81752.37406690572 }, { "content": " Ok(self.iter().cloned())\n\n }\n\n}\n\n\n\nimpl<'a> ToHeaderValues for &'a [HeaderValue] {\n\n type Iter = iter::Cloned<slice::Iter<'a, HeaderValue>>;\n\n\n\n fn to_header_values(&self) -> crate::Result<Self::Iter> {\n\n Ok(self.iter().cloned())\n\n }\n\n}\n\n\n\nimpl<'a> ToHeaderValues for &'a str {\n\n type Iter = option::IntoIter<HeaderValue>;\n\n\n\n fn to_header_values(&self) -> crate::Result<Self::Iter> {\n\n let value = self\n\n .parse()\n\n .map_err(|err| io::Error::new(io::ErrorKind::Other, err))?;\n\n Ok(Some(value).into_iter())\n", "file_path": "src/headers/to_header_values.rs", "rank": 28, "score": 81750.75300109257 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_debug() {\n\n let header_value = HeaderValue::from_str(\"foo0\").unwrap();\n\n assert_eq!(format!(\"{:?}\", header_value), \"\\\"foo0\\\"\");\n\n }\n\n}\n", "file_path": "src/headers/header_value.rs", "rank": 29, "score": 81749.99329469341 }, { "content": " };\n\n assert_eq!(format!(\"{:?}\", header_values), \"\\\"foo0\\\"\");\n\n 
}\n\n #[test]\n\n fn test_debug_multiple() {\n\n let header_values = HeaderValues {\n\n inner: vec![\"foo0\".parse().unwrap(), \"foo1\".parse().unwrap()],\n\n };\n\n assert_eq!(format!(\"{:?}\", header_values), r#\"[\"foo0\", \"foo1\"]\"#);\n\n }\n\n}\n", "file_path": "src/headers/header_values.rs", "rank": 30, "score": 81749.85675579419 }, { "content": " pub fn from_bytes(bytes: Vec<u8>) -> Result<Self, Error> {\n\n crate::ensure!(bytes.is_ascii(), \"Bytes should be valid ASCII\");\n\n\n\n // This is permitted because ASCII is valid UTF-8, and we just checked that.\n\n let string = unsafe { String::from_utf8_unchecked(bytes) };\n\n Ok(Self { inner: string })\n\n }\n\n\n\n /// Converts a vector of bytes to a `HeaderValue` without checking that the string contains\n\n /// valid ASCII.\n\n ///\n\n /// # Safety\n\n ///\n\n /// This function is unsafe because it does not check that the bytes passed to it are valid\n\n /// ASCII. If this constraint is violated, it may cause memory\n\n /// unsafety issues with future users of the HeaderValue, as the rest of the library assumes\n\n /// that Strings are valid ASCII.\n\n pub unsafe fn from_bytes_unchecked(bytes: Vec<u8>) -> Self {\n\n let string = String::from_utf8_unchecked(bytes);\n\n Self { inner: string }\n", "file_path": "src/headers/header_value.rs", "rank": 31, "score": 81749.84517841584 }, { "content": "use std::borrow::Cow;\n\nuse std::io;\n\nuse std::iter;\n\nuse std::option;\n\nuse std::slice;\n\n\n\nuse crate::headers::{HeaderValue, HeaderValues, Values};\n\n\n\n/// A trait for objects which can be converted or resolved to one or more `HeaderValue`s.\n", "file_path": "src/headers/to_header_values.rs", "rank": 32, "score": 81748.17175758652 }, { "content": "#[test]\n\nfn empty_query_string_for_struct_with_no_required_fields() {\n\n let req = http_types::Request::new(Method::Get, Url::parse(\"http://example.com\").unwrap());\n\n\n\n let params = req.query::<OptionalParams>();\n\n assert!(params.is_ok());\n\n}\n", 
"file_path": "tests/querystring.rs", "rank": 33, "score": 80988.38603164129 }, { "content": "/// [HTTP quoted string collection](https://fetch.spec.whatwg.org/#collect-an-http-quoted-string)\n\n///\n\n/// Assumes that the first char is '\"'\n\nfn collect_http_quoted_string(mut input: &str) -> (String, &str) {\n\n // 2.\n\n let mut value = String::new();\n\n // 4.\n\n input = &input[1..];\n\n // 5.\n\n loop {\n\n // 1.\n\n let (add_value, new_input) =\n\n collect_code_point_sequence_slice(input, &['\"', '\\\\'] as &[char]);\n\n value.push_str(add_value);\n\n let mut chars = new_input.chars();\n\n // 3.\n\n if let Some(quote_or_backslash) = chars.next() {\n\n // 4.\n\n input = chars.as_str();\n\n //5.\n\n if quote_or_backslash == '\\\\' {\n\n if let Some(c) = chars.next() {\n\n // 2.\n", "file_path": "src/mime/parse.rs", "rank": 34, "score": 77762.91769191597 }, { "content": "use std::collections::hash_map;\n\nuse std::iter::Iterator;\n\n\n\nuse crate::headers::{HeaderName, HeaderValue, HeaderValues};\n\n\n\n/// Iterator over the header values.\n\n#[derive(Debug)]\n\npub struct Values<'a> {\n\n pub(super) inner: Option<hash_map::Values<'a, HeaderName, HeaderValues>>,\n\n slot: Option<&'a HeaderValues>,\n\n cursor: usize,\n\n}\n\n\n\nimpl<'a> Values<'a> {\n\n /// Constructor for `Headers`.\n\n pub(crate) fn new(inner: hash_map::Values<'a, HeaderName, HeaderValues>) -> Self {\n\n Self {\n\n inner: Some(inner),\n\n slot: None,\n\n cursor: 0,\n", "file_path": "src/headers/values.rs", "rank": 35, "score": 74887.33841266601 }, { "content": " }\n\n }\n\n\n\n /// Constructor for `HeaderValues`.\n\n pub(crate) fn new_values(values: &'a HeaderValues) -> Self {\n\n Self {\n\n inner: None,\n\n slot: Some(values),\n\n cursor: 0,\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> Iterator for Values<'a> {\n\n type Item = &'a HeaderValue;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n loop {\n\n // Check if we have a vec in the current slot, and if not set one.\n\n if self.slot.is_none() 
{\n", "file_path": "src/headers/values.rs", "rank": 36, "score": 74883.86761325852 }, { "content": " let next = match self.inner.as_mut() {\n\n Some(inner) => inner.next()?,\n\n None => return None,\n\n };\n\n self.cursor = 0;\n\n self.slot = Some(next);\n\n }\n\n\n\n // Get the next item\n\n match self.slot.unwrap().get(self.cursor) {\n\n // If an item is found, increment the cursor and return the item.\n\n Some(item) => {\n\n self.cursor += 1;\n\n return Some(item);\n\n }\n\n // If no item is found, unset the slot and loop again.\n\n None => {\n\n self.slot = None;\n\n continue;\n\n }\n", "file_path": "src/headers/values.rs", "rank": 37, "score": 74869.87506764068 }, { "content": " }\n\n }\n\n }\n\n\n\n #[inline]\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n\n match self.inner.as_ref() {\n\n Some(inner) => inner.size_hint(),\n\n None => (0, None),\n\n }\n\n }\n\n}\n", "file_path": "src/headers/values.rs", "rank": 38, "score": 74867.08233282155 }, { "content": "\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.inner.next()\n\n }\n\n\n\n #[inline]\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n\n self.inner.size_hint()\n\n }\n\n}\n\n\n\nimpl ToHeaderValues for IfNoneMatch {\n\n type Iter = option::IntoIter<HeaderValue>;\n\n fn to_header_values(&self) -> crate::Result<Self::Iter> {\n\n // A HeaderValue will always convert into itself.\n\n Ok(self.value().to_header_values().unwrap())\n\n }\n\n}\n\n\n\nimpl Debug for IfNoneMatch {\n", "file_path": "src/conditional/if_none_match.rs", "rank": 39, "score": 71555.4788758348 }, { "content": "\n\n /// Sets the `If-None-Match` header.\n\n pub fn apply(&self, mut headers: impl AsMut<Headers>) {\n\n headers.as_mut().insert(IF_NONE_MATCH, self.value());\n\n }\n\n\n\n /// Get the `HeaderName`.\n\n pub fn name(&self) -> HeaderName {\n\n IF_NONE_MATCH\n\n }\n\n\n\n /// Get the `HeaderValue`.\n\n pub fn value(&self) -> HeaderValue {\n\n let mut output = String::new();\n\n for (n, etag) in 
self.entries.iter().enumerate() {\n\n match n {\n\n 0 => write!(output, \"{}\", etag.to_string()).unwrap(),\n\n _ => write!(output, \", {}\", etag.to_string()).unwrap(),\n\n };\n\n }\n", "file_path": "src/conditional/if_none_match.rs", "rank": 40, "score": 71550.28138048633 }, { "content": " self.iter()\n\n }\n\n}\n\n\n\nimpl<'a> IntoIterator for &'a mut IfNoneMatch {\n\n type Item = &'a mut ETag;\n\n type IntoIter = IterMut<'a>;\n\n\n\n #[inline]\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.iter_mut()\n\n }\n\n}\n\n\n\n/// A borrowing iterator over entries in `IfNoneMatch`.\n\n#[derive(Debug)]\n\npub struct IntoIter {\n\n inner: std::vec::IntoIter<ETag>,\n\n}\n\n\n", "file_path": "src/conditional/if_none_match.rs", "rank": 41, "score": 71549.36673065717 }, { "content": " type Item = &'a ETag;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.inner.next()\n\n }\n\n\n\n #[inline]\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n\n self.inner.size_hint()\n\n }\n\n}\n\n\n\n/// A mutable iterator over entries in `IfNoneMatch`.\n\n#[derive(Debug)]\n\npub struct IterMut<'a> {\n\n inner: slice::IterMut<'a, ETag>,\n\n}\n\n\n\nimpl<'a> Iterator for IterMut<'a> {\n\n type Item = &'a mut ETag;\n", "file_path": "src/conditional/if_none_match.rs", "rank": 42, "score": 71547.12577196064 }, { "content": "impl Iterator for IntoIter {\n\n type Item = ETag;\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.inner.next()\n\n }\n\n\n\n #[inline]\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n\n self.inner.size_hint()\n\n }\n\n}\n\n\n\n/// A lending iterator over entries in `IfNoneMatch`.\n\n#[derive(Debug)]\n\npub struct Iter<'a> {\n\n inner: slice::Iter<'a, ETag>,\n\n}\n\n\n\nimpl<'a> Iterator for Iter<'a> {\n", "file_path": "src/conditional/if_none_match.rs", "rank": 43, "score": 71546.99283891662 }, { "content": "/// assert_eq!(entries.next().unwrap(), &ETag::new(\"0xbeefcafe\".to_string()));\n\n/// #\n\n/// # Ok(()) }\n\n/// ```\n\npub struct 
IfNoneMatch {\n\n entries: Vec<ETag>,\n\n wildcard: bool,\n\n}\n\n\n\nimpl IfNoneMatch {\n\n /// Create a new instance of `IfNoneMatch`.\n\n pub fn new() -> Self {\n\n Self {\n\n entries: vec![],\n\n wildcard: false,\n\n }\n\n }\n\n\n\n /// Create a new instance from headers.\n\n pub fn from_headers(headers: impl AsRef<Headers>) -> crate::Result<Option<Self>> {\n", "file_path": "src/conditional/if_none_match.rs", "rank": 44, "score": 71545.78386407837 }, { "content": " let mut entries = vec![];\n\n let headers = match headers.as_ref().get(IF_NONE_MATCH) {\n\n Some(headers) => headers,\n\n None => return Ok(None),\n\n };\n\n\n\n let mut wildcard = false;\n\n for value in headers {\n\n for part in value.as_str().trim().split(',') {\n\n let part = part.trim();\n\n if part == \"*\" {\n\n wildcard = true;\n\n continue;\n\n }\n\n entries.push(ETag::from_str(part)?);\n\n }\n\n }\n\n\n\n Ok(Some(Self { entries, wildcard }))\n\n }\n", "file_path": "src/conditional/if_none_match.rs", "rank": 45, "score": 71544.21714123269 }, { "content": "//! Apply the HTTP method if the ETags do not match.\n\n//!\n\n//! This is used to update caches or to prevent uploading a new resource when\n\n//! 
one already exists.\n\n\n\nuse crate::conditional::ETag;\n\nuse crate::headers::{HeaderName, HeaderValue, Headers, ToHeaderValues, IF_NONE_MATCH};\n\n\n\nuse std::fmt::{self, Debug, Write};\n\nuse std::iter::Iterator;\n\nuse std::option;\n\nuse std::slice;\n\n\n\n/// Apply the HTTP method if the ETags do not match.\n\n///\n\n/// This is used to update caches or to prevent uploading a new resource when\n\n/// one already exists.\n\n///\n\n/// # Specifications\n\n///\n", "file_path": "src/conditional/if_none_match.rs", "rank": 46, "score": 71542.27686807804 }, { "content": "\n\n if self.wildcard {\n\n match output.len() {\n\n 0 => write!(output, \"*\").unwrap(),\n\n _ => write!(output, \", *\").unwrap(),\n\n };\n\n }\n\n\n\n // SAFETY: the internal string is validated to be ASCII.\n\n unsafe { HeaderValue::from_bytes_unchecked(output.into()) }\n\n }\n\n\n\n /// Push a directive into the list of entries.\n\n pub fn push(&mut self, directive: impl Into<ETag>) {\n\n self.entries.push(directive.into());\n\n }\n\n\n\n /// Returns `true` if a wildcard directive was set.\n\n pub fn wildcard(&self) -> bool {\n\n self.wildcard\n", "file_path": "src/conditional/if_none_match.rs", "rank": 47, "score": 71540.99970456489 }, { "content": "}\n\n\n\nimpl IntoIterator for IfNoneMatch {\n\n type Item = ETag;\n\n type IntoIter = IntoIter;\n\n\n\n #[inline]\n\n fn into_iter(self) -> Self::IntoIter {\n\n IntoIter {\n\n inner: self.entries.into_iter(),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> IntoIterator for &'a IfNoneMatch {\n\n type Item = &'a ETag;\n\n type IntoIter = Iter<'a>;\n\n\n\n #[inline]\n\n fn into_iter(self) -> Self::IntoIter {\n", "file_path": "src/conditional/if_none_match.rs", "rank": 48, "score": 71540.78938722613 }, { "content": "/// - [RFC 7232, section 3.2: If-None-Match](https://tools.ietf.org/html/rfc7232#section-3.2)\n\n///\n\n/// # Examples\n\n///\n\n/// ```\n\n/// # fn main() -> http_types::Result<()> {\n\n/// #\n\n/// use http_types::Response;\n\n/// use 
http_types::conditional::{IfNoneMatch, ETag};\n\n///\n\n/// let mut entries = IfNoneMatch::new();\n\n/// entries.push(ETag::new(\"0xcafebeef\".to_string()));\n\n/// entries.push(ETag::new(\"0xbeefcafe\".to_string()));\n\n///\n\n/// let mut res = Response::new(200);\n\n/// entries.apply(&mut res);\n\n///\n\n/// let entries = IfNoneMatch::from_headers(res)?.unwrap();\n\n/// let mut entries = entries.iter();\n\n/// assert_eq!(entries.next().unwrap(), &ETag::new(\"0xcafebeef\".to_string()));\n", "file_path": "src/conditional/if_none_match.rs", "rank": 49, "score": 71538.27837168625 }, { "content": " fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n let mut list = f.debug_list();\n\n for directive in &self.entries {\n\n list.entry(directive);\n\n }\n\n list.finish()\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use crate::conditional::{ETag, IfNoneMatch};\n\n use crate::Response;\n\n\n\n #[test]\n\n fn smoke() -> crate::Result<()> {\n\n let mut entries = IfNoneMatch::new();\n\n entries.push(ETag::new(\"0xcafebeef\".to_string()));\n\n entries.push(ETag::new(\"0xbeefcafe\".to_string()));\n\n\n", "file_path": "src/conditional/if_none_match.rs", "rank": 50, "score": 71537.9738226397 }, { "content": " let mut res = Response::new(200);\n\n entries.apply(&mut res);\n\n\n\n let entries = IfNoneMatch::from_headers(res)?.unwrap();\n\n let mut entries = entries.iter();\n\n assert_eq!(\n\n entries.next().unwrap(),\n\n &ETag::new(\"0xcafebeef\".to_string())\n\n );\n\n assert_eq!(\n\n entries.next().unwrap(),\n\n &ETag::new(\"0xbeefcafe\".to_string())\n\n );\n\n Ok(())\n\n }\n\n\n\n #[test]\n\n fn wildcard() -> crate::Result<()> {\n\n let mut entries = IfNoneMatch::new();\n\n entries.push(ETag::new(\"0xcafebeef\".to_string()));\n", "file_path": "src/conditional/if_none_match.rs", "rank": 51, "score": 71534.26559170915 }, { "content": " }\n\n\n\n /// Set the wildcard directive.\n\n pub fn set_wildcard(&mut self, wildcard: bool) {\n\n self.wildcard = wildcard\n\n 
}\n\n\n\n /// An iterator visiting all server entries.\n\n pub fn iter(&self) -> Iter<'_> {\n\n Iter {\n\n inner: self.entries.iter(),\n\n }\n\n }\n\n\n\n /// An iterator visiting all server entries.\n\n pub fn iter_mut(&mut self) -> IterMut<'_> {\n\n IterMut {\n\n inner: self.entries.iter_mut(),\n\n }\n\n }\n", "file_path": "src/conditional/if_none_match.rs", "rank": 52, "score": 71533.44803105126 }, { "content": " entries.set_wildcard(true);\n\n\n\n let mut res = Response::new(200);\n\n entries.apply(&mut res);\n\n\n\n let entries = IfNoneMatch::from_headers(res)?.unwrap();\n\n assert_eq!(entries.wildcard(), true);\n\n let mut entries = entries.iter();\n\n assert_eq!(\n\n entries.next().unwrap(),\n\n &ETag::new(\"0xcafebeef\".to_string())\n\n );\n\n Ok(())\n\n }\n\n}\n", "file_path": "src/conditional/if_none_match.rs", "rank": 53, "score": 71533.31480165986 }, { "content": "use std::collections::hash_map;\n\nuse std::iter::Iterator;\n\n\n\nuse crate::headers::{HeaderName, HeaderValues};\n\n\n\n/// Iterator over the headers.\n\n#[derive(Debug)]\n\npub struct IterMut<'a> {\n\n pub(super) inner: hash_map::IterMut<'a, HeaderName, HeaderValues>,\n\n}\n\n\n\nimpl<'a> Iterator for IterMut<'a> {\n\n type Item = (&'a HeaderName, &'a mut HeaderValues);\n\n\n\n fn next(&mut self) -> Option<Self::Item> {\n\n self.inner.next()\n\n }\n\n\n\n #[inline]\n\n fn size_hint(&self) -> (usize, Option<usize>) {\n\n self.inner.size_hint()\n\n }\n\n}\n", "file_path": "src/headers/iter_mut.rs", "rank": 54, "score": 71275.20931913934 }, { "content": "fn format_value(input: &str) -> Cow<'_, str> {\n\n match parse_token(input) {\n\n (_, \"\") => input.into(),\n\n _ => {\n\n let mut string = String::from(\"\\\"\");\n\n for ch in input.chars() {\n\n if let '\\\\' | '\"' = ch {\n\n string.push('\\\\');\n\n }\n\n string.push(ch);\n\n }\n\n string.push('\"');\n\n string.into()\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/proxies/forwarded.rs", "rank": 55, "score": 69453.3722948444 }, { "content": 
"fn parse_value(input: &str) -> (Option<Cow<'_, str>>, &str) {\n\n match parse_token(input) {\n\n (Some(token), rest) => (Some(Cow::Borrowed(token)), rest),\n\n (None, rest) => parse_quoted_string(rest),\n\n }\n\n}\n\n\n", "file_path": "src/proxies/forwarded.rs", "rank": 56, "score": 63437.72718161573 }, { "content": "#[derive(Deserialize)]\n\nstruct Params {\n\n msg: String,\n\n}\n\n\n", "file_path": "tests/querystring.rs", "rank": 57, "score": 62690.04906032752 }, { "content": "/// Trait to signal the requirements for an underlying connection type.\n\npub trait InnerConnection: AsyncRead + AsyncWrite + Send + Sync + Unpin {}\n\nimpl<T: AsyncRead + AsyncWrite + Send + Sync + Unpin> InnerConnection for T {}\n\n\n\nimpl AsyncRead for Connection {\n\n fn poll_read(\n\n mut self: Pin<&mut Self>,\n\n cx: &mut Context<'_>,\n\n buf: &mut [u8],\n\n ) -> Poll<io::Result<usize>> {\n\n Pin::new(&mut self.inner).poll_read(cx, buf)\n\n }\n\n}\n\n\n\nimpl AsyncWrite for Connection {\n\n fn poll_write(\n\n mut self: Pin<&mut Self>,\n\n cx: &mut Context<'_>,\n\n buf: &[u8],\n\n ) -> Poll<io::Result<usize>> {\n\n Pin::new(&mut self.inner).poll_write(cx, buf)\n", "file_path": "src/upgrade/connection.rs", "rank": 58, "score": 62657.38902952587 }, { "content": "fn main() {\n\n match compile_probe() {\n\n Some(status) if status.success() => println!(\"cargo:rustc-cfg=backtrace\"),\n\n _ => {}\n\n }\n\n}\n\n\n", "file_path": "build.rs", "rank": 59, "score": 60983.78671517417 }, { "content": "struct VersionVisitor;\n\n\n\nimpl<'de> Visitor<'de> for VersionVisitor {\n\n type Value = Version;\n\n\n\n fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {\n\n write!(formatter, \"a HTTP version as &str\")\n\n }\n\n\n\n fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>\n\n where\n\n E: Error,\n\n {\n\n match v {\n\n \"HTTP/0.9\" => Ok(Version::Http0_9),\n\n \"HTTP/1.0\" => Ok(Version::Http1_0),\n\n \"HTTP/1.1\" => Ok(Version::Http1_1),\n\n \"HTTP/2\" => 
Ok(Version::Http2_0),\n\n \"HTTP/3\" => Ok(Version::Http3_0),\n\n _ => Err(Error::invalid_value(serde::de::Unexpected::Str(v), &self)),\n", "file_path": "src/version.rs", "rank": 60, "score": 60801.488476560466 }, { "content": "struct MethodVisitor;\n\n\n\nimpl<'de> Visitor<'de> for MethodVisitor {\n\n type Value = Method;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n write!(formatter, \"a HTTP method &str\")\n\n }\n\n\n\n fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>\n\n where\n\n E: DeError,\n\n {\n\n match Method::from_str(v) {\n\n Ok(method) => Ok(method),\n\n Err(_) => Err(DeError::invalid_value(Unexpected::Str(v), &self)),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/method.rs", "rank": 61, "score": 60801.488476560466 }, { "content": "#[derive(Deserialize)]\n\nstruct OptionalParams {\n\n _msg: Option<String>,\n\n _time: Option<u64>,\n\n}\n\n\n", "file_path": "tests/querystring.rs", "rank": 62, "score": 60801.488476560466 }, { "content": "#[test]\n\nfn ensure() {\n\n fn inner() -> http_types::Result<()> {\n\n ensure!(true, \"Oh yes\");\n\n bail!(\"Oh no!\");\n\n }\n\n let res = inner();\n\n assert!(res.is_err());\n\n let err = res.unwrap_err();\n\n assert_eq!(err.status(), StatusCode::InternalServerError);\n\n}\n\n\n", "file_path": "tests/error.rs", "rank": 63, "score": 58903.53597672182 }, { "content": "fn match_ignore_case<'a>(start: &'static str, input: &'a str) -> (bool, &'a str) {\n\n let len = start.len();\n\n if input[..len].eq_ignore_ascii_case(start) {\n\n (true, &input[len..])\n\n } else {\n\n (false, input)\n\n }\n\n}\n\n\n", "file_path": "src/proxies/forwarded.rs", "rank": 64, "score": 57904.128153243524 }, { "content": " pub trait Sealed {}\n\n\n\n impl<T, E> Sealed for Result<T, E> {}\n\n impl<T> Sealed for Option<T> {}\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::Status;\n\n\n\n #[test]\n\n fn construct_shorthand_with_valid_status_code() {\n\n let _res = Some(()).status(200).unwrap();\n\n 
}\n\n\n\n #[test]\n\n #[should_panic(expected = \"Could not convert into a valid `StatusCode`\")]\n\n fn construct_shorthand_with_invalid_status_code() {\n\n let res: Result<(), std::io::Error> =\n\n Err(std::io::Error::new(std::io::ErrorKind::Other, \"oh no!\"));\n\n let _res = res.status(600).unwrap();\n\n }\n\n}\n", "file_path": "src/status.rs", "rank": 65, "score": 57360.67786783991 }, { "content": "#[test]\n\nfn test() {\n\n let mime = parse(\"text/html\").unwrap();\n\n assert_eq!(mime.basetype(), \"text\");\n\n assert_eq!(mime.subtype(), \"html\");\n\n\n\n // technically invalid mime, but allow anyway\n\n let mime = parse(\"text/html;\").unwrap();\n\n assert_eq!(mime.basetype(), \"text\");\n\n assert_eq!(mime.subtype(), \"html\");\n\n\n\n let mime = parse(\"text/html; charset=utf-8\").unwrap();\n\n assert_eq!(mime.basetype(), \"text\");\n\n assert_eq!(mime.subtype(), \"html\");\n\n assert_eq!(mime.param(\"charset\").unwrap(), \"utf-8\");\n\n\n\n let mime = parse(\"text/html; charset=utf-8;\").unwrap();\n\n assert_eq!(mime.basetype(), \"text\");\n\n assert_eq!(mime.subtype(), \"html\");\n\n assert_eq!(mime.param(\"charset\").unwrap(), \"utf-8\");\n\n\n\n assert!(parse(\"text\").is_err());\n\n assert!(parse(\"text/\").is_err());\n\n assert!(parse(\"t/\").is_err());\n\n assert!(parse(\"t/h\").is_ok());\n\n}\n\n\n\n/// Web Platform tests for MIME type parsing\n\n/// From https://github.com/web-platform-tests/wpt/blob/master/mimesniff/mime-types/resources/mime-types.json\n", "file_path": "src/mime/parse.rs", "rank": 66, "score": 57031.409963440674 }, { "content": "#[test]\n\nfn can_be_boxed() {\n\n fn can_be_boxed() -> Result<(), Box<dyn std::error::Error + Send + Sync + 'static>> {\n\n let err = io::Error::new(io::ErrorKind::Other, \"Oh no\");\n\n Err(Error::new(StatusCode::NotFound, err).into())\n\n }\n\n assert!(can_be_boxed().is_err());\n\n}\n\n\n", "file_path": "tests/error.rs", "rank": 67, "score": 57031.409963440674 }, { "content": "#[test]\n\nfn 
security_test() {\n\n let mut policy = security::ContentSecurityPolicy::new();\n\n policy\n\n .default_src(security::Source::SameOrigin)\n\n .default_src(\"areweasyncyet.rs\")\n\n .script_src(security::Source::SameOrigin)\n\n .script_src(security::Source::UnsafeInline)\n\n .object_src(security::Source::None)\n\n .base_uri(security::Source::None)\n\n .upgrade_insecure_requests();\n\n\n\n let mut res = Response::new(StatusCode::Ok);\n\n res.set_body(\"Hello, Chashu!\");\n\n\n\n security::default(&mut res);\n\n policy.apply(&mut res);\n\n\n\n assert_eq!(res[\"content-security-policy\"], \"base-uri 'none'; default-src 'self' areweasyncyet.rs; object-src 'none'; script-src 'self' 'unsafe-inline'; upgrade-insecure-requests\");\n\n}\n", "file_path": "tests/security.rs", "rank": 68, "score": 57031.409963440674 }, { "content": "#[test]\n\nfn ensure_eq() {\n\n fn inner() -> http_types::Result<()> {\n\n ensure_eq!(1, 1, \"Oh yes\");\n\n bail!(\"Oh no!\");\n\n }\n\n let res = inner();\n\n assert!(res.is_err());\n\n let err = res.unwrap_err();\n\n assert_eq!(err.status(), StatusCode::InternalServerError);\n\n}\n\n\n", "file_path": "tests/error.rs", "rank": 69, "score": 57031.409963440674 }, { "content": "#[test]\n\nfn option_ext() {\n\n use http_types::Status;\n\n fn run() -> http_types::Result<()> {\n\n None.status(StatusCode::NotFound)\n\n }\n\n let res = run();\n\n assert!(res.is_err());\n\n\n\n let err = res.unwrap_err();\n\n assert_eq!(err.status(), StatusCode::NotFound);\n\n}\n\n\n", "file_path": "tests/error.rs", "rank": 70, "score": 57031.409963440674 }, { "content": "#[test]\n\nfn result_ext() {\n\n use http_types::Status;\n\n fn run() -> http_types::Result<()> {\n\n let err = io::Error::new(io::ErrorKind::Other, \"Oh no\");\n\n Err(err).status(StatusCode::NotFound)?;\n\n Ok(())\n\n }\n\n let res = run();\n\n assert!(res.is_err());\n\n\n\n let err = res.unwrap_err();\n\n assert_eq!(err.status(), StatusCode::NotFound);\n\n}\n\n\n", "file_path": "tests/error.rs", "rank": 
71, "score": 57031.409963440674 }, { "content": "#[test]\n\nfn malformatted_query() {\n\n let req = http_types::Request::new(\n\n Method::Get,\n\n Url::parse(\"http://example.com/?error=should_fail\").unwrap(),\n\n );\n\n\n\n let params = req.query::<Params>();\n\n assert!(params.is_err());\n\n assert_eq!(\n\n params.err().unwrap().to_string(),\n\n \"failed with reason: missing field `msg`\"\n\n );\n\n}\n\n\n", "file_path": "tests/querystring.rs", "rank": 72, "score": 57031.409963440674 }, { "content": "struct StatusCodeU16Visitor;\n\n\n\nimpl<'de> Visitor<'de> for StatusCodeU16Visitor {\n\n type Value = StatusCode;\n\n\n\n fn expecting(&self, formatter: &mut fmt::Formatter) -> fmt::Result {\n\n write!(formatter, \"a u16 representing the status code\")\n\n }\n\n\n\n fn visit_i16<E>(self, v: i16) -> Result<Self::Value, E>\n\n where\n\n E: DeError,\n\n {\n\n self.visit_u16(v as u16)\n\n }\n\n\n\n fn visit_i32<E>(self, v: i32) -> Result<Self::Value, E>\n\n where\n\n E: DeError,\n\n {\n", "file_path": "src/status_code.rs", "rank": 73, "score": 56121.553100341 }, { "content": "#[derive(Default)]\n\nstruct IdHasher(u64);\n\n\n\nimpl Hasher for IdHasher {\n\n fn write(&mut self, _: &[u8]) {\n\n unreachable!(\"TypeId calls write_u64\");\n\n }\n\n\n\n #[inline]\n\n fn write_u64(&mut self, id: u64) {\n\n self.0 = id;\n\n }\n\n\n\n #[inline]\n\n fn finish(&self) -> u64 {\n\n self.0\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "src/extensions.rs", "rank": 74, "score": 55936.21334425972 }, { "content": "#[test]\n\nfn unsuccessfully_deserialize_query() {\n\n let req = http_types::Request::new(Method::Get, Url::parse(\"http://example.com/\").unwrap());\n\n\n\n let params = req.query::<Params>();\n\n assert!(params.is_err());\n\n assert_eq!(\n\n params.err().unwrap().to_string(),\n\n \"failed with reason: missing field `msg`\"\n\n );\n\n}\n\n\n", "file_path": "tests/querystring.rs", "rank": 75, "score": 55337.66300754594 }, { "content": "#[test]\n\nfn 
successfully_deserialize_query() {\n\n let req = http_types::Request::new(\n\n Method::Get,\n\n Url::parse(\"http://example.com/?msg=Hello\").unwrap(),\n\n );\n\n\n\n let params = req.query::<Params>();\n\n assert!(params.is_ok());\n\n assert_eq!(params.unwrap().msg, \"Hello\");\n\n}\n\n\n", "file_path": "tests/querystring.rs", "rank": 76, "score": 55337.66300754594 }, { "content": "#[test]\n\nfn whatwag_tests() {\n\n fn assert_parse(input: &str, expected: &str) {\n\n let actual = parse(input).unwrap();\n\n assert_eq!(actual.to_string(), expected);\n\n }\n\n\n\n fn assert_fails(input: &str) {\n\n assert!(parse(input).is_err());\n\n }\n\n\n\n fn assert_parse_and_encoding(\n\n input: &str,\n\n expected: &str,\n\n _encoding: impl Into<Option<&'static str>>,\n\n ) {\n\n //TODO: check encoding\n\n assert_parse(input, expected);\n\n }\n\n\n\n // Basics\n", "file_path": "src/mime/parse.rs", "rank": 77, "score": 55337.66300754594 }, { "content": "#[test]\n\nfn internal_server_error_by_default() {\n\n fn run() -> http_types::Result<()> {\n\n Err(io::Error::new(io::ErrorKind::Other, \"Oh no\").into())\n\n }\n\n let err = run().unwrap_err();\n\n assert_eq!(err.status(), 500);\n\n}\n\n\n", "file_path": "tests/error.rs", "rank": 78, "score": 53797.96009962703 }, { "content": "#[test]\n\nfn normal_error_into_http_types_error() {\n\n let http_types_error: Error =\n\n std::io::Error::new(std::io::ErrorKind::Other, \"irrelevant\").into();\n\n assert_eq!(http_types_error.status(), StatusCode::InternalServerError);\n\n\n\n let http_types_error = Error::new(\n\n StatusCode::ImATeapot,\n\n std::io::Error::new(std::io::ErrorKind::Other, \"irrelevant\"),\n\n );\n\n assert_eq!(http_types_error.status(), StatusCode::ImATeapot);\n\n}\n\n\n", "file_path": "tests/error.rs", "rank": 79, "score": 52392.20016863542 }, { "content": "#[test]\n\nfn anyhow_error_into_http_types_error() {\n\n let anyhow_error =\n\n anyhow::Error::new(std::io::Error::new(std::io::ErrorKind::Other, 
\"irrelevant\"));\n\n let http_types_error: Error = anyhow_error.into();\n\n assert_eq!(http_types_error.status(), StatusCode::InternalServerError);\n\n\n\n let anyhow_error =\n\n anyhow::Error::new(std::io::Error::new(std::io::ErrorKind::Other, \"irrelevant\"));\n\n let http_types_error: Error = Error::new(StatusCode::ImATeapot, anyhow_error);\n\n assert_eq!(http_types_error.status(), StatusCode::ImATeapot);\n\n}\n\n\n", "file_path": "tests/error.rs", "rank": 80, "score": 52392.20016863542 }, { "content": "#[test]\n\nfn u16_into_status_code_in_http_types_error() {\n\n let http_types_error = Error::new(404, io::Error::new(io::ErrorKind::Other, \"Not Found\"));\n\n let http_types_error2 = Error::new(\n\n StatusCode::NotFound,\n\n io::Error::new(io::ErrorKind::Other, \"Not Found\"),\n\n );\n\n assert_eq!(http_types_error.status(), http_types_error2.status());\n\n\n\n let http_types_error = Error::from_str(404, \"Not Found\");\n\n assert_eq!(http_types_error.status(), StatusCode::NotFound);\n\n}\n\n\n", "file_path": "tests/error.rs", "rank": 81, "score": 51103.63365930163 }, { "content": "#[test]\n\nfn test_req_res_set_body() {\n\n let mut req = Request::new(Method::Get, Url::parse(\"http://example.com/\").unwrap());\n\n req.set_body(Body::empty());\n\n let mut res = Response::new(StatusCode::Ok);\n\n res.set_body(req);\n\n let body = future::block_on(async move {\n\n let mut body = Vec::new();\n\n res.read_to_end(&mut body).await.unwrap();\n\n body\n\n });\n\n assert!(body.is_empty());\n\n}\n\n\n", "file_path": "tests/req_res_body.rs", "rank": 82, "score": 49918.191972125096 }, { "content": "fn compile_probe() -> Option<ExitStatus> {\n\n let rustc = env::var_os(\"RUSTC\")?;\n\n let out_dir = env::var_os(\"OUT_DIR\")?;\n\n let probefile = Path::new(&out_dir).join(\"probe.rs\");\n\n fs::write(&probefile, PROBE).ok()?;\n\n Command::new(rustc)\n\n .stderr(Stdio::null())\n\n .arg(\"--edition=2018\")\n\n .arg(\"--crate-name=http_types_build\")\n\n 
.arg(\"--crate-type=lib\")\n\n .arg(\"--emit=metadata\")\n\n .arg(\"--out-dir\")\n\n .arg(out_dir)\n\n .arg(probefile)\n\n .status()\n\n .ok()\n\n}\n", "file_path": "build.rs", "rank": 83, "score": 48971.11308673455 }, { "content": "#[test]\n\nfn test_req_res_take_replace_body() {\n\n let mut req = Request::new(Method::Get, Url::parse(\"http://example.com/\").unwrap());\n\n req.take_body();\n\n let mut res = Response::new(StatusCode::Ok);\n\n res.replace_body(req);\n\n let body = future::block_on(async move {\n\n let mut body = Vec::new();\n\n res.read_to_end(&mut body).await.unwrap();\n\n body\n\n });\n\n assert!(body.is_empty());\n\n}\n", "file_path": "tests/req_res_body.rs", "rank": 84, "score": 48823.971704996904 }, { "content": "#[test]\n\n#[should_panic]\n\nfn fail_test_u16_into_status_code_in_http_types_error_from_str() {\n\n let _http_types_error = Error::from_str(1000, \"Incorrect status code\");\n\n}\n", "file_path": "tests/error.rs", "rank": 85, "score": 47810.833548459676 }, { "content": "#[test]\n\n#[should_panic]\n\nfn fail_test_u16_into_status_code_in_http_types_error_new() {\n\n let _http_types_error = Error::new(\n\n 1000,\n\n io::Error::new(io::ErrorKind::Other, \"Incorrect status code\"),\n\n );\n\n}\n\n\n", "file_path": "tests/error.rs", "rank": 86, "score": 47810.833548459676 }, { "content": " /// header if there aren't any. 
Or else append to the existing list of headers.\n\n pub fn append(&mut self, name: impl Into<HeaderName>, values: impl ToHeaderValues) {\n\n let name = name.into();\n\n match self.get_mut(&name) {\n\n Some(headers) => {\n\n let mut values: HeaderValues = values.to_header_values().unwrap().collect();\n\n headers.append(&mut values);\n\n }\n\n None => {\n\n self.insert(name, values);\n\n }\n\n }\n\n }\n\n\n\n /// Get a reference to a header.\n\n pub fn get(&self, name: impl Into<HeaderName>) -> Option<&HeaderValues> {\n\n self.headers.get(&name.into())\n\n }\n\n\n\n /// Get a mutable reference to a header.\n", "file_path": "src/headers/headers.rs", "rank": 87, "score": 47604.45555363076 }, { "content": " type Item = (&'a HeaderName, &'a HeaderValues);\n\n type IntoIter = Iter<'a>;\n\n\n\n #[inline]\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.iter()\n\n }\n\n}\n\n\n\nimpl<'a> IntoIterator for &'a mut Headers {\n\n type Item = (&'a HeaderName, &'a mut HeaderValues);\n\n type IntoIter = IterMut<'a>;\n\n\n\n #[inline]\n\n fn into_iter(self) -> Self::IntoIter {\n\n self.iter_mut()\n\n }\n\n}\n\n\n\nimpl Debug for Headers {\n", "file_path": "src/headers/headers.rs", "rank": 88, "score": 47602.2525721483 }, { "content": " pub fn get_mut(&mut self, name: impl Into<HeaderName>) -> Option<&mut HeaderValues> {\n\n self.headers.get_mut(&name.into())\n\n }\n\n\n\n /// Remove a header.\n\n pub fn remove(&mut self, name: impl Into<HeaderName>) -> Option<HeaderValues> {\n\n self.headers.remove(&name.into())\n\n }\n\n\n\n /// An iterator visiting all header pairs in arbitrary order.\n\n pub fn iter(&self) -> Iter<'_> {\n\n Iter {\n\n inner: self.headers.iter(),\n\n }\n\n }\n\n\n\n /// An iterator visiting all header pairs in arbitrary order, with mutable references to the\n\n /// values.\n\n pub fn iter_mut(&mut self) -> IterMut<'_> {\n\n IterMut {\n", "file_path": "src/headers/headers.rs", "rank": 89, "score": 47601.50135806004 }, { "content": " inner: 
self.headers.iter_mut(),\n\n }\n\n }\n\n\n\n /// An iterator visiting all header names in arbitrary order.\n\n pub fn names(&self) -> Names<'_> {\n\n Names {\n\n inner: self.headers.keys(),\n\n }\n\n }\n\n\n\n /// An iterator visiting all header values in arbitrary order.\n\n pub fn values(&self) -> Values<'_> {\n\n Values::new(self.headers.values())\n\n }\n\n}\n\n\n\nimpl Index<HeaderName> for Headers {\n\n type Output = HeaderValues;\n\n\n", "file_path": "src/headers/headers.rs", "rank": 90, "score": 47601.276770233715 }, { "content": "/// # Examples\n\n///\n\n/// ```\n\n/// use http_types::{Response, StatusCode};\n\n///\n\n/// let mut res = Response::new(StatusCode::Ok);\n\n/// res.insert_header(\"hello\", \"foo0\");\n\n/// assert_eq!(res[\"hello\"], \"foo0\");\n\n/// ```\n\n#[derive(Clone)]\n\npub struct Headers {\n\n pub(crate) headers: HashMap<HeaderName, HeaderValues>,\n\n}\n\n\n\nimpl Headers {\n\n /// Create a new instance.\n\n pub(crate) fn new() -> Self {\n\n Self {\n\n headers: HashMap::new(),\n\n }\n", "file_path": "src/headers/headers.rs", "rank": 91, "score": 47600.34033940719 }, { "content": " }\n\n\n\n /// Insert a header into the headers.\n\n ///\n\n /// Not that this will replace all header values for a given header name.\n\n /// If you wish to add header values for a header name that already exists\n\n /// use `Headers::append`\n\n pub fn insert(\n\n &mut self,\n\n name: impl Into<HeaderName>,\n\n values: impl ToHeaderValues,\n\n ) -> Option<HeaderValues> {\n\n let name = name.into();\n\n let values: HeaderValues = values.to_header_values().unwrap().collect();\n\n self.headers.insert(name, values)\n\n }\n\n\n\n /// Append a header to the headers.\n\n ///\n\n /// Unlike `insert` this function will not override the contents of a header, but insert a\n", "file_path": "src/headers/headers.rs", "rank": 92, "score": 47598.32637166153 }, { "content": " fn index(&self, name: &str) -> &HeaderValues {\n\n let name = 
HeaderName::from_str(name).expect(\"string slice needs to be valid ASCII\");\n\n self.get(name).expect(\"no entry found for name\")\n\n }\n\n}\n\n\n\nimpl IntoIterator for Headers {\n\n type Item = (HeaderName, HeaderValues);\n\n type IntoIter = IntoIter;\n\n\n\n /// Returns a iterator of references over the remaining items.\n\n #[inline]\n\n fn into_iter(self) -> Self::IntoIter {\n\n IntoIter {\n\n inner: self.headers.into_iter(),\n\n }\n\n }\n\n}\n\n\n\nimpl<'a> IntoIterator for &'a Headers {\n", "file_path": "src/headers/headers.rs", "rank": 93, "score": 47597.96059846812 }, { "content": " fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n f.debug_map().entries(self.headers.iter()).finish()\n\n }\n\n}\n\n\n\nimpl AsRef<Headers> for Headers {\n\n fn as_ref(&self) -> &Headers {\n\n self\n\n }\n\n}\n\n\n\nimpl AsMut<Headers> for Headers {\n\n fn as_mut(&mut self) -> &mut Headers {\n\n self\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "src/headers/headers.rs", "rank": 94, "score": 47597.45321203627 }, { "content": "//! HTTP headers.\n\n\n\nuse std::collections::HashMap;\n\nuse std::convert::Into;\n\nuse std::fmt::{self, Debug};\n\nuse std::iter::IntoIterator;\n\nuse std::ops::Index;\n\nuse std::str::FromStr;\n\n\n\nuse crate::headers::{\n\n HeaderName, HeaderValues, IntoIter, Iter, IterMut, Names, ToHeaderValues, Values,\n\n};\n\n\n\n/// A collection of HTTP Headers.\n\n///\n\n/// Headers are never manually constructed, but are part of `Request`,\n\n/// `Response`, and `Trailers`. 
Each of these types implements `AsRef<Headers>`\n\n/// and `AsMut<Headers>` so functions that want to modify headers can be generic\n\n/// over either of these traits.\n\n///\n", "file_path": "src/headers/headers.rs", "rank": 95, "score": 47596.597887539356 }, { "content": " /// Returns a reference to the value corresponding to the supplied name.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics if the name is not present in `Headers`.\n\n #[inline]\n\n fn index(&self, name: HeaderName) -> &HeaderValues {\n\n self.get(name).expect(\"no entry found for name\")\n\n }\n\n}\n\n\n\nimpl Index<&str> for Headers {\n\n type Output = HeaderValues;\n\n\n\n /// Returns a reference to the value corresponding to the supplied name.\n\n ///\n\n /// # Panics\n\n ///\n\n /// Panics if the name is not present in `Headers`.\n\n #[inline]\n", "file_path": "src/headers/headers.rs", "rank": 96, "score": 47593.38822314193 }, { "content": "\n\n #[test]\n\n fn index_into_headers() {\n\n let mut headers = Headers::new();\n\n headers.insert(\"hello\", \"foo0\");\n\n assert_eq!(headers[\"hello\"], \"foo0\");\n\n assert_eq!(headers.get(\"hello\").unwrap(), \"foo0\");\n\n }\n\n\n\n #[test]\n\n fn test_debug_single() {\n\n let mut headers = Headers::new();\n\n headers.insert(\"single\", \"foo0\");\n\n assert_eq!(format!(\"{:?}\", headers), r#\"{\"single\": \"foo0\"}\"#);\n\n }\n\n\n\n #[test]\n\n fn test_debug_multiple() {\n\n let mut headers = Headers::new();\n\n headers.append(\"multi\", \"foo0\");\n\n headers.append(\"multi\", \"foo1\");\n\n assert_eq!(format!(\"{:?}\", headers), r#\"{\"multi\": [\"foo0\", \"foo1\"]}\"#);\n\n }\n\n}\n", "file_path": "src/headers/headers.rs", "rank": 97, "score": 47589.274632675886 }, { "content": " use std::str::FromStr;\n\n\n\n const STATIC_HEADER: HeaderName = HeaderName::from_lowercase_str(\"hello\");\n\n\n\n #[test]\n\n fn test_header_name_static_non_static() -> crate::Result<()> {\n\n let static_header = HeaderName::from_lowercase_str(\"hello\");\n\n let 
non_static_header = HeaderName::from_str(\"hello\")?;\n\n\n\n let mut headers = Headers::new();\n\n headers.append(STATIC_HEADER, \"foo0\");\n\n headers.append(static_header.clone(), \"foo1\");\n\n headers.append(non_static_header.clone(), \"foo2\");\n\n\n\n assert_eq!(headers[STATIC_HEADER], [\"foo0\", \"foo1\", \"foo2\",][..]);\n\n assert_eq!(headers[static_header], [\"foo0\", \"foo1\", \"foo2\",][..]);\n\n assert_eq!(headers[non_static_header], [\"foo0\", \"foo1\", \"foo2\",][..]);\n\n\n\n Ok(())\n\n }\n", "file_path": "src/headers/headers.rs", "rank": 98, "score": 47585.96650551681 }, { "content": "/// https://tools.ietf.org/html/rfc7230#section-3.2.6\n\nfn tchar(c: char) -> bool {\n\n matches!(\n\n c, 'a'..='z'\n\n | 'A'..='Z'\n\n | '0'..='9'\n\n | '!'\n\n | '#'\n\n | '$'\n\n | '%'\n\n | '&'\n\n | '\\''\n\n | '*'\n\n | '+'\n\n | '-'\n\n | '.'\n\n | '^'\n\n | '_'\n\n | '`'\n\n | '|'\n\n | '~'\n\n )\n\n}\n\n\n", "file_path": "src/parse_utils.rs", "rank": 99, "score": 47289.0461314275 } ]
Rust
sflk-lang/src/parser.rs
anima-libera/sflk
973e7435ec44e5d775aad5737ad3835b6558d0f2
use crate::ast::{Chop, Comment, Expr, Node, Program, Stmt, TargetExpr}; use crate::scu::{Loc, SourceCodeUnit}; use crate::tokenizer::{BinOp, CharReadingHead, Kw, Matched, StmtBinOp, Tok, Tokenizer}; use std::{collections::VecDeque, rc::Rc}; pub struct ParsingWarning { } pub struct TokBuffer { crh: CharReadingHead, tokenizer: Tokenizer, toks_ahead: VecDeque<(Tok, Loc)>, } impl TokBuffer { pub fn from(crh: CharReadingHead) -> TokBuffer { TokBuffer { crh, tokenizer: Tokenizer::new(), toks_ahead: VecDeque::new(), } } fn prepare_max_index(&mut self, n: usize) { if self.toks_ahead.len() < n + 1 { self.toks_ahead.reserve(n - self.toks_ahead.len()); } while self.toks_ahead.len() < n + 1 { self.toks_ahead .push_back(self.tokenizer.pop_tok(&mut self.crh)); } } fn prepare_all(&mut self) { loop { self.toks_ahead .push_back(self.tokenizer.pop_tok(&mut self.crh)); if matches!(self.toks_ahead.back().map(|t| &t.0), Some(Tok::Eof)) { break; } } } fn peek(&mut self, n: usize) -> &(Tok, Loc) { self.prepare_max_index(n); &self.toks_ahead[n] } fn prepared(&self) -> &VecDeque<(Tok, Loc)> { &self.toks_ahead } fn pop(&mut self) -> (Tok, Loc) { self.peek(0); let tok_loc_opt = self.toks_ahead.pop_front(); if let Some(tok_loc) = tok_loc_opt { tok_loc } else { panic!("bug: no token to pop") } } fn disc(&mut self) { if self.toks_ahead.pop_front().is_none() { panic!("bug: token discarded but not peeked before") } } } impl TokBuffer { fn scu(&self) -> Rc<SourceCodeUnit> { self.crh.scu() } } pub struct Parser {} impl Parser { pub fn new() -> Parser { Parser {} } } impl Parser { pub fn parse_program(&mut self, tb: &mut TokBuffer) -> Node<Program> { let stmts = self.parse_all_as_stmts(tb); Node::from(Program { stmts }, Loc::total_of(tb.scu())) } fn parse_all_as_stmts(&mut self, tb: &mut TokBuffer) -> Vec<Node<Stmt>> { let mut stmts: Vec<Node<Stmt>> = Vec::new(); loop { let comments = self.parse_comments(tb); if matches!(tb.peek(0).0, Tok::Eof) { break; } let mut stmt_node = self.parse_stmt(tb); 
stmt_node.add_left_comments(comments); stmts.push(stmt_node); } stmts } fn parse_stmts(&mut self, tb: &mut TokBuffer) -> Vec<Node<Stmt>> { let mut stmts: Vec<Node<Stmt>> = Vec::new(); while let Some(stmt) = self.maybe_parse_stmt(tb) { stmts.push(stmt); } stmts } fn parse_stmt(&mut self, tb: &mut TokBuffer) -> Node<Stmt> { let left_comments = self.parse_comments(tb); let mut stmt_node = if let Some(stmt_node) = self.maybe_parse_stmt(tb) { stmt_node } else { let (_tok, loc) = tb.pop(); Node::from(Stmt::Invalid, loc) }; stmt_node.add_left_comments(left_comments); stmt_node } fn maybe_parse_stmt(&mut self, tb: &mut TokBuffer) -> Option<Node<Stmt>> { let (first_tok, first_loc) = tb.peek(0); if let Tok::Kw(kw) = first_tok { match kw { Kw::Np => { let kw_loc = first_loc.clone(); tb.disc(); Some(Node::from(Stmt::Nop, kw_loc)) } Kw::Pr => { let kw_loc = first_loc.clone(); tb.disc(); let expr_node = self.parse_expr(tb); let full_loc = &kw_loc + expr_node.loc(); Some(Node::from(Stmt::Print { expr: expr_node }, full_loc)) } Kw::Nl => { let kw_loc = first_loc.clone(); tb.disc(); Some(Node::from(Stmt::Newline, kw_loc)) } Kw::Ev => { let kw_loc = first_loc.clone(); tb.disc(); let expr_node = self.parse_expr(tb); let full_loc = &kw_loc + expr_node.loc(); Some(Node::from(Stmt::Evaluate { expr: expr_node }, full_loc)) } Kw::Do => { let kw_loc = first_loc.clone(); tb.disc(); let expr_node = self.parse_expr(tb); let full_loc = &kw_loc + expr_node.loc(); Some(Node::from(Stmt::Do { expr: expr_node }, full_loc)) } Kw::Dh => { let kw_loc = first_loc.clone(); tb.disc(); let expr_node = self.parse_expr(tb); let full_loc = &kw_loc + expr_node.loc(); Some(Node::from(Stmt::DoHere { expr: expr_node }, full_loc)) } Kw::Fh => { let kw_loc = first_loc.clone(); tb.disc(); let expr_node = self.parse_expr(tb); let full_loc = &kw_loc + expr_node.loc(); Some(Node::from(Stmt::DoFileHere { expr: expr_node }, full_loc)) } Kw::If => { let kw_loc = first_loc.clone(); tb.disc(); let cond_expr_node = 
self.parse_expr(tb); let th_stmt_node = self.maybe_parse_stmt_extension_stmt(tb, Kw::Th); let el_stmt_node = self.maybe_parse_stmt_extension_stmt(tb, Kw::El); let mut full_loc = kw_loc; if let Some(stmt_node) = &th_stmt_node { full_loc += stmt_node.loc(); } if let Some(stmt_node) = &el_stmt_node { full_loc += stmt_node.loc(); } Some(Node::from( Stmt::If { cond_expr: cond_expr_node, th_stmt: th_stmt_node.map(Box::new), el_stmt: el_stmt_node.map(Box::new), }, full_loc, )) } _ => { let kw_loc = first_loc.clone(); tb.disc(); Some(Node::from(Stmt::Invalid, kw_loc)) } } } else if let Some(stmt) = self.maybe_parse_assign_stmt(tb) { Some(stmt) } else { None } } fn maybe_parse_stmt_extension_stmt( &mut self, tb: &mut TokBuffer, kw: Kw, ) -> Option<Node<Stmt>> { let (tok, _) = tb.peek(0); match tok { Tok::Kw(tok_kw) if *tok_kw == kw => { tb.disc(); Some(self.parse_stmt(tb)) } _ => None, } } fn maybe_parse_assign_stmt(&mut self, tb: &mut TokBuffer) -> Option<Node<Stmt>> { tb.prepare_max_index(1); let prepared = tb.prepared(); match (&prepared[0], &prepared[1]) { ((Tok::Name { string, .. }, name_loc), (Tok::StmtBinOp(StmtBinOp::ToLeft), _)) => { let target_node = Node::from(TargetExpr::VariableName(string.clone()), name_loc.clone()); tb.disc(); tb.disc(); let expr_node = self.parse_expr(tb); let total_loc = target_node.loc() + expr_node.loc(); Some(Node::from( Stmt::Assign { target: target_node, expr: expr_node, }, total_loc, )) } _ => None, } } fn parse_expr(&mut self, tb: &mut TokBuffer) -> Node<Expr> { let expr_node = self.parse_expr_beg(tb); let mut chops: Vec<Node<Chop>> = Vec::new(); while let Some(chop_node) = self.maybe_parse_chop(tb) { chops.push(chop_node); } if chops.is_empty() { expr_node } else { let loc = expr_node.loc() + chops.last().unwrap().loc(); Node::from( Expr::Chain { init: Box::new(expr_node), chops, }, loc, ) } } fn parse_expr_beg(&mut self, tb: &mut TokBuffer) -> Node<Expr> { let (tok, left_loc) = tb.pop(); match tok { Tok::Name { string, .. 
} => Node::from(Expr::VariableName(string), left_loc), Tok::Integer(integer) => Node::from(Expr::IntegerLiteral(integer), left_loc), Tok::String { content, .. } => Node::from(Expr::StringLiteral(content), left_loc), Tok::Left(Matched::Curly) => { let stmts = self.parse_stmts(tb); let (right_tok, right_loc) = tb.pop(); match right_tok { Tok::Right(Matched::Curly) => { Node::from(Expr::BlockLiteral(stmts), left_loc + right_loc) } _ => panic!("TODO: generate an error here"), } } Tok::Left(Matched::Paren) => { let expr_node = self.parse_expr(tb); let (right_tok, right_loc) = tb.pop(); match right_tok { Tok::Right(Matched::Paren) => { Node::from(expr_node.unwrap(), left_loc + right_loc) } _ => panic!("TODO: generate an error here"), } } _ => Node::from(Expr::Invalid, left_loc), } } fn maybe_parse_chop(&mut self, tb: &mut TokBuffer) -> Option<Node<Chop>> { let (op_tok, op_loc) = tb.peek(0).clone(); if let Tok::BinOp(op) = op_tok { tb.disc(); let expr_node = self.parse_expr_beg(tb); let full_loc = &op_loc + expr_node.loc(); match op { BinOp::Plus => Some(Node::from(Chop::Plus(expr_node), full_loc)), BinOp::Minus => Some(Node::from(Chop::Minus(expr_node), full_loc)), BinOp::Star => Some(Node::from(Chop::Star(expr_node), full_loc)), BinOp::Slash => Some(Node::from(Chop::Slash(expr_node), full_loc)), BinOp::ToRight => Some(Node::from(Chop::ToRight(expr_node), full_loc)), } } else { None } } fn parse_comments(&mut self, tb: &mut TokBuffer) -> Vec<Node<Comment>> { let mut comments: Vec<Node<Comment>> = Vec::new(); while let Some(comment) = self.maybe_parse_comment(tb) { comments.push(comment); } comments } fn maybe_parse_comment(&mut self, tb: &mut TokBuffer) -> Option<Node<Comment>> { let (tok, loc) = tb.peek(0); if let Tok::Comment { content, delimitation_thickness, no_end_hash_warning, } = tok { let comment_node = Node::from( Comment::new(content.to_owned(), delimitation_thickness.to_owned()), loc.to_owned(), ); tb.disc(); Some(comment_node) } else { None } } }
use crate::ast::{Chop, Comment, Expr, Node, Program, Stmt, TargetExpr}; use crate::scu::{Loc, SourceCodeUnit}; use crate::tokenizer::{BinOp, CharReadingHead, Kw, Matched, StmtBinOp, Tok, Tokenizer}; use std::{collections::VecDeque, rc::Rc}; pub struct ParsingWarning { } pub struct TokBuffer { crh: CharReadingHead, tokenizer: Tokenizer, toks_ahead: VecDeque<(Tok, Loc)>, } impl TokBuffer { pub fn from(crh: CharReadingHead) -> TokBuffer { TokBuffer { crh, tokenizer: Tokenizer::new(), toks_ahead: VecDeque::new(), } } fn prepare_max_index(&mut self, n: usize) { if self.toks_ahead.len() < n + 1 { self.toks_ahead.reserve(n - self.toks_ahead.len()); } while self.toks_ahead.len() < n + 1 { self.toks_ahead .push_back(self.tokenizer.pop_tok(&mut self.crh)); } } fn prepare_all(&mut self) { loop { self.toks_ahead .push_back(self.tokenizer.pop_tok(&mut self.crh)); if matches!(self.toks_ahead.back().map(|t| &t.0), Some(Tok::Eof)) { break; } } } fn peek(&mut self, n: usize) -> &(Tok, Loc) { self.prepare_max_index(n); &self.toks_ahead[n] } fn prepared(&self) -> &VecDeque<(Tok, Loc)> { &self.toks_ahead } fn pop(&mut self) -> (Tok, Loc) { self.peek(0); let tok_loc_opt = self.toks_ahead.pop_front(); if let Some(tok_loc) = tok_loc_opt { tok_loc } else { panic!("bug: no token to pop") } } fn disc(&mut self) { if self.toks_ahead.pop_front().is_none() { panic!("bug: token discarded but not peeked before") } } } impl TokBuffer { fn scu(&self) -> Rc<SourceCodeUnit> { self.crh.scu() } } pub struct Parser {} impl Parser { pub fn new() -> Parser { Parser {} } } impl Parser { pub fn parse_program(&mut self, tb: &mut TokBuffer) -> Node<Program> { let stmts = self.parse_all_as_stmts(tb); Node::from(Program { stmts }, Loc::total_of(tb.scu())) } fn parse_all_as_stmts(&mut self, tb: &mut TokBuffer) -> Vec<Node<Stmt>> { let mut stmts: Vec<Node<Stmt>> = Vec::new(); loop { let comments = self.parse_comments(tb); if matches!(tb.peek(0).0, Tok::Eof) { break; } let mut stmt_node = self.parse_stmt(tb); 
stmt_node.add_left_comments(comments); stmts.push(stmt_node); } stmts } fn parse_stmts(&mut self, tb: &mut TokBuffer) -> Vec<Node<Stmt>> { let mut stmts: Vec<Node<Stmt>> = Vec::new(); while let Some(stmt) = self.maybe_parse_stmt(tb) { stmts.push(stmt); } stmts } fn parse_stmt(&mut self, tb: &mut TokBuffer) -> Node<Stmt> { let left_comments = self.parse_comments(tb); let mut stmt_node = if let Some(stmt_node) = self.maybe_parse_stmt(tb) { stmt_node } else { let (_tok, loc) = tb.pop(); Node::from(Stmt::Invalid, loc) }; stmt_node.add_left_comments(left_comments); stmt_node } fn maybe_parse_stmt(&mut self, tb: &mut TokBuffer) -> Option<Node<Stmt>> { let (first_tok, first_loc) = tb.peek(0); if let Tok::Kw(kw) = first_tok { match kw { Kw::Np => { let kw_loc = first_loc.clone(); tb.disc(); Some(Node::from(Stmt::Nop, kw_loc)) } Kw::Pr => { let kw_loc = first_loc.clone(); tb.disc(); let expr_node = self.parse_expr(tb); let full_loc = &kw_loc + expr_node.loc(); Some(Node::from(Stmt::Print { expr: expr_node }, full_loc)) } Kw::Nl => { let kw_loc = first_loc.clone(); tb.disc(); Some(Node::from(Stmt::Newline, kw_loc)) } Kw::Ev => { let kw_loc = first_loc.clone(); tb.disc(); let expr_node = self.parse_expr(tb); let full_loc = &kw_loc + expr_node.loc(); Some(Node::from(Stmt::Evaluate { expr: expr_node }, full_loc)) } Kw::Do => { let kw_loc = first_loc.clone(); tb.disc(); let expr_node = self.parse_expr(tb); let full_loc = &kw_loc + expr_node.loc(); Some(Node::from(Stmt::Do { expr: expr_node }, full_loc)) } Kw::Dh => { let kw_loc = first_loc.clone(); tb.disc(); let expr_node = self.parse_expr(tb); let full_loc = &kw_loc + expr_node.loc(); Some(Node::from(Stmt::DoHere { expr: expr_node }, full_loc)) } Kw::Fh => { let kw_loc = first_loc.clone(); tb.disc
le let Some(chop_node) = self.maybe_parse_chop(tb) { chops.push(chop_node); } if chops.is_empty() { expr_node } else { let loc = expr_node.loc() + chops.last().unwrap().loc(); Node::from( Expr::Chain { init: Box::new(expr_node), chops, }, loc, ) } } fn parse_expr_beg(&mut self, tb: &mut TokBuffer) -> Node<Expr> { let (tok, left_loc) = tb.pop(); match tok { Tok::Name { string, .. } => Node::from(Expr::VariableName(string), left_loc), Tok::Integer(integer) => Node::from(Expr::IntegerLiteral(integer), left_loc), Tok::String { content, .. } => Node::from(Expr::StringLiteral(content), left_loc), Tok::Left(Matched::Curly) => { let stmts = self.parse_stmts(tb); let (right_tok, right_loc) = tb.pop(); match right_tok { Tok::Right(Matched::Curly) => { Node::from(Expr::BlockLiteral(stmts), left_loc + right_loc) } _ => panic!("TODO: generate an error here"), } } Tok::Left(Matched::Paren) => { let expr_node = self.parse_expr(tb); let (right_tok, right_loc) = tb.pop(); match right_tok { Tok::Right(Matched::Paren) => { Node::from(expr_node.unwrap(), left_loc + right_loc) } _ => panic!("TODO: generate an error here"), } } _ => Node::from(Expr::Invalid, left_loc), } } fn maybe_parse_chop(&mut self, tb: &mut TokBuffer) -> Option<Node<Chop>> { let (op_tok, op_loc) = tb.peek(0).clone(); if let Tok::BinOp(op) = op_tok { tb.disc(); let expr_node = self.parse_expr_beg(tb); let full_loc = &op_loc + expr_node.loc(); match op { BinOp::Plus => Some(Node::from(Chop::Plus(expr_node), full_loc)), BinOp::Minus => Some(Node::from(Chop::Minus(expr_node), full_loc)), BinOp::Star => Some(Node::from(Chop::Star(expr_node), full_loc)), BinOp::Slash => Some(Node::from(Chop::Slash(expr_node), full_loc)), BinOp::ToRight => Some(Node::from(Chop::ToRight(expr_node), full_loc)), } } else { None } } fn parse_comments(&mut self, tb: &mut TokBuffer) -> Vec<Node<Comment>> { let mut comments: Vec<Node<Comment>> = Vec::new(); while let Some(comment) = self.maybe_parse_comment(tb) { comments.push(comment); } 
comments } fn maybe_parse_comment(&mut self, tb: &mut TokBuffer) -> Option<Node<Comment>> { let (tok, loc) = tb.peek(0); if let Tok::Comment { content, delimitation_thickness, no_end_hash_warning, } = tok { let comment_node = Node::from( Comment::new(content.to_owned(), delimitation_thickness.to_owned()), loc.to_owned(), ); tb.disc(); Some(comment_node) } else { None } } }
(); let expr_node = self.parse_expr(tb); let full_loc = &kw_loc + expr_node.loc(); Some(Node::from(Stmt::DoFileHere { expr: expr_node }, full_loc)) } Kw::If => { let kw_loc = first_loc.clone(); tb.disc(); let cond_expr_node = self.parse_expr(tb); let th_stmt_node = self.maybe_parse_stmt_extension_stmt(tb, Kw::Th); let el_stmt_node = self.maybe_parse_stmt_extension_stmt(tb, Kw::El); let mut full_loc = kw_loc; if let Some(stmt_node) = &th_stmt_node { full_loc += stmt_node.loc(); } if let Some(stmt_node) = &el_stmt_node { full_loc += stmt_node.loc(); } Some(Node::from( Stmt::If { cond_expr: cond_expr_node, th_stmt: th_stmt_node.map(Box::new), el_stmt: el_stmt_node.map(Box::new), }, full_loc, )) } _ => { let kw_loc = first_loc.clone(); tb.disc(); Some(Node::from(Stmt::Invalid, kw_loc)) } } } else if let Some(stmt) = self.maybe_parse_assign_stmt(tb) { Some(stmt) } else { None } } fn maybe_parse_stmt_extension_stmt( &mut self, tb: &mut TokBuffer, kw: Kw, ) -> Option<Node<Stmt>> { let (tok, _) = tb.peek(0); match tok { Tok::Kw(tok_kw) if *tok_kw == kw => { tb.disc(); Some(self.parse_stmt(tb)) } _ => None, } } fn maybe_parse_assign_stmt(&mut self, tb: &mut TokBuffer) -> Option<Node<Stmt>> { tb.prepare_max_index(1); let prepared = tb.prepared(); match (&prepared[0], &prepared[1]) { ((Tok::Name { string, .. }, name_loc), (Tok::StmtBinOp(StmtBinOp::ToLeft), _)) => { let target_node = Node::from(TargetExpr::VariableName(string.clone()), name_loc.clone()); tb.disc(); tb.disc(); let expr_node = self.parse_expr(tb); let total_loc = target_node.loc() + expr_node.loc(); Some(Node::from( Stmt::Assign { target: target_node, expr: expr_node, }, total_loc, )) } _ => None, } } fn parse_expr(&mut self, tb: &mut TokBuffer) -> Node<Expr> { let expr_node = self.parse_expr_beg(tb); let mut chops: Vec<Node<Chop>> = Vec::new(); whi
random
[ { "content": "struct Comments {\n\n\tleft_comments: Vec<Comment>,\n\n\tinternal_comments: Vec<Comment>,\n\n}\n\n\n\nimpl Comments {\n\n\tfn new() -> Comments {\n\n\t\tComments {\n\n\t\t\tleft_comments: Vec::new(),\n\n\t\t\tinternal_comments: Vec::new(),\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl<T> Node<T> {\n\n\tpub fn loc(&self) -> &Loc {\n\n\t\t&self.loc\n\n\t}\n\n\n\n\tpub fn add_loc(mut self, loc: Loc) -> Node<T> {\n", "file_path": "sflk-lang/src/ast.rs", "rank": 0, "score": 68579.70982262268 }, { "content": "pub fn symb(name: &str) -> CompletionItem {\n\n\tCompletionItem {\n\n\t\tlabel: name.to_string(),\n\n\t\t..Default::default()\n\n\t}\n\n}\n", "file_path": "sflk-lsp/src/completion.rs", "rank": 1, "score": 62788.05831466205 }, { "content": "pub fn escape_string(string: &str, escape_style: &Style) -> String {\n\n\tlet mut ret = String::new();\n\n\tstring.chars().for_each(|ch| {\n\n\t\tif let Some(escaped) = escape_character(ch) {\n\n\t\t\tret.extend(format!(\"{}{}{}\", escape_style.0, escaped, escape_style.1).chars());\n\n\t\t} else {\n\n\t\t\tret.push(ch);\n\n\t\t}\n\n\t});\n\n\tret\n\n}\n\n\n", "file_path": "sflk-lang/src/utils.rs", "rank": 2, "score": 55823.193903961546 }, { "content": "struct Settings {\n\n\tpath: String,\n\n\troot_filename: Option<String>,\n\n\tdebug_mode: bool,\n\n\twants_help: bool,\n\n\twants_version: bool,\n\n}\n\n\n\nimpl Settings {\n\n\tfn from_args() -> Settings {\n\n\t\tlet mut args = std::env::args();\n\n\t\tlet mut settings = Settings {\n\n\t\t\tpath: args.next().unwrap_or(String::from(\"sflk\")),\n\n\t\t\troot_filename: None,\n\n\t\t\tdebug_mode: false,\n\n\t\t\twants_help: false,\n\n\t\t\twants_version: false,\n\n\t\t};\n\n\t\tfor arg in args {\n\n\t\t\tif arg == \"-d\" || arg == \"--debug\" {\n", "file_path": "sflk-lang/src/main.rs", "rank": 3, "score": 40875.71423406876 }, { "content": "#[derive(Debug, Clone)]\n\nstruct Indent {\n\n\tis_context: bool,\n\n\tstyle: Style,\n\n}\n\n\n\nconst INDENT_START: &str = \"┌\";\n\nconst 
INDENT_NORMAL: &str = \"│\";\n\nconst INDENT_WEAK: &str = \"╎\";\n\n\n\nimpl IndentedLog {\n\n\tpub fn print(&self, writer: &mut impl std::fmt::Write) {\n\n\t\tlet mut indents: Vec<Indent> = Vec::new();\n\n\t\tlet mut is_newline: bool = true;\n\n\t\tfor item in &self.items {\n\n\t\t\tmatch item {\n\n\t\t\t\tItem::IndentAdd { string, indent } => {\n\n\t\t\t\t\tprint_indents(writer, &indents, Some(indent));\n\n\t\t\t\t\twriteln!(writer, \"{}{}{}\", indent.style.0, string, indent.style.1)\n\n\t\t\t\t\t\t.expect(\"TODO\");\n\n\t\t\t\t\tis_newline = true;\n", "file_path": "sflk-lang/src/log.rs", "rank": 4, "score": 40875.71423406876 }, { "content": "#[derive(Debug, Clone)]\n\nstruct Cx {\n\n\tvarmap: HashMap<String, Obj>,\n\n}\n\n\n\nimpl Cx {\n\n\tfn new() -> Cx {\n\n\t\tCx {\n\n\t\t\tvarmap: HashMap::new(),\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl Cx {\n\n\tfn import(&mut self, other: Cx) {\n\n\t\tself.varmap.extend(other.varmap);\n\n\t}\n\n}\n\n\n", "file_path": "sflk-lang/src/machine.rs", "rank": 5, "score": 40875.71423406876 }, { "content": "#[derive(Debug)]\n\nstruct Backend {\n\n\tclient: Client,\n\n}\n\n\n\n#[tower_lsp::async_trait]\n\nimpl LanguageServer for Backend {\n\n\tasync fn initialize(&self, _: InitializeParams) -> Result<InitializeResult> {\n\n\t\tOk(InitializeResult {\n\n\t\t\tcapabilities: ServerCapabilities {\n\n\t\t\t\t// We always sync documents\n\n\t\t\t\ttext_document_sync: Some(TextDocumentSyncCapability::Kind(\n\n\t\t\t\t\tTextDocumentSyncKind::Full,\n\n\t\t\t\t)),\n\n\t\t\t\t// We know how to react on hover\n\n\t\t\t\thover_provider: Some(HoverProviderCapability::Simple(true)),\n\n\t\t\t\t// We know how to provide completion\n\n\t\t\t\tcompletion_provider: Some(CompletionOptions {\n\n\t\t\t\t\ttrigger_characters: Some(vec![\n\n\t\t\t\t\t\t\" \".to_string(),\n\n\t\t\t\t\t\t\"=\".to_string(),\n", "file_path": "sflk-lsp/src/main.rs", "rank": 6, "score": 40875.71423406876 }, { "content": "fn main() {\n\n\tlet settings = 
Settings::from_args();\n\n\n\n\tlet mut did_something = false;\n\n\tif settings.wants_version {\n\n\t\tprintln!(\n\n\t\t\t\"SFLK reference interpreter, version {}.{}.{} ({})\",\n\n\t\t\t0, 1, 0, \"indev\"\n\n\t\t);\n\n\t\tprintln!(\"{}\", NO_WARRANTY_NOTE);\n\n\t\tdid_something = true;\n\n\t}\n\n\tif settings.wants_help {\n\n\t\tprint!(\"{}\", HELP_MESSAGE);\n\n\t\tdid_something = true;\n\n\t}\n\n\tif settings.root_filename.is_none() && !did_something {\n\n\t\tprintln!(\n\n\t\t\t\"No filename provided, nothing to do. Try `{} --help` for usage.\",\n\n\t\t\tsettings.path\n\n\t\t);\n\n\t}\n\n\n\n\tlet mut mem = machine::Mem::new(settings.debug_mode);\n\n\tmem.exec_file(settings.root_filename.unwrap());\n\n\tif let Some(debug_mem) = mem.debug_mem_opt {\n\n\t\tdebug_mem.log.print_to_stdout();\n\n\t}\n\n}\n", "file_path": "sflk-lang/src/main.rs", "rank": 7, "score": 40875.71423406876 }, { "content": "fn print_indents(\n\n\twriter: &mut impl std::fmt::Write,\n\n\tindents: &Vec<Indent>,\n\n\tadd_start: Option<&Indent>,\n\n) {\n\n\tlet last_cx_index = match add_start {\n\n\t\tSome(indent) if indent.is_context => indents.len(),\n\n\t\t_ => indents\n\n\t\t\t.iter()\n\n\t\t\t.rposition(|indent| indent.is_context)\n\n\t\t\t.unwrap_or(0),\n\n\t};\n\n\tfor indent in indents[..last_cx_index].iter() {\n\n\t\twrite!(\n\n\t\t\twriter,\n\n\t\t\t\"{}{}{}\",\n\n\t\t\tindent.style.0,\n\n\t\t\tif indent.is_context {\n\n\t\t\t\tINDENT_NORMAL\n\n\t\t\t} else {\n", "file_path": "sflk-lang/src/log.rs", "rank": 8, "score": 39812.13543764086 }, { "content": "fn print_indents(\n\n\twriter: &mut impl std::fmt::Write,\n\n\tindent_styles: &Vec<(Style, Tube)>,\n\n\tright_override: RightTube,\n\n) {\n\n\tif let Some(((right_style, _), indents_left)) = indent_styles.split_last() {\n\n\t\tfor (style, tube) in indents_left {\n\n\t\t\twrite!(writer, \"{}{}{}\", style.0, tube.str(), 
style.1).expect(\"error\");\n\n\t\t}\n\n\t\twrite!(\n\n\t\t\twriter,\n\n\t\t\t\"{}{}{}\",\n\n\t\t\tright_style.0,\n\n\t\t\tright_override.str(),\n\n\t\t\tright_style.1\n\n\t\t)\n\n\t\t.expect(\"error\");\n\n\t}\n\n}\n", "file_path": "sflk-lang/src/stringtree.rs", "rank": 9, "score": 39812.13543764086 }, { "content": "struct ExCx {\n\n\tcx: Cx,\n\n\ti: Vec<usize>,\n\n\tflow: Flow,\n\n}\n\n\n\nimpl ExCx {\n\n\tfn new() -> ExCx {\n\n\t\tExCx {\n\n\t\t\tcx: Cx::new(),\n\n\t\t\ti: Vec::new(),\n\n\t\t\tflow: Flow::Next,\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "sflk-lang/src/machine.rs", "rank": 10, "score": 39812.13543764086 }, { "content": "pub trait Treeable {\n\n\tfn tree(&self, loc: &Loc) -> StringTree;\n\n}\n\n\n\nimpl<T> From<&Node<T>> for StringTree\n\nwhere\n\n\tT: Treeable,\n\n{\n\n\tfn from(node: &Node<T>) -> StringTree {\n\n\t\tnode.content.tree(node.loc())\n\n\t}\n\n}\n\n\n\nimpl Treeable for Chop {\n\n\tfn tree(&self, loc: &Loc) -> StringTree {\n\n\t\tmatch self {\n\n\t\t\tChop::Plus(expr_node) => StringTree::new_node(\n\n\t\t\t\tformat!(\"chop plus\"),\n\n\t\t\t\tstyles::NORMAL,\n\n\t\t\t\tvec![StringTree::from(expr_node)],\n", "file_path": "sflk-lang/src/ast.rs", "rank": 11, "score": 39002.06096856446 }, { "content": "fn escape_character(ch: char) -> Option<String> {\n\n\tmatch ch {\n\n\t\t// ASCII range escapes\n\n\t\t'\\\"' => Some(String::from(\"\\\\\\\"\")),\n\n\t\t'\\\\' => Some(String::from(\"\\\\\\\\\")),\n\n\t\t'\\n' => Some(String::from(\"\\\\n\")),\n\n\t\t'\\t' => Some(String::from(\"\\\\t\")),\n\n\t\t'\\x1b' => Some(String::from(\"\\\\e\")),\n\n\t\t'\\x07' => Some(String::from(\"\\\\a\")),\n\n\t\t'\\x08' => Some(String::from(\"\\\\b\")),\n\n\t\t'\\x0b' => Some(String::from(\"\\\\v\")),\n\n\t\t'\\x0c' => Some(String::from(\"\\\\f\")),\n\n\t\t'\\r' => Some(String::from(\"\\\\r\")),\n\n\t\tch if (ch as u32) < (' ' as u32) => Some(format!(\"\\\\x{:02x}\", ch as u32)),\n\n\t\t// Non-ASCII range espaces\n\n\t\t'�' => 
Some(String::from(\"\\\\?\")),\n\n\t\t// Not to be escaped\n\n\t\t_ => None,\n\n\t}\n\n}\n", "file_path": "sflk-lang/src/utils.rs", "rank": 12, "score": 31013.61374933135 }, { "content": "\t}\n\n}\n\n\n\nimpl Block {\n\n\tpub fn new(stmts: Vec<Stmt>) -> Block {\n\n\t\tBlock { stmts }\n\n\t}\n\n}\n\n\n\nimpl Block {\n\n\tpub fn clone_multiply(&self, n: usize) -> Block {\n\n\t\tlet mut stmts = Vec::new();\n\n\t\tfor _ in 0..n {\n\n\t\t\tfor stmt in &self.stmts {\n\n\t\t\t\tstmts.push(stmt.clone());\n\n\t\t\t}\n\n\t\t}\n\n\t\tBlock { stmts }\n\n\t}\n\n}\n", "file_path": "sflk-lang/src/program.rs", "rank": 13, "score": 28514.525646104044 }, { "content": "\t},\n\n\tEvaluate {\n\n\t\texpr: Expr,\n\n\t},\n\n\tIf {\n\n\t\tcond_expr: Expr,\n\n\t\tth_stmt: Option<Box<Stmt>>,\n\n\t\tel_stmt: Option<Box<Stmt>>,\n\n\t},\n\n\tInvalid, // TODO\n\n}\n\n\n\nimpl From<&Stmt> for StringTree {\n\n\tfn from(stmt: &Stmt) -> StringTree {\n\n\t\tmatch stmt {\n\n\t\t\tStmt::Nop => StringTree::new_leaf(String::from(\"nop\"), styles::NORMAL),\n\n\t\t\tStmt::Print { expr } => StringTree::new_node(\n\n\t\t\t\tString::from(\"print\"),\n\n\t\t\t\tstyles::NORMAL,\n\n\t\t\t\tvec![StringTree::from(expr)],\n", "file_path": "sflk-lang/src/program.rs", "rank": 14, "score": 28514.51026191527 }, { "content": "\t\t\t\tvec![StringTree::from(expr)],\n\n\t\t\t),\n\n\t\t\tStmt::Evaluate { expr } => StringTree::new_node(\n\n\t\t\t\tString::from(\"evaluate\"),\n\n\t\t\t\tstyles::NORMAL,\n\n\t\t\t\tvec![StringTree::from(expr)],\n\n\t\t\t),\n\n\t\t\tStmt::If {\n\n\t\t\t\tcond_expr,\n\n\t\t\t\tth_stmt,\n\n\t\t\t\tel_stmt,\n\n\t\t\t} => StringTree::new_node(String::from(\"if\"), styles::NORMAL, {\n\n\t\t\t\tlet mut vec: Vec<StringTree> = Vec::with_capacity(3);\n\n\t\t\t\tvec.push(StringTree::from(cond_expr));\n\n\t\t\t\tif let Some(stmt) = th_stmt {\n\n\t\t\t\t\tvec.push(StringTree::from(&**stmt));\n\n\t\t\t\t}\n\n\t\t\t\tif let Some(stmt) = el_stmt 
{\n\n\t\t\t\t\tvec.push(StringTree::from(&**stmt));\n\n\t\t\t\t}\n", "file_path": "sflk-lang/src/program.rs", "rank": 15, "score": 28513.458788589087 }, { "content": "use crate::object::Obj;\n\nuse crate::stringtree::StringTree;\n\nuse crate::utils::styles;\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Block {\n\n\tpub stmts: Vec<Stmt>,\n\n}\n\n\n\nimpl From<&Block> for StringTree {\n\n\tfn from(block: &Block) -> StringTree {\n\n\t\tStringTree::new_node(\n\n\t\t\t\"block\".to_owned(),\n\n\t\t\tstyles::CYAN,\n\n\t\t\tblock\n\n\t\t\t\t.stmts\n\n\t\t\t\t.iter()\n\n\t\t\t\t.map(|stmt| StringTree::from(stmt))\n\n\t\t\t\t.collect(),\n\n\t\t)\n", "file_path": "sflk-lang/src/program.rs", "rank": 16, "score": 28512.045763576614 }, { "content": "pub enum Chop {\n\n\tPlus(Expr),\n\n\tMinus(Expr),\n\n\tStar(Expr),\n\n\tSlash(Expr),\n\n\tToRight(Expr),\n\n}\n\n\n\nimpl From<&Chop> for StringTree {\n\n\tfn from(chop: &Chop) -> StringTree {\n\n\t\tmatch chop {\n\n\t\t\tChop::Plus(expr) => StringTree::new_node(\n\n\t\t\t\tformat!(\"chop plus\"),\n\n\t\t\t\tstyles::NORMAL,\n\n\t\t\t\tvec![StringTree::from(expr)],\n\n\t\t\t),\n\n\t\t\tChop::Minus(expr) => StringTree::new_node(\n\n\t\t\t\tformat!(\"chop minus\"),\n\n\t\t\t\tstyles::NORMAL,\n\n\t\t\t\tvec![StringTree::from(expr)],\n", "file_path": "sflk-lang/src/program.rs", "rank": 17, "score": 28511.479720364187 }, { "content": "\t\t\t),\n\n\t\t\tStmt::Newline => StringTree::new_leaf(String::from(\"newline\"), styles::NORMAL),\n\n\t\t\tStmt::Assign { varname, expr } => StringTree::new_node(\n\n\t\t\t\tformat!(\"assign to variable {}\", varname),\n\n\t\t\t\tstyles::NORMAL,\n\n\t\t\t\tvec![StringTree::from(expr)],\n\n\t\t\t),\n\n\t\t\tStmt::Do { expr } => StringTree::new_node(\n\n\t\t\t\tString::from(\"do\"),\n\n\t\t\t\tstyles::NORMAL,\n\n\t\t\t\tvec![StringTree::from(expr)],\n\n\t\t\t),\n\n\t\t\tStmt::DoHere { expr } => StringTree::new_node(\n\n\t\t\t\tString::from(\"do 
here\"),\n\n\t\t\t\tstyles::NORMAL,\n\n\t\t\t\tvec![StringTree::from(expr)],\n\n\t\t\t),\n\n\t\t\tStmt::DoFileHere { expr } => StringTree::new_node(\n\n\t\t\t\tString::from(\"do file here\"),\n\n\t\t\t\tstyles::NORMAL,\n", "file_path": "sflk-lang/src/program.rs", "rank": 18, "score": 28510.827471478326 }, { "content": "\n\nimpl From<&Expr> for StringTree {\n\n\tfn from(expr: &Expr) -> StringTree {\n\n\t\tmatch expr {\n\n\t\t\tExpr::Var { varname } => {\n\n\t\t\t\tStringTree::new_leaf(format!(\"variable {}\", varname), styles::NORMAL)\n\n\t\t\t}\n\n\t\t\tExpr::Const { val } => StringTree::from(val),\n\n\t\t\tExpr::Chain { init_expr, chops } => StringTree::new_node(\n\n\t\t\t\t\"chain\".to_string(),\n\n\t\t\t\tstyles::BLUE,\n\n\t\t\t\tstd::iter::once(StringTree::from(&**init_expr))\n\n\t\t\t\t\t.chain(chops.iter().map(|chop| StringTree::from(chop)))\n\n\t\t\t\t\t.collect(),\n\n\t\t\t),\n\n\t\t}\n\n\t}\n\n}\n\n\n\n#[derive(Debug, Clone)]\n", "file_path": "sflk-lang/src/program.rs", "rank": 19, "score": 28508.670097800372 }, { "content": "\t\t\t),\n\n\t\t\tChop::Star(expr) => StringTree::new_node(\n\n\t\t\t\tformat!(\"chop star\"),\n\n\t\t\t\tstyles::NORMAL,\n\n\t\t\t\tvec![StringTree::from(expr)],\n\n\t\t\t),\n\n\t\t\tChop::Slash(expr) => StringTree::new_node(\n\n\t\t\t\tformat!(\"chop slash\"),\n\n\t\t\t\tstyles::NORMAL,\n\n\t\t\t\tvec![StringTree::from(expr)],\n\n\t\t\t),\n\n\t\t\tChop::ToRight(expr) => StringTree::new_node(\n\n\t\t\t\tformat!(\"chop to right\"),\n\n\t\t\t\tstyles::NORMAL,\n\n\t\t\t\tvec![StringTree::from(expr)],\n\n\t\t\t),\n\n\t\t}\n\n\t}\n\n}\n", "file_path": "sflk-lang/src/program.rs", "rank": 20, "score": 28507.363741295212 }, { "content": "\t\t\t\tvec\n\n\t\t\t}),\n\n\t\t\tStmt::Invalid => StringTree::new_leaf(format!(\"invalid\"), styles::BOLD_LIGHT_RED), // TODO\n\n\t\t}\n\n\t}\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum Expr {\n\n\tVar {\n\n\t\tvarname: String,\n\n\t},\n\n\tConst {\n\n\t\tval: Obj,\n\n\t},\n\n\tChain {\n\n\t\tinit_expr: 
Box<Expr>,\n\n\t\tchops: Vec<Chop>,\n\n\t},\n\n}\n", "file_path": "sflk-lang/src/program.rs", "rank": 21, "score": 28507.343462783516 }, { "content": "\n\n#[derive(Debug, Clone)]\n\npub enum Stmt {\n\n\tNop,\n\n\tPrint {\n\n\t\texpr: Expr,\n\n\t},\n\n\tNewline,\n\n\tAssign {\n\n\t\tvarname: String,\n\n\t\texpr: Expr,\n\n\t},\n\n\tDo {\n\n\t\texpr: Expr,\n\n\t},\n\n\tDoHere {\n\n\t\texpr: Expr,\n\n\t},\n\n\tDoFileHere {\n\n\t\texpr: Expr,\n", "file_path": "sflk-lang/src/program.rs", "rank": 22, "score": 28506.8539719419 }, { "content": "\t\t\t\tcrh.disc();\n\n\t\t\t\t(Tok::StmtBinOp(StmtBinOp::ToLeft), loc)\n\n\t\t\t}\n\n\t\t\tSome('#') => self.pop_comment_tok(crh),\n\n\t\t\tSome(ch) => {\n\n\t\t\t\tcrh.disc();\n\n\t\t\t\t(Tok::InvalidCharacter(ch), loc)\n\n\t\t\t}\n\n\t\t\tNone => (Tok::Eof, loc),\n\n\t\t}\n\n\t}\n\n\n\n\tfn pop_word(&mut self, crh: &mut CharReadingHead) -> (String, Loc) {\n\n\t\tlet mut word_string = String::new();\n\n\t\tlet mut loc = crh.loc();\n\n\t\twhile let Some(ch) = crh.peek() {\n\n\t\t\tif !ch.is_ascii_alphabetic() {\n\n\t\t\t\tbreak;\n\n\t\t\t}\n\n\t\t\tword_string.push(ch);\n", "file_path": "sflk-lang/src/tokenizer.rs", "rank": 41, "score": 27828.23481355745 }, { "content": "\t\tloc.raw_length = integer_string.bytes().len();\n\n\t\t(integer_string, loc)\n\n\t}\n\n\n\n\tfn pop_string_tok(&mut self, crh: &mut CharReadingHead) -> (Tok, Loc) {\n\n\t\tlet mut content = String::new();\n\n\t\tlet mut no_end_quote_warning = false;\n\n\t\tlet mut invalid_escape_sequence_errors: Vec<(EscapeSequenceError, usize)> = Vec::new();\n\n\t\tlet mut offset = 0;\n\n\t\tlet mut loc = crh.loc();\n\n\t\tstd::assert_eq!(crh.peek(), Some('\\\"'));\n\n\t\tcrh.disc();\n\n\t\tloop {\n\n\t\t\tmatch crh.peek() {\n\n\t\t\t\tNone => {\n\n\t\t\t\t\tno_end_quote_warning = true;\n\n\t\t\t\t\tbreak;\n\n\t\t\t\t}\n\n\t\t\t\tSome('\\\"') => {\n\n\t\t\t\t\tloc += crh.loc();\n", "file_path": "sflk-lang/src/tokenizer.rs", "rank": 42, "score": 27828.067089493816 }, { "content": 
"\n\nimpl Tokenizer {\n\n\tpub fn pop_tok(&mut self, crh: &mut CharReadingHead) -> (Tok, Loc) {\n\n\t\tcrh.skip_ws();\n\n\t\tlet loc = crh.loc();\n\n\t\tmatch crh.peek() {\n\n\t\t\tSome(ch) if ch.is_ascii_alphabetic() => {\n\n\t\t\t\tlet (word_string, word_loc) = self.pop_word(crh);\n\n\t\t\t\t(self.word_to_tok(word_string), word_loc)\n\n\t\t\t}\n\n\t\t\tSome(ch) if ch.is_ascii_digit() => {\n\n\t\t\t\tlet (integer_string, word_loc) = self.pop_integer(crh);\n\n\t\t\t\t(Tok::Integer(integer_string), word_loc)\n\n\t\t\t}\n\n\t\t\tSome('\\\"') => self.pop_string_tok(crh),\n\n\t\t\tSome('+') => {\n\n\t\t\t\tcrh.disc();\n\n\t\t\t\t(Tok::BinOp(BinOp::Plus), loc)\n\n\t\t\t}\n\n\t\t\tSome('-') => {\n", "file_path": "sflk-lang/src/tokenizer.rs", "rank": 43, "score": 27826.700112354967 }, { "content": "\n\n\tfn pop_comment_tok(&mut self, crh: &mut CharReadingHead) -> (Tok, Loc) {\n\n\t\tassert_eq!(crh.peek(), Some('#'));\n\n\t\tlet mut loc = crh.loc();\n\n\t\tlet mut delimitation_thickness = 0;\n\n\t\twhile let Some('#') = crh.peek() {\n\n\t\t\tdelimitation_thickness += 1;\n\n\t\t\tloc += crh.loc();\n\n\t\t\tcrh.disc();\n\n\t\t}\n\n\t\tlet delimitation_thickness = delimitation_thickness;\n\n\t\tlet mut content = String::new();\n\n\t\tlet mut no_end_hash_warning = false;\n\n\t\tloop {\n\n\t\t\tif let Some('#') = crh.peek() {\n\n\t\t\t\tlet mut hashes_thickness = 0;\n\n\t\t\t\twhile let Some('#') = crh.peek() {\n\n\t\t\t\t\thashes_thickness += 1;\n\n\t\t\t\t\tloc += crh.loc();\n\n\t\t\t\t\tcrh.disc();\n", "file_path": "sflk-lang/src/tokenizer.rs", "rank": 44, "score": 27826.574718440323 }, { "content": "\t\t\t\tlet len = word.len();\n\n\t\t\t\tTok::Name {\n\n\t\t\t\t\tstring: word,\n\n\t\t\t\t\tunstable_warning: len == 2,\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\n\n\tfn pop_integer(&mut self, crh: &mut CharReadingHead) -> (String, Loc) {\n\n\t\tlet mut integer_string = String::new();\n\n\t\tlet mut loc = crh.loc();\n\n\t\twhile let Some(ch) = crh.peek() {\n\n\t\t\tif 
!ch.is_ascii_digit() {\n\n\t\t\t\tbreak;\n\n\t\t\t}\n\n\t\t\tinteger_string.push(ch);\n\n\t\t\tcrh.disc();\n\n\t\t}\n\n\t\tstd::assert!(integer_string.len() >= 1);\n", "file_path": "sflk-lang/src/tokenizer.rs", "rank": 45, "score": 27822.061972476895 }, { "content": "\n\n#[derive(Debug, Clone)]\n\npub enum Matched {\n\n\tParen,\n\n\tCurly,\n\n\tBracket,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum StmtBinOp {\n\n\tToLeft,\n\n}\n\n\n\npub struct Tokenizer {}\n\n\n\nimpl Tokenizer {\n\n\tpub fn new() -> Tokenizer {\n\n\t\tTokenizer {}\n\n\t}\n\n}\n", "file_path": "sflk-lang/src/tokenizer.rs", "rank": 46, "score": 27820.212092007896 }, { "content": "\t\tlet mut loc = crh.loc();\n\n\t\tlet mut len = 1;\n\n\t\tlet base = {\n\n\t\t\tmatch crh.peek() {\n\n\t\t\t\tSome('x') => {\n\n\t\t\t\t\tcrh.disc();\n\n\t\t\t\t\t16\n\n\t\t\t\t}\n\n\t\t\t\tSome('d') => {\n\n\t\t\t\t\tcrh.disc();\n\n\t\t\t\t\t10\n\n\t\t\t\t}\n\n\t\t\t\t_ => unreachable!(),\n\n\t\t\t}\n\n\t\t};\n\n\t\tlet mut character_code = 0;\n\n\t\tif crh.peek() == Some('(') {\n\n\t\t\tloc += crh.loc();\n\n\t\t\tlen += 1;\n\n\t\t\tcrh.disc();\n", "file_path": "sflk-lang/src/tokenizer.rs", "rank": 47, "score": 27819.802604571552 }, { "content": "\t\t\traw_length: match self.peek() {\n\n\t\t\t\tSome(ch) => ch.len_utf8(),\n\n\t\t\t\tNone => 0,\n\n\t\t\t},\n\n\t\t}\n\n\t}\n\n\n\n\tfn skip_ws(&mut self) {\n\n\t\tloop {\n\n\t\t\tmatch self.peek() {\n\n\t\t\t\tSome(ch) if ch.is_ascii_whitespace() => self.disc(),\n\n\t\t\t\t_ => break,\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl CharReadingHead {\n\n\tpub fn scu(&self) -> Rc<SourceCodeUnit> {\n\n\t\tRc::clone(&self.scu)\n", "file_path": "sflk-lang/src/tokenizer.rs", "rank": 48, "score": 27819.582424553806 }, { "content": "\t\t\t\t\tcrh.disc();\n\n\t\t\t\t\tcontent.push(ch);\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t}\n\n\t\t(\n\n\t\t\tTok::String 
{\n\n\t\t\t\tcontent,\n\n\t\t\t\tno_end_quote_warning,\n\n\t\t\t\tinvalid_escape_sequence_errors,\n\n\t\t\t},\n\n\t\t\tloc,\n\n\t\t)\n\n\t}\n\n\n\n\tfn pop_escaped(\n\n\t\t&mut self,\n\n\t\tcrh: &mut CharReadingHead,\n\n\t) -> (Result<String, EscapeSequenceError>, usize, Loc) {\n\n\t\tlet loc_beg = crh.loc();\n", "file_path": "sflk-lang/src/tokenizer.rs", "rank": 49, "score": 27819.185262401865 }, { "content": "\t\t\tcrh.disc();\n\n\t\t}\n\n\t\tstd::assert!(word_string.len() >= 1);\n\n\t\tloc.raw_length = word_string.bytes().len();\n\n\t\t(word_string, loc)\n\n\t}\n\n\n\n\tfn word_to_tok(&self, word: String) -> Tok {\n\n\t\tmatch &word[..] {\n\n\t\t\t\"np\" => Tok::Kw(Kw::Np),\n\n\t\t\t\"pr\" => Tok::Kw(Kw::Pr),\n\n\t\t\t\"nl\" => Tok::Kw(Kw::Nl),\n\n\t\t\t\"do\" => Tok::Kw(Kw::Do),\n\n\t\t\t\"dh\" => Tok::Kw(Kw::Dh),\n\n\t\t\t\"fh\" => Tok::Kw(Kw::Fh),\n\n\t\t\t\"ev\" => Tok::Kw(Kw::Ev),\n\n\t\t\t\"if\" => Tok::Kw(Kw::If),\n\n\t\t\t\"th\" => Tok::Kw(Kw::Th),\n\n\t\t\t\"el\" => Tok::Kw(Kw::El),\n\n\t\t\t_ => {\n", "file_path": "sflk-lang/src/tokenizer.rs", "rank": 50, "score": 27819.004247998866 }, { "content": "\t\t\t\t}\n\n\t\t\t\tif hashes_thickness == delimitation_thickness {\n\n\t\t\t\t\tbreak;\n\n\t\t\t\t} else {\n\n\t\t\t\t\tcontent.extend(std::iter::repeat('#').take(hashes_thickness));\n\n\t\t\t\t}\n\n\t\t\t} else if let Some(ch) = crh.peek() {\n\n\t\t\t\tcontent.push(ch);\n\n\t\t\t\tloc += crh.loc();\n\n\t\t\t\tcrh.disc();\n\n\t\t\t} else {\n\n\t\t\t\tno_end_hash_warning = true;\n\n\t\t\t\tbreak;\n\n\t\t\t}\n\n\t\t}\n\n\t\t(\n\n\t\t\tTok::Comment {\n\n\t\t\t\tcontent,\n\n\t\t\t\tdelimitation_thickness,\n\n\t\t\t\tno_end_hash_warning,\n\n\t\t\t},\n\n\t\t\tloc,\n\n\t\t)\n\n\t}\n\n}\n", "file_path": "sflk-lang/src/tokenizer.rs", "rank": 51, "score": 27818.778319894867 }, { "content": "\t\t\tloop {\n\n\t\t\t\tif let Some(ch) = crh.peek() {\n\n\t\t\t\t\tmatch ch.to_digit(base) {\n\n\t\t\t\t\t\tSome(digit) => {\n\n\t\t\t\t\t\t\tloc += 
crh.loc();\n\n\t\t\t\t\t\t\tlen += 1;\n\n\t\t\t\t\t\t\tcrh.disc();\n\n\t\t\t\t\t\t\tcharacter_code = character_code * base + digit;\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tNone if ch == ')' => {\n\n\t\t\t\t\t\t\tloc += crh.loc();\n\n\t\t\t\t\t\t\tlen += 1;\n\n\t\t\t\t\t\t\tcrh.disc();\n\n\t\t\t\t\t\t\tbreak;\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tNone => {\n\n\t\t\t\t\t\t\treturn (\n\n\t\t\t\t\t\t\t\tErr(EscapeSequenceError::InvalidDigitCharacter(ch)),\n\n\t\t\t\t\t\t\t\tlen,\n\n\t\t\t\t\t\t\t\tloc,\n", "file_path": "sflk-lang/src/tokenizer.rs", "rank": 52, "score": 27818.422135747674 }, { "content": "\t\t\t\tcrh.disc();\n\n\t\t\t\t(Tok::Left(Matched::Bracket), loc)\n\n\t\t\t}\n\n\t\t\tSome('{') => {\n\n\t\t\t\tcrh.disc();\n\n\t\t\t\t(Tok::Left(Matched::Curly), loc)\n\n\t\t\t}\n\n\t\t\tSome(')') => {\n\n\t\t\t\tcrh.disc();\n\n\t\t\t\t(Tok::Right(Matched::Paren), loc)\n\n\t\t\t}\n\n\t\t\tSome(']') => {\n\n\t\t\t\tcrh.disc();\n\n\t\t\t\t(Tok::Right(Matched::Bracket), loc)\n\n\t\t\t}\n\n\t\t\tSome('}') => {\n\n\t\t\t\tcrh.disc();\n\n\t\t\t\t(Tok::Right(Matched::Curly), loc)\n\n\t\t\t}\n\n\t\t\tSome('<') => {\n", "file_path": "sflk-lang/src/tokenizer.rs", "rank": 53, "score": 27818.07261917786 }, { "content": "\t\t\t\tcrh.disc();\n\n\t\t\t\t(Ok(\"\\r\".to_string()), 2, loc_beg + loc_end)\n\n\t\t\t}\n\n\t\t\tSome('x') | Some('d') => {\n\n\t\t\t\tlet (escaped, len, loc_end) = self.pop_hex_escaped(crh);\n\n\t\t\t\t(escaped, len + 1, loc_beg + loc_end)\n\n\t\t\t}\n\n\t\t\tSome(ch) => (\n\n\t\t\t\tErr(EscapeSequenceError::InvalidFirstCharacter(ch)),\n\n\t\t\t\t1,\n\n\t\t\t\tloc_beg,\n\n\t\t\t),\n\n\t\t\tNone => (Err(EscapeSequenceError::UnexpectedEof), 1, loc_beg),\n\n\t\t}\n\n\t}\n\n\n\n\tfn pop_hex_escaped(\n\n\t\t&mut self,\n\n\t\tcrh: &mut CharReadingHead,\n\n\t) -> (Result<String, EscapeSequenceError>, usize, Loc) {\n", "file_path": "sflk-lang/src/tokenizer.rs", "rank": 54, "score": 27816.306798876063 }, { "content": 
"\t\t\t\t\tcrh.disc();\n\n\t\t\t\t\tbreak;\n\n\t\t\t\t}\n\n\t\t\t\tSome('\\\\') => {\n\n\t\t\t\t\tlet (escaped, len, escaped_loc) = self.pop_escaped(crh);\n\n\t\t\t\t\tloc += escaped_loc;\n\n\t\t\t\t\tmatch escaped {\n\n\t\t\t\t\t\tOk(escaped_string) => {\n\n\t\t\t\t\t\t\tcontent += &escaped_string;\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tErr(error) => {\n\n\t\t\t\t\t\t\tcontent += \"�\";\n\n\t\t\t\t\t\t\tinvalid_escape_sequence_errors.push((error, offset));\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t}\n\n\t\t\t\t\toffset += len;\n\n\t\t\t\t}\n\n\t\t\t\tSome(ch) => {\n\n\t\t\t\t\tloc += crh.loc();\n\n\t\t\t\t\toffset += 1;\n", "file_path": "sflk-lang/src/tokenizer.rs", "rank": 55, "score": 27816.22183453335 }, { "content": "\t\t\t\tcrh.disc();\n\n\t\t\t\t(Tok::BinOp(BinOp::Minus), loc)\n\n\t\t\t}\n\n\t\t\tSome('*') => {\n\n\t\t\t\tcrh.disc();\n\n\t\t\t\t(Tok::BinOp(BinOp::Star), loc)\n\n\t\t\t}\n\n\t\t\tSome('/') => {\n\n\t\t\t\tcrh.disc();\n\n\t\t\t\t(Tok::BinOp(BinOp::Slash), loc)\n\n\t\t\t}\n\n\t\t\tSome('>') => {\n\n\t\t\t\tcrh.disc();\n\n\t\t\t\t(Tok::BinOp(BinOp::ToRight), loc)\n\n\t\t\t}\n\n\t\t\tSome('(') => {\n\n\t\t\t\tcrh.disc();\n\n\t\t\t\t(Tok::Left(Matched::Paren), loc)\n\n\t\t\t}\n\n\t\t\tSome('[') => {\n", "file_path": "sflk-lang/src/tokenizer.rs", "rank": 56, "score": 27815.91793797817 }, { "content": "\t\tstd::assert_eq!(crh.peek(), Some('\\\\'));\n\n\t\tcrh.disc();\n\n\t\tmatch crh.peek() {\n\n\t\t\tSome('\\n') => {\n\n\t\t\t\tlet loc_end = crh.loc();\n\n\t\t\t\tcrh.disc();\n\n\t\t\t\t(Ok(\"\".to_string()), 2, loc_beg + loc_end)\n\n\t\t\t}\n\n\t\t\tSome('\\\\') => {\n\n\t\t\t\tlet loc_end = crh.loc();\n\n\t\t\t\tcrh.disc();\n\n\t\t\t\t(Ok(\"\\\\\".to_string()), 2, loc_beg + loc_end)\n\n\t\t\t}\n\n\t\t\tSome('\\\"') => {\n\n\t\t\t\tlet loc_end = crh.loc();\n\n\t\t\t\tcrh.disc();\n\n\t\t\t\t(Ok(\"\\\"\".to_string()), 2, loc_beg + loc_end)\n\n\t\t\t}\n\n\t\t\tSome('?') => {\n\n\t\t\t\tlet loc_end = crh.loc();\n", "file_path": "sflk-lang/src/tokenizer.rs", "rank": 57, 
"score": 27815.69995882744 }, { "content": "\t}\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum Tok {\n\n\tName {\n\n\t\tstring: String,\n\n\t\tunstable_warning: bool,\n\n\t},\n\n\tKw(Kw),\n\n\tInteger(String),\n\n\tString {\n\n\t\tcontent: String,\n\n\t\tno_end_quote_warning: bool,\n\n\t\tinvalid_escape_sequence_errors: Vec<(EscapeSequenceError, usize)>,\n\n\t\t// The usize is the `\\` character offset in literal\n\n\t},\n\n\tBinOp(BinOp),\n\n\tLeft(Matched),\n\n\tRight(Matched),\n", "file_path": "sflk-lang/src/tokenizer.rs", "rank": 58, "score": 27815.232894546 }, { "content": "impl CharReadingHead {\n\n\tfn peek(&self) -> Option<char> {\n\n\t\tself.scu.content[self.raw_index..].chars().next()\n\n\t}\n\n\n\n\tfn disc(&mut self) {\n\n\t\tif let Some(ch) = self.peek() {\n\n\t\t\tself.raw_index += ch.len_utf8();\n\n\t\t\tmatch ch {\n\n\t\t\t\t'\\n' => self.line += 1,\n\n\t\t\t\t_ => (),\n\n\t\t\t}\n\n\t\t}\n\n\t}\n\n\n\n\tfn loc(&self) -> Loc {\n\n\t\tLoc {\n\n\t\t\tscu: Rc::clone(&self.scu),\n\n\t\t\tline_start: self.line,\n\n\t\t\traw_index_start: self.raw_index,\n", "file_path": "sflk-lang/src/tokenizer.rs", "rank": 59, "score": 27814.876768108203 }, { "content": "use crate::scu::{Loc, SourceCodeUnit};\n\nuse std::rc::Rc;\n\n\n\n#[derive(Debug)]\n\npub struct CharReadingHead {\n\n\tscu: Rc<SourceCodeUnit>,\n\n\traw_index: usize,\n\n\tline: usize,\n\n}\n\n\n\nimpl CharReadingHead {\n\n\tpub fn from_scu(scu: Rc<SourceCodeUnit>) -> CharReadingHead {\n\n\t\tCharReadingHead {\n\n\t\t\tscu,\n\n\t\t\traw_index: 0,\n\n\t\t\tline: 1,\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "sflk-lang/src/tokenizer.rs", "rank": 60, "score": 27814.655090966917 }, { "content": "\t\t\t\t\t\t\t)\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t}\n\n\t\t\t\t} else {\n\n\t\t\t\t\treturn (Err(EscapeSequenceError::UnexpectedEof), len, loc);\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t} else {\n\n\t\t\tfor _ in 0..2 {\n\n\t\t\t\tif let Some(ch) = crh.peek() {\n\n\t\t\t\t\tmatch ch.to_digit(base) {\n\n\t\t\t\t\t\tSome(digit) => 
{\n\n\t\t\t\t\t\t\tloc += crh.loc();\n\n\t\t\t\t\t\t\tlen += 1;\n\n\t\t\t\t\t\t\tcrh.disc();\n\n\t\t\t\t\t\t\tcharacter_code = character_code * base + digit;\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\tNone => {\n\n\t\t\t\t\t\t\treturn (\n\n\t\t\t\t\t\t\t\tErr(EscapeSequenceError::InvalidDigitCharacter(ch)),\n", "file_path": "sflk-lang/src/tokenizer.rs", "rank": 61, "score": 27813.774818396407 }, { "content": "\tStmtBinOp(StmtBinOp),\n\n\tInvalidCharacter(char),\n\n\tComment {\n\n\t\tcontent: String,\n\n\t\tdelimitation_thickness: usize,\n\n\t\tno_end_hash_warning: bool,\n\n\t},\n\n\tEof,\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub enum EscapeSequenceError {\n\n\tInvalidFirstCharacter(char),\n\n\tInvalidDigitCharacter(char),\n\n\tUnexpectedEof,\n\n\tInvalidUnicodeCodePoint(u32),\n\n}\n\n\n\n#[derive(Debug, Clone, PartialEq, Eq)]\n\npub enum Kw {\n", "file_path": "sflk-lang/src/tokenizer.rs", "rank": 62, "score": 27813.463190509785 }, { "content": "\t\t\t\tcrh.disc();\n\n\t\t\t\t(Ok(\"�\".to_string()), 2, loc_beg + loc_end)\n\n\t\t\t}\n\n\t\t\tSome('n') => {\n\n\t\t\t\tlet loc_end = crh.loc();\n\n\t\t\t\tcrh.disc();\n\n\t\t\t\t(Ok(\"\\n\".to_string()), 2, loc_beg + loc_end)\n\n\t\t\t}\n\n\t\t\tSome('t') => {\n\n\t\t\t\tlet loc_end = crh.loc();\n\n\t\t\t\tcrh.disc();\n\n\t\t\t\t(Ok(\"\\t\".to_string()), 2, loc_beg + loc_end)\n\n\t\t\t}\n\n\t\t\tSome('e') => {\n\n\t\t\t\tlet loc_end = crh.loc();\n\n\t\t\t\tcrh.disc();\n\n\t\t\t\t(Ok(\"\\x1b\".to_string()), 2, loc_beg + loc_end)\n\n\t\t\t}\n\n\t\t\tSome('a') => {\n\n\t\t\t\tlet loc_end = crh.loc();\n", "file_path": "sflk-lang/src/tokenizer.rs", "rank": 63, "score": 27811.278924461596 }, { "content": "\t\t\t\tcrh.disc();\n\n\t\t\t\t(Ok(\"\\x07\".to_string()), 2, loc_beg + loc_end)\n\n\t\t\t}\n\n\t\t\tSome('b') => {\n\n\t\t\t\tlet loc_end = crh.loc();\n\n\t\t\t\tcrh.disc();\n\n\t\t\t\t(Ok(\"\\x08\".to_string()), 2, loc_beg + loc_end)\n\n\t\t\t}\n\n\t\t\tSome('v') => {\n\n\t\t\t\tlet loc_end = 
crh.loc();\n\n\t\t\t\tcrh.disc();\n\n\t\t\t\t(Ok(\"\\x0b\".to_string()), 2, loc_beg + loc_end)\n\n\t\t\t}\n\n\t\t\tSome('f') => {\n\n\t\t\t\tlet loc_end = crh.loc();\n\n\t\t\t\tcrh.disc();\n\n\t\t\t\t(Ok(\"\\x0c\".to_string()), 2, loc_beg + loc_end)\n\n\t\t\t}\n\n\t\t\tSome('r') => {\n\n\t\t\t\tlet loc_end = crh.loc();\n", "file_path": "sflk-lang/src/tokenizer.rs", "rank": 64, "score": 27811.22760325859 }, { "content": "\tNp,\n\n\tPr,\n\n\tNl,\n\n\tDo,\n\n\tDh,\n\n\tFh,\n\n\tEv,\n\n\tIf,\n\n\tTh,\n\n\tEl,\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub enum BinOp {\n\n\tPlus,\n\n\tMinus,\n\n\tStar,\n\n\tSlash,\n\n\tToRight,\n\n}\n", "file_path": "sflk-lang/src/tokenizer.rs", "rank": 65, "score": 27808.408926336295 }, { "content": "\t\t\t\t\t\t\t\tlen,\n\n\t\t\t\t\t\t\t\tloc,\n\n\t\t\t\t\t\t\t)\n\n\t\t\t\t\t\t}\n\n\t\t\t\t\t}\n\n\t\t\t\t} else {\n\n\t\t\t\t\treturn (Err(EscapeSequenceError::UnexpectedEof), len, loc);\n\n\t\t\t\t}\n\n\t\t\t}\n\n\t\t}\n\n\t\tif let Some(ch) = std::char::from_u32(character_code) {\n\n\t\t\t(Ok(ch.to_string()), len, loc)\n\n\t\t} else {\n\n\t\t\t(\n\n\t\t\t\tErr(EscapeSequenceError::InvalidUnicodeCodePoint(character_code)),\n\n\t\t\t\tlen,\n\n\t\t\t\tloc,\n\n\t\t\t)\n\n\t\t}\n\n\t}\n", "file_path": "sflk-lang/src/tokenizer.rs", "rank": 66, "score": 27806.695464761833 }, { "content": "use crate::object::Obj;\n\nuse crate::parser::ParsingWarning;\n\nuse crate::program;\n\nuse crate::scu::Loc;\n\nuse crate::stringtree::StringTree;\n\nuse crate::utils::{escape_string, styles};\n\n\n\n// TODO:\n\n// - move Loc here, or not ?\n\n// - delete Located\n\n// - parser->program must become parser->ast->program\n\n\n\npub struct Node<T> {\n\n\tcontent: T,\n\n\tloc: Loc,\n\n\tcomments: Comments,\n\n\twarnings: Vec<ParsingWarning>,\n\n}\n\n\n\nimpl<T> Node<T> {\n", "file_path": "sflk-lang/src/ast.rs", "rank": 67, "score": 21.25149642764058 }, { "content": "\t\t\t\tel_stmt,\n\n\t\t\t} => program::Stmt::If {\n\n\t\t\t\tcond_expr: 
cond_expr.content.to_machine_expr(),\n\n\t\t\t\tth_stmt: th_stmt\n\n\t\t\t\t\t.as_ref()\n\n\t\t\t\t\t.map(|stmt| Box::new((*stmt).content.to_machine_stmt())),\n\n\t\t\t\tel_stmt: el_stmt\n\n\t\t\t\t\t.as_ref()\n\n\t\t\t\t\t.map(|stmt| Box::new((*stmt).content.to_machine_stmt())),\n\n\t\t\t},\n\n\t\t\tStmt::Invalid => program::Stmt::Invalid,\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl TargetExpr {\n\n\tfn is_invalid(&self) -> bool {\n\n\t\tmatch self {\n\n\t\t\tTargetExpr::VariableName(_) => false,\n\n\t\t\tTargetExpr::Invalid => true,\n", "file_path": "sflk-lang/src/ast.rs", "rank": 68, "score": 18.627499514713385 }, { "content": "\t\t\t\t.into_iter()\n\n\t\t\t\t.map(|node_comment| node_comment.unwrap()),\n\n\t\t);\n\n\t}\n\n\n\n\tpub fn add_internal_comments(&mut self, comments: Vec<Node<Comment>>) {\n\n\t\tself.comments.internal_comments.extend(\n\n\t\t\tcomments\n\n\t\t\t\t.into_iter()\n\n\t\t\t\t.map(|node_comment| node_comment.unwrap()),\n\n\t\t);\n\n\t}\n\n}\n\n\n\npub struct Comment {\n\n\tcontent: String,\n\n\tdelimitation_thickness: usize,\n\n}\n\n\n\nimpl Comment {\n\n\tpub fn new(content: String, delimitation_thickness: usize) -> Comment {\n\n\t\tComment {\n\n\t\t\tcontent,\n\n\t\t\tdelimitation_thickness,\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "sflk-lang/src/ast.rs", "rank": 69, "score": 18.60565204967499 }, { "content": "\tpub fn from(content: T, loc: Loc) -> Node<T> {\n\n\t\tNode {\n\n\t\t\tcontent,\n\n\t\t\tloc,\n\n\t\t\tcomments: Comments::new(),\n\n\t\t\twarnings: Vec::new(),\n\n\t\t}\n\n\t}\n\n\n\n\tpub fn unwrap(self) -> T {\n\n\t\tself.content\n\n\t}\n\n\n\n\tpub fn unwrap_ref(&self) -> &T {\n\n\t\t&self.content\n\n\t}\n\n\n\n\tpub fn add_left_comments(&mut self, comments: Vec<Node<Comment>>) {\n\n\t\tself.comments.left_comments.extend(\n\n\t\t\tcomments\n", "file_path": "sflk-lang/src/ast.rs", "rank": 70, "score": 18.425834867469533 }, { "content": "\t\t// TODO:\n\n\t\t// Change the + impl for Loc so that this looks better\n\n\t\t// like seriously wtf 
is even that\n\n\t\tself.loc = &loc + &self.loc;\n\n\t\tself.loc = self.loc + loc;\n\n\t\tself\n\n\t}\n\n}\n\n\n\nimpl<T> Node<T> {\n\n\tpub fn map<U>(self, func: impl FnOnce(T) -> U) -> Node<U> {\n\n\t\tNode {\n\n\t\t\tcontent: func(self.content),\n\n\t\t\tloc: self.loc,\n\n\t\t\tcomments: self.comments,\n\n\t\t\twarnings: self.warnings,\n\n\t\t}\n\n\t}\n\n}\n\n\n", "file_path": "sflk-lang/src/ast.rs", "rank": 71, "score": 17.329523384835245 }, { "content": "pub struct Program {\n\n\tpub stmts: Vec<Node<Stmt>>,\n\n}\n\n\n\npub enum Stmt {\n\n\tNop,\n\n\tPrint {\n\n\t\texpr: Node<Expr>,\n\n\t},\n\n\tNewline,\n\n\tAssign {\n\n\t\ttarget: Node<TargetExpr>,\n\n\t\texpr: Node<Expr>,\n\n\t},\n\n\tEvaluate {\n\n\t\texpr: Node<Expr>,\n\n\t},\n\n\tDo {\n\n\t\texpr: Node<Expr>,\n\n\t},\n", "file_path": "sflk-lang/src/ast.rs", "rank": 72, "score": 16.507562826738557 }, { "content": "\tfn tree(&self, loc: &Loc) -> StringTree {\n\n\t\tmatch self {\n\n\t\t\tTargetExpr::VariableName(name) => {\n\n\t\t\t\tStringTree::new_leaf(format!(\"target variable {}\", name), styles::NORMAL)\n\n\t\t\t}\n\n\t\t\tTargetExpr::Invalid => StringTree::new_leaf(format!(\"invalid\"), styles::BOLD_LIGHT_RED), // TODO\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl Treeable for Stmt {\n\n\tfn tree(&self, loc: &Loc) -> StringTree {\n\n\t\tmatch self {\n\n\t\t\tStmt::Nop => StringTree::new_leaf(format!(\"nop\"), styles::NORMAL),\n\n\t\t\tStmt::Print { expr } => StringTree::new_node(\n\n\t\t\t\tformat!(\"print\"),\n\n\t\t\t\tstyles::NORMAL,\n\n\t\t\t\tvec![StringTree::from(expr)],\n\n\t\t\t),\n\n\t\t\tStmt::Newline => StringTree::new_leaf(format!(\"newline\"), styles::NORMAL),\n", "file_path": "sflk-lang/src/ast.rs", "rank": 73, "score": 16.28790787072947 }, { "content": "\t\t\t\t.map(|stmt_node| StringTree::from(stmt_node))\n\n\t\t\t\t.collect(),\n\n\t\t)\n\n\t}\n\n}\n\n\n\nimpl Program {\n\n\tpub fn to_machine_block(&self) -> program::Block {\n\n\t\tprogram::Block {\n\n\t\t\tstmts: 
self\n\n\t\t\t\t.stmts\n\n\t\t\t\t.iter()\n\n\t\t\t\t.map(|stmt_node| stmt_node.content.to_machine_stmt())\n\n\t\t\t\t.collect(),\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl Stmt {\n\n\tfn is_invalid(&self) -> bool {\n", "file_path": "sflk-lang/src/ast.rs", "rank": 74, "score": 15.78319461693825 }, { "content": "\tDoHere {\n\n\t\texpr: Node<Expr>,\n\n\t},\n\n\tDoFileHere {\n\n\t\texpr: Node<Expr>,\n\n\t},\n\n\tIf {\n\n\t\tcond_expr: Node<Expr>,\n\n\t\tth_stmt: Option<Box<Node<Stmt>>>,\n\n\t\tel_stmt: Option<Box<Node<Stmt>>>,\n\n\t},\n\n\tInvalid, // TODO: Add error details\n\n}\n\n\n\npub enum TargetExpr {\n\n\tVariableName(String),\n\n\tInvalid, // TODO: Add error details\n\n}\n\n\n\npub enum Expr {\n", "file_path": "sflk-lang/src/ast.rs", "rank": 75, "score": 15.563889531400697 }, { "content": "\t\t\t\t\t\t.map(|stmt| (*stmt).content.is_invalid())\n\n\t\t\t\t\t\t.unwrap_or(false)\n\n\t\t\t\t\t|| el_stmt\n\n\t\t\t\t\t\t.as_ref()\n\n\t\t\t\t\t\t.map(|stmt| (*stmt).content.is_invalid())\n\n\t\t\t\t\t\t.unwrap_or(false)\n\n\t\t\t},\n\n\t\t\tStmt::Invalid => true,\n\n\t\t}\n\n\t}\n\n\n\n\tfn to_machine_stmt(&self) -> program::Stmt {\n\n\t\tmatch self {\n\n\t\t\tStmt::Nop => program::Stmt::Nop,\n\n\t\t\tStmt::Print { expr } => program::Stmt::Print {\n\n\t\t\t\texpr: expr.content.to_machine_expr(),\n\n\t\t\t},\n\n\t\t\tStmt::Newline => program::Stmt::Newline,\n\n\t\t\tStmt::Assign { target, expr } => program::Stmt::Assign {\n\n\t\t\t\tvarname: match &target.content {\n", "file_path": "sflk-lang/src/ast.rs", "rank": 76, "score": 15.041938362180364 }, { "content": "\t\t\t\tstyles::NORMAL,\n\n\t\t\t\tvec![StringTree::from(expr)],\n\n\t\t\t),\n\n\t\t\tStmt::If {\n\n\t\t\t\tcond_expr,\n\n\t\t\t\tth_stmt,\n\n\t\t\t\tel_stmt,\n\n\t\t\t} => StringTree::new_node(format!(\"if\"), styles::NORMAL, {\n\n\t\t\t\tlet mut vec: Vec<StringTree> = Vec::with_capacity(3);\n\n\t\t\t\tvec.push(StringTree::from(cond_expr));\n\n\t\t\t\tif let Some(stmt) = th_stmt 
{\n\n\t\t\t\t\tvec.push(StringTree::from(&**stmt));\n\n\t\t\t\t} else {\n\n\t\t\t\t\tvec.push(StringTree::new_leaf(\n\n\t\t\t\t\t\tformat!(\"no then branch\"),\n\n\t\t\t\t\t\tstyles::NORMAL,\n\n\t\t\t\t\t));\n\n\t\t\t\t}\n\n\t\t\t\tif let Some(stmt) = el_stmt {\n\n\t\t\t\t\tvec.push(StringTree::from(&**stmt));\n", "file_path": "sflk-lang/src/ast.rs", "rank": 77, "score": 14.533185505538281 }, { "content": "\t\tmatch self {\n\n\t\t\tExpr::VariableName(varname) => program::Expr::Var {\n\n\t\t\t\tvarname: varname.to_string(),\n\n\t\t\t},\n\n\t\t\tExpr::IntegerLiteral(integer_string) => program::Expr::Const {\n\n\t\t\t\tval: Obj::Integer(str::parse(&integer_string).expect(\"TODO: bigints\")),\n\n\t\t\t},\n\n\t\t\tExpr::StringLiteral(string_string) => program::Expr::Const {\n\n\t\t\t\tval: Obj::String(string_string.clone()),\n\n\t\t\t},\n\n\t\t\tExpr::BlockLiteral(stmts) => program::Expr::Const {\n\n\t\t\t\tval: Obj::Block(program::Block {\n\n\t\t\t\t\tstmts: stmts\n\n\t\t\t\t\t\t.iter()\n\n\t\t\t\t\t\t.map(|stmt_node| stmt_node.content.to_machine_stmt())\n\n\t\t\t\t\t\t.collect(),\n\n\t\t\t\t}),\n\n\t\t\t},\n\n\t\t\tExpr::Chain { init, chops } => program::Expr::Chain {\n\n\t\t\t\tinit_expr: Box::new(init.content.to_machine_expr()),\n", "file_path": "sflk-lang/src/ast.rs", "rank": 78, "score": 14.286218380291292 }, { "content": "\t\t\t\t} else {\n\n\t\t\t\t\tvec.push(StringTree::new_leaf(\n\n\t\t\t\t\t\tformat!(\"no else branch\"),\n\n\t\t\t\t\t\tstyles::NORMAL,\n\n\t\t\t\t\t));\n\n\t\t\t\t}\n\n\t\t\t\tvec\n\n\t\t\t}),\n\n\t\t\tStmt::Invalid => StringTree::new_leaf(format!(\"invalid\"), styles::BOLD_LIGHT_RED), // TODO\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl Treeable for Program {\n\n\tfn tree(&self, loc: &Loc) -> StringTree {\n\n\t\tStringTree::new_node(\n\n\t\t\tformat!(\"program\"),\n\n\t\t\tstyles::CYAN,\n\n\t\t\tself.stmts\n\n\t\t\t\t.iter()\n", "file_path": "sflk-lang/src/ast.rs", "rank": 79, "score": 14.166118611872134 }, { "content": "\t}\n\n\n\n\tfn 
exec_file_here(&mut self, filename: String) {\n\n\t\tlet scu = std::rc::Rc::new(crate::scu::SourceCodeUnit::from_filename(&filename));\n\n\t\tlet mut tfr =\n\n\t\t\tcrate::parser::TokBuffer::from(crate::tokenizer::CharReadingHead::from_scu(scu));\n\n\n\n\t\tlet mut parser = crate::parser::Parser::new();\n\n\t\tlet ast = parser.parse_program(&mut tfr);\n\n\t\tif let Some(debug_mem) = &mut self.debug_mem_opt {\n\n\t\t\tdebug_mem\n\n\t\t\t\t.log\n\n\t\t\t\t.log_line(String::from(\"Program tree\"), styles::NEGATIVE);\n\n\t\t\tcrate::stringtree::StringTree::from(&ast).print(&mut debug_mem.log);\n\n\t\t}\n\n\t\tlet block_program = ast.unwrap_ref().to_machine_block();\n\n\n\n\t\tself.log_line(String::from(\"Program execution\"), styles::NEGATIVE);\n\n\t\tself.exec_block_here(&block_program);\n\n\t\tself.log_line(String::from(\"Program end\"), styles::NEGATIVE);\n", "file_path": "sflk-lang/src/machine.rs", "rank": 80, "score": 13.927629231935502 }, { "content": "\t\t\tStmt::Assign { target, expr } => StringTree::new_node(\n\n\t\t\t\tformat!(\"assign\"),\n\n\t\t\t\tstyles::NORMAL,\n\n\t\t\t\tvec![StringTree::from(target), StringTree::from(expr)],\n\n\t\t\t),\n\n\t\t\tStmt::Evaluate { expr } => StringTree::new_node(\n\n\t\t\t\tformat!(\"evaluate\"),\n\n\t\t\t\tstyles::NORMAL,\n\n\t\t\t\tvec![StringTree::from(expr)],\n\n\t\t\t),\n\n\t\t\tStmt::Do { expr } => {\n\n\t\t\t\tStringTree::new_node(format!(\"do\"), styles::NORMAL, vec![StringTree::from(expr)])\n\n\t\t\t}\n\n\t\t\tStmt::DoHere { expr } => StringTree::new_node(\n\n\t\t\t\tformat!(\"do here\"),\n\n\t\t\t\tstyles::NORMAL,\n\n\t\t\t\tvec![StringTree::from(expr)],\n\n\t\t\t),\n\n\t\t\tStmt::DoFileHere { expr } => StringTree::new_node(\n\n\t\t\t\tformat!(\"do file here\"),\n", "file_path": "sflk-lang/src/ast.rs", "rank": 81, "score": 13.222098597010243 }, { "content": "\t\t\t\t\t} else {\n\n\t\t\t\t\t\tself.log_line(String::from(\"no then branch\"), styles::NORMAL);\n\n\t\t\t\t\t}\n\n\t\t\t\t} else {\n\n\t\t\t\t\tif let 
Some(stmt) = el_stmt {\n\n\t\t\t\t\t\tself.log_line(String::from(\"else branch\"), styles::NORMAL);\n\n\t\t\t\t\t\tself.exec_stmt(stmt);\n\n\t\t\t\t\t} else {\n\n\t\t\t\t\t\tself.log_line(String::from(\"no else branch\"), styles::NORMAL);\n\n\t\t\t\t\t}\n\n\t\t\t\t}\n\n\t\t\t\tself.log_deindent();\n\n\t\t\t}\n\n\t\t\tStmt::Invalid => println!(\"TODO: invalid stmt\"),\n\n\t\t}\n\n\t}\n\n\n\n\tfn eval_expr(&mut self, expr: &Expr) -> Obj {\n\n\t\tmatch expr {\n\n\t\t\tExpr::Var { varname } => {\n", "file_path": "sflk-lang/src/machine.rs", "rank": 82, "score": 12.853793494710814 }, { "content": "\t\t\t),\n\n\t\t\tChop::Invalid => StringTree::new_leaf(format!(\"invalid\"), styles::BOLD_LIGHT_RED), // TODO\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl Treeable for Expr {\n\n\tfn tree(&self, loc: &Loc) -> StringTree {\n\n\t\tmatch self {\n\n\t\t\tExpr::VariableName(name) => {\n\n\t\t\t\tStringTree::new_leaf(format!(\"variable {}\", name), styles::NORMAL)\n\n\t\t\t}\n\n\t\t\tExpr::IntegerLiteral(integer) => {\n\n\t\t\t\tStringTree::new_leaf(format!(\"integer {}\", integer), styles::NORMAL)\n\n\t\t\t}\n\n\t\t\tExpr::StringLiteral(string) => StringTree::new_leaf(\n\n\t\t\t\tformat!(\"string \\\"{}\\\"\", escape_string(string, &styles::UNDERLINE)),\n\n\t\t\t\tstyles::NORMAL,\n\n\t\t\t),\n\n\t\t\tExpr::BlockLiteral(stmts) => StringTree::new_node(\n", "file_path": "sflk-lang/src/ast.rs", "rank": 83, "score": 12.573738791871339 }, { "content": "\t\t\t\tformat!(\"block\"),\n\n\t\t\t\tstyles::CYAN,\n\n\t\t\t\tstmts\n\n\t\t\t\t\t.iter()\n\n\t\t\t\t\t.map(|stmt_node| StringTree::from(stmt_node))\n\n\t\t\t\t\t.collect(),\n\n\t\t\t),\n\n\t\t\tExpr::Chain { init, chops } => StringTree::new_node(\n\n\t\t\t\tformat!(\"chain\"),\n\n\t\t\t\tstyles::BLUE,\n\n\t\t\t\tstd::iter::once(StringTree::from(&**init))\n\n\t\t\t\t\t.chain(chops.iter().map(|chop_node| StringTree::from(chop_node)))\n\n\t\t\t\t\t.collect(),\n\n\t\t\t),\n\n\t\t\tExpr::Invalid => StringTree::new_leaf(format!(\"invalid\"), 
styles::BOLD_LIGHT_RED), // TODO\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl Treeable for TargetExpr {\n", "file_path": "sflk-lang/src/ast.rs", "rank": 84, "score": 12.400811149344856 }, { "content": "\t}\n\n\n\n\tfn exec_stmts_here(&mut self, stmts: &Vec<Stmt>) {\n\n\t\tself.excx_mut(0).i.push(0);\n\n\t\tloop {\n\n\t\t\tif *self.excx(0).i.last().unwrap() >= stmts.len() {\n\n\t\t\t\tbreak;\n\n\t\t\t}\n\n\t\t\tmatch self.excx_mut(0).flow {\n\n\t\t\t\tFlow::Next => (),\n\n\t\t\t\tFlow::Restart => self.excx_mut(0).flow = Flow::Next,\n\n\t\t\t\tFlow::End => break,\n\n\t\t\t}\n\n\t\t\tself.exec_stmt(&stmts[*self.excx(0).i.last().unwrap()]);\n\n\t\t\tmatch self.excx_mut(0).flow {\n\n\t\t\t\tFlow::Next => *self.excx_mut(0).i.last_mut().unwrap() += 1,\n\n\t\t\t\tFlow::Restart => {\n\n\t\t\t\t\t*self.excx_mut(0).i.last_mut().unwrap() = 0;\n\n\t\t\t\t\tself.log_line(String::from(\"restart\"), styles::NORMAL);\n\n\t\t\t\t}\n", "file_path": "sflk-lang/src/machine.rs", "rank": 85, "score": 12.365915961914325 }, { "content": "\n\nimpl Loc {\n\n\tpub fn total_of(scu: Rc<SourceCodeUnit>) -> Loc {\n\n\t\tLoc {\n\n\t\t\tscu: Rc::clone(&scu),\n\n\t\t\tline_start: 1,\n\n\t\t\traw_index_start: 0,\n\n\t\t\traw_length: scu.content.len(),\n\n\t\t}\n\n\t}\n\n}\n\n\n\n#[derive(Debug, Clone)]\n\npub struct Loc {\n\n\tpub scu: Rc<SourceCodeUnit>,\n\n\tpub line_start: usize,\n\n\tpub raw_index_start: usize,\n\n\tpub raw_length: usize,\n\n}\n\n\n", "file_path": "sflk-lang/src/scu.rs", "rank": 86, "score": 11.71453345934022 }, { "content": "impl Loc {\n\n\tpub fn line(&self) -> usize {\n\n\t\tself.line_start\n\n\t}\n\n}\n\n\n\nimpl AddAssign<&Loc> for Loc {\n\n\tfn add_assign(&mut self, right: &Loc) {\n\n\t\tstd::assert_eq!(Rc::as_ptr(&self.scu), Rc::as_ptr(&right.scu));\n\n\t\tstd::assert!(self.line_start <= right.line_start);\n\n\t\tstd::assert!(self.raw_index_start <= right.raw_index_start);\n\n\t\tself.raw_length += (right.raw_index_start - self.raw_index_start) + 
right.raw_length;\n\n\t}\n\n}\n\n\n\nimpl AddAssign for Loc {\n\n\tfn add_assign(&mut self, right: Loc) {\n\n\t\t*self += &right;\n\n\t}\n\n}\n", "file_path": "sflk-lang/src/scu.rs", "rank": 87, "score": 11.398527608139663 }, { "content": "\t\t\t\t\tTargetExpr::VariableName(varname) => varname.to_string(),\n\n\t\t\t\t\tTargetExpr::Invalid => todo!(),\n\n\t\t\t\t},\n\n\t\t\t\texpr: expr.content.to_machine_expr(),\n\n\t\t\t},\n\n\t\t\tStmt::Evaluate { expr } => program::Stmt::Evaluate {\n\n\t\t\t\texpr: expr.content.to_machine_expr(),\n\n\t\t\t},\n\n\t\t\tStmt::Do { expr } => program::Stmt::Do {\n\n\t\t\t\texpr: expr.content.to_machine_expr(),\n\n\t\t\t},\n\n\t\t\tStmt::DoHere { expr } => program::Stmt::DoHere {\n\n\t\t\t\texpr: expr.content.to_machine_expr(),\n\n\t\t\t},\n\n\t\t\tStmt::DoFileHere { expr } => program::Stmt::DoFileHere {\n\n\t\t\t\texpr: expr.content.to_machine_expr(),\n\n\t\t\t},\n\n\t\t\tStmt::If {\n\n\t\t\t\tcond_expr,\n\n\t\t\t\tth_stmt,\n", "file_path": "sflk-lang/src/ast.rs", "rank": 88, "score": 11.251103046809671 }, { "content": "use crate::utils::{styles, StdoutWriter, Style};\n\n\n\npub struct IndentedLog {\n\n\titems: Vec<Item>,\n\n}\n\n\n\nimpl IndentedLog {\n\n\tpub fn new() -> IndentedLog {\n\n\t\tIndentedLog { items: Vec::new() }\n\n\t}\n\n\n\n\tfn push(&mut self, item: Item) {\n\n\t\tself.items.push(item);\n\n\t}\n\n\n\n\tpub fn indent(&mut self, string: String, is_context: bool, style: Style) {\n\n\t\tassert!(!string.contains(\"\\n\"));\n\n\t\tself.push(Item::IndentAdd {\n\n\t\t\tstring,\n\n\t\t\tindent: Indent { is_context, style },\n", "file_path": "sflk-lang/src/log.rs", "rank": 89, "score": 11.178146376451766 }, { "content": "\t\tmatch self {\n\n\t\t\tStmt::Nop => false,\n\n\t\t\tStmt::Print { expr } => expr.content.is_invalid(),\n\n\t\t\tStmt::Newline => false,\n\n\t\t\tStmt::Assign { target, expr } => {\n\n\t\t\t\ttarget.content.is_invalid() || expr.content.is_invalid()\n\n\t\t\t}\n\n\t\t\tStmt::Evaluate { expr } => 
expr.content.is_invalid(),\n\n\t\t\tStmt::Do { expr } => expr.content.is_invalid(),\n\n\t\t\tStmt::DoHere { expr } => expr.content.is_invalid(),\n\n\t\t\tStmt::DoFileHere { expr } => expr.content.is_invalid(),\n\n\t\t\t#[rustfmt::skip]\n\n\t\t\tStmt::If {\n\n\t\t\t\tcond_expr,\n\n\t\t\t\tth_stmt,\n\n\t\t\t\tel_stmt,\n\n\t\t\t} => {\n\n\t\t\t\tcond_expr.content.is_invalid()\n\n\t\t\t\t\t|| th_stmt\n\n\t\t\t\t\t\t.as_ref()\n", "file_path": "sflk-lang/src/ast.rs", "rank": 90, "score": 11.135646540032564 }, { "content": "use crate::program::Block;\n\nuse crate::stringtree::StringTree;\n\nuse crate::utils::{escape_string, styles};\n\n\n\n#[derive(Debug, Clone)]\n\npub enum Obj {\n\n\tInteger(isize),\n\n\tString(String),\n\n\tBlock(Block),\n\n\t//Cx(Cx), // Coming soon!\n\n}\n\n\n\nimpl From<&Obj> for StringTree {\n\n\tfn from(obj: &Obj) -> StringTree {\n\n\t\tmatch obj {\n\n\t\t\tObj::Integer(integer) => {\n\n\t\t\t\tStringTree::new_leaf(format!(\"integer {}\", integer), styles::NORMAL)\n\n\t\t\t}\n\n\t\t\tObj::String(string) => StringTree::new_leaf(\n\n\t\t\t\tformat!(\"string \\\"{}\\\"\", escape_string(&string, &styles::UNDERLINE)),\n", "file_path": "sflk-lang/src/object.rs", "rank": 91, "score": 11.100204572833073 }, { "content": "use crate::utils::Style;\n\n\n\npub struct StringTree {\n\n\tstring: String,\n\n\tstyle: Style,\n\n\tsub_trees: Vec<StringTree>,\n\n}\n\n\n\nimpl StringTree {\n\n\tpub fn new_leaf(string: String, style: Style) -> StringTree {\n\n\t\tStringTree {\n\n\t\t\tstring,\n\n\t\t\tstyle,\n\n\t\t\tsub_trees: Vec::new(),\n\n\t\t}\n\n\t}\n\n\n\n\tpub fn new_node(string: String, style: Style, sub_trees: Vec<StringTree>) -> StringTree {\n\n\t\tStringTree {\n\n\t\t\tstring,\n", "file_path": "sflk-lang/src/stringtree.rs", "rank": 92, "score": 10.913378091895183 }, { "content": "\tVariableName(String),\n\n\tIntegerLiteral(String),\n\n\tStringLiteral(String),\n\n\tBlockLiteral(Vec<Node<Stmt>>),\n\n\tChain {\n\n\t\tinit: Box<Node<Expr>>,\n\n\t\tchops: 
Vec<Node<Chop>>,\n\n\t},\n\n\tInvalid, // TODO: Add error details\n\n}\n\n\n\npub enum Chop {\n\n\tPlus(Node<Expr>),\n\n\tMinus(Node<Expr>),\n\n\tStar(Node<Expr>),\n\n\tSlash(Node<Expr>),\n\n\tToRight(Node<Expr>),\n\n\tInvalid, // TODO: Add error details\n\n}\n\n\n", "file_path": "sflk-lang/src/ast.rs", "rank": 93, "score": 10.854906774225594 }, { "content": "use crate::log::IndentedLog;\n\nuse crate::object::Obj;\n\nuse crate::program::{Block, Chop, Expr, Stmt};\n\nuse crate::utils::{styles, Style};\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Debug, Clone)]\n", "file_path": "sflk-lang/src/machine.rs", "rank": 94, "score": 10.779372925826975 }, { "content": "\n\nimpl AddAssign<&Loc> for &mut Loc {\n\n\tfn add_assign(&mut self, right: &Loc) {\n\n\t\t**self += right;\n\n\t}\n\n}\n\n\n\nimpl Add for Loc {\n\n\ttype Output = Loc;\n\n\tfn add(mut self, right: Loc) -> Loc {\n\n\t\tself += right;\n\n\t\tself\n\n\t}\n\n}\n\n\n\nimpl Add for &Loc {\n\n\ttype Output = Loc;\n\n\tfn add(self, right: &Loc) -> Loc {\n\n\t\tlet mut loc = self.clone();\n\n\t\tloc += right;\n\n\t\tloc\n\n\t}\n\n}\n", "file_path": "sflk-lang/src/scu.rs", "rank": 95, "score": 10.661387908587852 }, { "content": "\t\t\t),\n\n\t\t\tChop::Minus(expr_node) => StringTree::new_node(\n\n\t\t\t\tformat!(\"chop minus\"),\n\n\t\t\t\tstyles::NORMAL,\n\n\t\t\t\tvec![StringTree::from(expr_node)],\n\n\t\t\t),\n\n\t\t\tChop::Star(expr_node) => StringTree::new_node(\n\n\t\t\t\tformat!(\"chop star\"),\n\n\t\t\t\tstyles::NORMAL,\n\n\t\t\t\tvec![StringTree::from(expr_node)],\n\n\t\t\t),\n\n\t\t\tChop::Slash(expr_node) => StringTree::new_node(\n\n\t\t\t\tformat!(\"chop slash\"),\n\n\t\t\t\tstyles::NORMAL,\n\n\t\t\t\tvec![StringTree::from(expr_node)],\n\n\t\t\t),\n\n\t\t\tChop::ToRight(expr_node) => StringTree::new_node(\n\n\t\t\t\tformat!(\"chop to_right\"),\n\n\t\t\t\tstyles::NORMAL,\n\n\t\t\t\tvec![StringTree::from(expr_node)],\n", "file_path": "sflk-lang/src/ast.rs", "rank": 96, "score": 10.306165984594742 }, { 
"content": "\n\npub struct StdoutWriter;\n\n\n\nimpl std::fmt::Write for StdoutWriter {\n\n\tfn write_str(&mut self, string: &str) -> Result<(), std::fmt::Error> {\n\n\t\tprint!(\"{}\", string);\n\n\t\tOk(())\n\n\t}\n\n}\n\n\n\nimpl StdoutWriter {\n\n\tpub fn new() -> StdoutWriter {\n\n\t\tStdoutWriter {}\n\n\t}\n\n}\n", "file_path": "sflk-lang/src/utils.rs", "rank": 97, "score": 10.152260653355757 }, { "content": "\t\t\ttrue\n\n\t\t} else {\n\n\t\t\tfalse\n\n\t\t}\n\n\t}\n\n\n\n\tfn varget(&self, varname: &str) -> &Obj {\n\n\t\tself.excx(0)\n\n\t\t\t.cx\n\n\t\t\t.varmap\n\n\t\t\t.get(varname)\n\n\t\t\t.expect(format!(\"get variable {} but it doesn't exist\", varname).as_str())\n\n\t}\n\n}\n\n\n\nimpl Mem {\n\n\tpub fn exec_file(&mut self, filename: String) {\n\n\t\tself.push_excx(ExCx::new());\n\n\t\tself.exec_file_here(filename);\n\n\t\tself.pop_excx();\n", "file_path": "sflk-lang/src/machine.rs", "rank": 98, "score": 9.994467693784266 }, { "content": "\tpub debug_mem_opt: Option<DebugMem>,\n\n}\n\n\n\nimpl Mem {\n\n\tpub fn new(is_debug_mode: bool) -> Mem {\n\n\t\tMem {\n\n\t\t\texcx_stack: Vec::new(),\n\n\t\t\tdebug_mem_opt: if is_debug_mode {\n\n\t\t\t\tSome(DebugMem::new())\n\n\t\t\t} else {\n\n\t\t\t\tNone\n\n\t\t\t},\n\n\t\t}\n\n\t}\n\n}\n\n\n\nimpl DebugMem {\n\n\tfn log_indent(&mut self, string: String, is_context: bool, style: Style) {\n\n\t\tself.log.indent(string, is_context, style);\n\n\t}\n", "file_path": "sflk-lang/src/machine.rs", "rank": 99, "score": 9.916263903125083 } ]
Rust
cubespin/src/mcube.rs
setekhid/tastes
923c3ececc02a0f6282b7507a861a4ed68b19314
#[derive(Hash, Eq, PartialEq, Debug)] pub struct StatT(pub [[i8; 9]; 6]); pub fn print(stat: &StatT) { let StatT(s) = *stat; println!(" {} {} {}", s[0][0], s[0][1], s[0][2]); println!(" {} {} {}", s[0][3], s[0][4], s[0][5]); println!(" {} {} {}", s[0][6], s[0][7], s[0][8]); println!(""); println!("{} {} {} {} {} {} {} {} {} {} {} {}", s[1][0], s[1][1], s[1][2], s[2][0], s[2][1], s[2][2], s[3][0], s[3][1], s[3][2], s[4][0], s[4][1], s[4][2]); println!("{} {} {} {} {} {} {} {} {} {} {} {}", s[1][3], s[1][4], s[1][5], s[2][3], s[2][4], s[2][5], s[3][3], s[3][4], s[3][5], s[4][3], s[4][4], s[4][5]); println!("{} {} {} {} {} {} {} {} {} {} {} {}", s[1][6], s[1][7], s[1][8], s[2][6], s[2][7], s[2][8], s[3][6], s[3][7], s[3][8], s[4][6], s[4][7], s[4][8]); println!(""); println!(" {} {} {}", s[5][0], s[5][1], s[5][2]); println!(" {} {} {}", s[5][3], s[5][4], s[5][5]); println!(" {} {} {}", s[5][6], s[5][7], s[5][8]); } pub fn spin(stat: StatT, op: i8) -> StatT { match op { 0 => stat, 1 => spin1(stat), -1 => spin1(spin1(spin1(stat))), 2 => spin2(stat), -2 => spin2(spin2(spin2(stat))), 3 => spin3(stat), -3 => spin3(spin3(spin3(stat))), 4 => spin4(stat), -4 => spin4(spin4(spin4(stat))), 5 => spin5(stat), -5 => spin5(spin5(spin5(stat))), 6 => spin6(stat), -6 => spin6(spin6(spin6(stat))), _ => stat } } fn swap_cells(stat: StatT, f1: usize, c1: usize, f2: usize, c2: usize, f3: usize, c3: usize, f4: usize, c4: usize) -> StatT { let StatT(mut s) = stat; { let swap = s[f4-1][c4]; s[f4-1][c4] = s[f3-1][c3]; s[f3-1][c3] = s[f2-1][c2]; s[f2-1][c2] = s[f1-1][c1]; s[f1-1][c1] = swap; } return StatT(s); } fn swap_arris(stat: StatT, f1: usize, a1: [usize; 3], f2: usize, a2: [usize; 3], f3: usize, a3: [usize; 3], f4: usize, a4: [usize; 3]) -> StatT { let mut s = stat; for ci in 0..3 { s = swap_cells(s, f1, a1[ci], f2, a2[ci], f3, a3[ci], f4, a4[ci]); } return s; } fn spin_face(stat: StatT, fi: usize) -> StatT { let mut s = stat; s = swap_cells(s, fi, 1, fi, 5, fi, 7, fi, 3); s = 
swap_cells(s, fi, 0, fi, 2, fi, 8, fi, 6); return s; } fn spin1(stat: StatT) -> StatT { swap_arris(spin_face(stat, 1), 5, [2, 1, 0], 4, [2, 1, 0], 3, [2, 1, 0], 2, [2, 1, 0]) } fn spin2(stat: StatT) -> StatT { swap_arris(spin_face(stat, 2), 1, [0, 3, 6], 3, [0, 3, 6], 6, [0, 3, 6], 5, [8, 5, 2]) } fn spin3(stat: StatT) -> StatT { swap_arris(spin_face(stat, 3), 1, [6, 7, 8], 4, [0, 3, 6], 6, [2, 1, 0], 2, [8, 5, 2]) } fn spin4(stat: StatT) -> StatT { swap_arris(spin_face(stat, 4), 1, [8, 5, 2], 5, [0, 3, 6], 6, [8, 5, 2], 3, [8, 5, 2]) } fn spin5(stat: StatT) -> StatT { swap_arris(spin_face(stat, 5), 1, [2, 1, 0], 2, [0, 3, 6], 6, [6, 7, 8], 4, [8, 5, 2]) } fn spin6(stat: StatT) -> StatT { swap_arris(spin_face(stat, 6), 3, [6, 7, 8], 4, [6, 7, 8], 5, [6, 7, 8], 2, [6, 7, 8]) } use std::collections::HashMap; use std::rc::Rc; struct BfsNode(Option<Rc<BfsNode>>, i8); pub fn autospin(begin_stat: &StatT, end_stat: &StatT) -> Vec<i8> { let mut lleaves: HashMap<StatT, Rc<BfsNode>> = HashMap::new(); let mut rleaves: HashMap<StatT, Rc<BfsNode>> = HashMap::new(); lleaves.insert(clone(begin_stat), Rc::new(BfsNode(None, 0))); rleaves.insert(clone(end_stat), Rc::new(BfsNode(None, 0))); match link_trees(&lleaves, &rleaves, check_linkage(&lleaves, &rleaves)) { Some(result) => return result, None => () } for _ in 0..16 { lleaves = expand_leaves(lleaves); match link_trees(&lleaves, &rleaves, check_linkage(&lleaves, &rleaves)) { Some(result) => return result, None => () } rleaves = expand_leaves(rleaves); match link_trees(&lleaves, &rleaves, check_linkage(&lleaves, &rleaves)) { Some(result) => return result, None => () } } return vec![0]; } fn clone(stat: &StatT) -> StatT { StatT(stat.0.clone()) } fn expand_leaves(leaves: HashMap<StatT, Rc<BfsNode>>) -> HashMap<StatT, Rc<BfsNode>> { let mut fresh: HashMap<StatT, Rc<BfsNode>> = HashMap::new(); for (stat, steps) in leaves { for step_op in 1..7 { fresh.insert(spin(clone(&stat), step_op), Rc::new(BfsNode(Some(steps.clone()), step_op))); 
fresh.insert(spin(clone(&stat), -step_op), Rc::new(BfsNode(Some(steps.clone()), -step_op))); } } return fresh; } fn check_linkage<'a>(lleaves: &'a HashMap<StatT, Rc<BfsNode>>, rleaves: &'a HashMap<StatT, Rc<BfsNode>>) -> Option<&'a StatT> { for stat in lleaves.keys() { if rleaves.contains_key(stat) { return Some(stat); } } return None; } fn link_trees(lleaves: &HashMap<StatT, Rc<BfsNode>>, rleaves: &HashMap<StatT, Rc<BfsNode>>, linkage: Option<&StatT>) -> Option<Vec<i8>> { match linkage { Some(key_stat) => { let mut llist = bfstep2list_hm(lleaves.get(key_stat)); let mut rlist = bfstep2list_hm(rleaves.get(key_stat)); rlist.reverse(); for rlist_step in rlist { llist.push(-rlist_step); } Some(llist) }, None => None } } fn bfstep2list_hm(steps: Option<&Rc<BfsNode>>) -> Vec<i8> { match steps { Some(thing) => bfstep2list(Some(thing.clone())), None => bfstep2list(None) } } fn bfstep2list(steps: Option<Rc<BfsNode>>) -> Vec<i8> { match steps { Some(substeps) => { let mut step_list = bfstep2list(substeps.0.clone()); step_list.push(substeps.1); step_list }, None => Vec::new() } }
#[derive(Hash, Eq, PartialEq, Debug)] pub struct StatT(pub [[i8; 9]; 6]); pub fn print(stat: &StatT) { let StatT(s) = *stat; println!(" {} {} {}", s[0][0], s[0][1], s[0][2]); println!(" {} {} {}", s[0][3], s[0][4], s[0][5]); println!(" {} {} {}", s[0][6], s[0][7], s[0][8]); println!(""); println!("{} {} {} {} {} {} {} {} {} {} {} {}", s[1][0], s[1][1], s[1][2], s[2][0], s[2][1], s[2][2], s[3][0], s[3][1], s[3][2], s[4][0], s[4][1], s[4][2]); println!("{} {} {} {} {} {} {} {} {} {} {} {}", s[1][3], s[1][4], s[1][5], s[2][3], s[2][4], s[2][5], s[3][3], s[3][4], s[3][5], s[4][3], s[4][4], s[4][5]); println!("{} {} {} {} {} {} {} {} {} {} {} {}", s[1][6], s[1][7], s[1][8], s[2][6], s[2][7], s[2][8], s[3][6], s[3][7], s[3][8], s[4][6], s[4][7], s[4][8]); println!(""); println!(" {} {} {}", s[5][0], s[5][1], s[5][2]); println!(" {} {} {}", s[5][3], s[5][4], s[5][5]); println!(" {} {} {}", s[5][6], s[5][7], s[5][8]); } pub fn spin(stat: StatT, op: i8) -> StatT { match op { 0 => stat, 1 => spin1(stat), -1 => spin1(spin1(spin1(stat))), 2 => spin2(stat), -2 => spin2(spin2(spin2(stat))), 3 => spin3(stat), -3 => spin3(spin3(spin3(stat))), 4 => spin4(stat), -4 => spin4(spin4(spin4(stat))), 5 => spin5(stat), -5 => spin5(spin5(spin5(stat))), 6 => spin6(stat), -6 => spin6(spin6(spin6(stat))), _ => stat } } fn swap_cells(stat: StatT, f1: usize, c1: usize, f2: usize, c2: usize, f3: usize, c3: usize, f4: usize, c4: usize) -> StatT { let StatT(mut s) = stat; { let swap = s[f4-1][c4]; s[f4-1][c4] = s[f3-1][c3]; s[f3-1][c3] = s[f2-1][c2]; s[f2-1][c2] = s[f1-1][c1]; s[f1-1][c1] = swap; } return StatT(s); } fn swap_arris(stat: StatT, f1: usize, a1: [usize; 3], f2: usize, a2: [usize; 3], f3: usize, a3: [usize; 3], f4: usize, a4: [usize; 3]) -> StatT { let mut s = stat; for ci in 0..3 { s = swap_cells(s, f1, a1[ci], f2, a2[ci], f3, a3[ci], f4, a4[ci]); } return s; } fn spin_face(stat: StatT, fi: usize) -> StatT { let mut s = stat; s = swap_cells(s, fi, 1, fi, 5, fi, 7, fi, 3); s = 
swap_cells(s, fi, 0, fi, 2, fi, 8, fi, 6); return s; } fn spin1(stat: StatT) -> StatT { swap_arris(spin_face(stat, 1), 5, [2, 1, 0], 4, [2, 1, 0], 3, [2, 1, 0], 2, [2, 1, 0]) } fn spin2(stat: StatT) -> StatT { swap_arris(spin_face(stat, 2), 1, [0, 3, 6], 3, [0, 3, 6], 6, [0, 3, 6], 5, [8, 5, 2]) } fn spin3(stat: StatT) -> StatT { swap_arris(spin_face(stat, 3), 1, [6, 7, 8], 4, [0, 3, 6], 6, [2, 1, 0], 2, [8, 5, 2]) } fn spin4(stat: StatT) -> StatT { swap_arris(spin_face(stat, 4), 1, [8, 5, 2], 5, [0, 3, 6], 6, [8, 5, 2], 3, [8, 5, 2]) } fn spin5(stat: StatT) -> StatT { swap_arris(spin_face(stat, 5), 1, [2, 1, 0], 2, [0, 3, 6], 6, [6, 7, 8], 4, [8, 5, 2]) } fn spin6(stat: StatT) -> StatT { swap_arris(spin_face(stat, 6), 3, [6, 7, 8], 4, [6, 7, 8], 5, [6, 7, 8], 2, [6, 7, 8]) } use std::collections::HashMap; use std::rc::Rc; struct BfsNode(Option<Rc<BfsNode>>, i8); pub fn autospin(begin_stat: &StatT, end_stat: &StatT) -> Vec<i8> { let mut lleaves: HashMap<StatT, Rc<BfsNode>> = HashMap::new(); let mut rleaves: HashMap<StatT, Rc<BfsNode>> = HashMap::new(); lleaves.insert(clone(begin_stat), Rc::new(BfsNode(None, 0))); rleaves.insert(clone(end_stat), Rc::new(BfsNode(None, 0))); match link_trees(&lleaves, &rleaves, check_linkage(&lleaves, &rleaves)) {
); rlist.reverse(); for rlist_step in rlist { llist.push(-rlist_step); } Some(llist) }, None => None } } fn bfstep2list_hm(steps: Option<&Rc<BfsNode>>) -> Vec<i8> { match steps { Some(thing) => bfstep2list(Some(thing.clone())), None => bfstep2list(None) } } fn bfstep2list(steps: Option<Rc<BfsNode>>) -> Vec<i8> { match steps { Some(substeps) => { let mut step_list = bfstep2list(substeps.0.clone()); step_list.push(substeps.1); step_list }, None => Vec::new() } }
Some(result) => return result, None => () } for _ in 0..16 { lleaves = expand_leaves(lleaves); match link_trees(&lleaves, &rleaves, check_linkage(&lleaves, &rleaves)) { Some(result) => return result, None => () } rleaves = expand_leaves(rleaves); match link_trees(&lleaves, &rleaves, check_linkage(&lleaves, &rleaves)) { Some(result) => return result, None => () } } return vec![0]; } fn clone(stat: &StatT) -> StatT { StatT(stat.0.clone()) } fn expand_leaves(leaves: HashMap<StatT, Rc<BfsNode>>) -> HashMap<StatT, Rc<BfsNode>> { let mut fresh: HashMap<StatT, Rc<BfsNode>> = HashMap::new(); for (stat, steps) in leaves { for step_op in 1..7 { fresh.insert(spin(clone(&stat), step_op), Rc::new(BfsNode(Some(steps.clone()), step_op))); fresh.insert(spin(clone(&stat), -step_op), Rc::new(BfsNode(Some(steps.clone()), -step_op))); } } return fresh; } fn check_linkage<'a>(lleaves: &'a HashMap<StatT, Rc<BfsNode>>, rleaves: &'a HashMap<StatT, Rc<BfsNode>>) -> Option<&'a StatT> { for stat in lleaves.keys() { if rleaves.contains_key(stat) { return Some(stat); } } return None; } fn link_trees(lleaves: &HashMap<StatT, Rc<BfsNode>>, rleaves: &HashMap<StatT, Rc<BfsNode>>, linkage: Option<&StatT>) -> Option<Vec<i8>> { match linkage { Some(key_stat) => { let mut llist = bfstep2list_hm(lleaves.get(key_stat)); let mut rlist = bfstep2list_hm(rleaves.get(key_stat)
random
[ { "content": "fn mcube_mix(stat: mcube::StatT, steps: &[i8]) -> mcube::StatT {\n\n let mut s = stat;\n\n for st in steps {\n\n s = mcube::spin(s, *st);\n\n }\n\n return s;\n\n}\n", "file_path": "cubespin/src/main.rs", "rank": 6, "score": 107929.81570244649 }, { "content": "fn main() {\n\n let end_stat = mcube::StatT(\n\n [\n\n [\n\n 1, 1, 1,\n\n 1, 1, 1,\n\n 1, 1, 1\n\n ],\n\n [\n\n 2, 2, 2,\n\n 2, 2, 2,\n\n 2, 2, 2\n\n ],\n\n [\n\n 3, 3, 3,\n\n 3, 3, 3,\n\n 3, 3, 3\n\n ],\n\n [\n\n 4, 4, 4,\n", "file_path": "cubespin/src/main.rs", "rank": 20, "score": 41126.64519400487 }, { "content": "func calc_pub(ip1, ip2 uint32) ipv4_mask {\n\n\tif ip1 == ip2 {\n\n\t\treturn ipv4_mask{}\n\n}\n\n\n", "file_path": "locipv4/locipv4.go", "rank": 21, "score": 21051.43733601102 }, { "content": " 1, 6, 2\n\n ]\n\n ]\n\n );\n\n\n\n let mut curr_stat = mcube::StatT(begin_stat.0.clone());\n\n mcube::print(&curr_stat);\n\n\n\n let solutions = mcube::autospin(&begin_stat, &end_stat);\n\n println!(\"Auto spinning steps:\");\n\n\n\n for s in solutions {\n\n println!(\"{}\", s);\n\n curr_stat = mcube::spin(curr_stat, s);\n\n mcube::print(&curr_stat);\n\n }\n\n}\n\n\n", "file_path": "cubespin/src/main.rs", "rank": 23, "score": 3.9209995544870226 }, { "content": " 4, 4, 4,\n\n 4, 4, 4\n\n ],\n\n [\n\n 5, 5, 5,\n\n 5, 5, 5,\n\n 5, 5, 5\n\n ],\n\n [\n\n 6, 6, 6,\n\n 6, 6, 6,\n\n 6, 6, 6\n\n ]\n\n ]\n\n );\n\n let begin_stat = mcube_mix(mcube::StatT(end_stat.0.clone()),\n\n &[\n\n -1,\n\n 5,\n\n 6,\n", "file_path": "cubespin/src/main.rs", "rank": 24, "score": 1.886165301066986 }, { "content": " -3,\n\n 6,\n\n -3,\n\n 6,\n\n -2,\n\n 1,\n\n -4,\n\n -6\n\n ][..]\n\n );\n\n let _ = mcube::StatT(\n\n [\n\n [\n\n 5, 5, 4,\n\n 3, 1, 1,\n\n 2, 3, 1\n\n ],\n\n [\n\n 2, 4, 6,\n\n 3, 2, 6,\n", "file_path": "cubespin/src/main.rs", "rank": 25, "score": 1.6306973425680398 }, { "content": "// Copyright (c) 2015, Huitse Tai. 
All rights reserved.\n\n// Use of this source code is governed by a BSD-style license\n\n// that can be found in the LICENSE file.\n\n\n\n\n\nmod mcube;\n\n\n", "file_path": "cubespin/src/main.rs", "rank": 26, "score": 1.0933988149163516 } ]
Rust
src/sp_lib/datastore/history.rs
jrtabash/stock_portfolio
84dcb73dee43152b323159e780333d36ee23bfe3
use std::error::Error; use crate::util::datetime; use crate::util::datetime::LocalDate; use crate::util::price_type::PriceType; use crate::datastore::datastore::DataStore; pub type Price = PriceType; #[inline(always)] pub fn tag() -> &'static str { &"history" } pub struct HistoryEntry { pub date: LocalDate, pub open: Price, pub high: Price, pub low: Price, pub close: Price, pub adj_close: Price, pub volume: u64 } impl HistoryEntry { pub fn new(date: LocalDate, open: Price, high: Price, low: Price, close: Price, adj_close: Price, volume: u64) -> Self { HistoryEntry { date: date, open: open, high: high, low: low, close: close, adj_close: adj_close, volume: volume } } pub fn parse_csv(csv: &str) -> Result<Self, Box<dyn Error>> { let values: Vec<&str> = csv.split(',').map(|field| field.trim()).collect(); if values.len() == HistoryEntry::number_of_fields() { Ok(HistoryEntry { date: datetime::parse_date(&values[0])?, open: values[1].parse::<Price>()?, high: values[2].parse::<Price>()?, low: values[3].parse::<Price>()?, close: values[4].parse::<Price>()?, adj_close: values[5].parse::<Price>()?, volume: values[6].parse::<u64>()? 
}) } else { Err(format!("HistoryEntry: Invalid csv data length={} expected=7", values.len()).into()) } } #[inline(always)] pub fn number_of_fields() -> usize { return 7 } } pub struct History { symbol: String, entries: Vec<HistoryEntry> } impl History { pub fn new(symbol: &str) -> Self { History { symbol: String::from(symbol), entries: Vec::new() } } pub fn parse_csv(symbol: &str, csv: &str) -> Result<Self, Box<dyn Error>> { let mut hist = History::new(symbol); for line in csv.split('\n') { if line.is_empty() || line.starts_with(char::is_alphabetic) { continue; } hist.entries.push(HistoryEntry::parse_csv(line)?); } Ok(hist) } pub fn parse_filter_csv(symbol: &str, csv: &str, pred: impl Fn(&HistoryEntry) -> bool) -> Result<Self, Box<dyn Error>> { let mut hist = History::new(symbol); for line in csv.split('\n') { if line.is_empty() || line.starts_with(char::is_alphabetic) { continue; } let entry = HistoryEntry::parse_csv(line)?; if pred(&entry) { hist.entries.push(entry); } } Ok(hist) } pub fn ds_select_all(ds: &DataStore, symbol: &str) -> Result<Self, Box<dyn Error>> { let content = ds.select_symbol(tag(), symbol)?; History::parse_csv(symbol, &content) } pub fn ds_select_if(ds: &DataStore, symbol: &str, pred: impl Fn(&HistoryEntry) -> bool) -> Result<Self, Box<dyn Error>> { let content = ds.select_symbol(tag(), symbol)?; History::parse_filter_csv(symbol, &content, pred) } pub fn ds_select_last(ds: &DataStore, symbol: &str) -> Result<Self, Box<dyn Error>> { let content = ds.select_last(tag(), symbol)?; History::parse_csv(symbol, &content) } pub fn check_csv(csv: &str) -> Result<(), Box<dyn Error>> { let hist = History::parse_csv("history_check", csv)?; let cnt = hist.count(); if cnt > 0 { let entries = hist.entries; let mut last_date = entries[0].date; for i in 1..cnt { let curr_date = entries[i].date; datetime::check_dup_or_back_gap(&last_date, &curr_date)?; last_date = curr_date; } } Ok(()) } #[inline(always)] pub fn symbol(&self) -> &str { &self.symbol } 
#[inline(always)] pub fn entries(&self) -> &Vec<HistoryEntry> { &self.entries } #[inline(always)] pub fn count(&self) -> usize { self.entries.len() } } #[cfg(test)] mod tests { use super::*; #[test] fn test_history_entry() { let csv = "2021-02-25,26.1,31.0,22.0,24.0,24.0,9000"; let entry = HistoryEntry::parse_csv(&csv).unwrap(); check_entry(&entry, datetime::make_date(2021, 2, 25), 26.1, 31.0, 22.0, 24.0, 24.0, 9000); } #[test] fn test_history_entry_with_whitespace() { let csv = "2021-02-25, 26.1,31.0 ,22.0, 24.0 ,24.0,9000\n"; let entry = HistoryEntry::parse_csv(&csv).unwrap(); check_entry(&entry, datetime::make_date(2021, 2, 25), 26.1, 31.0, 22.0, 24.0, 24.0, 9000); } #[test] fn test_history_entry_error() { let csv = "2021-02-25,26.1,31.0,22.0,24.0,24.0"; assert!(HistoryEntry::parse_csv(&csv).is_err()); let csv = "2021-02-25,26.1,31.0,22.0,24.0,24.0,9000,123"; assert!(HistoryEntry::parse_csv(&csv).is_err()); } #[test] fn test_history_parse_csv() { let csv = "2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ 2021-02-25,26.1,31.0,22.0,24.0,24.0,9000\n\ 2021-02-26,24.9,32.0,24.0,28.0,28.0,11000"; let hist = History::parse_csv("AAPL", &csv).unwrap(); assert_eq!(hist.symbol(), "AAPL"); assert_eq!(hist.count(), 3); let entries = hist.entries(); check_entry(&entries[0], datetime::make_date(2021, 2, 24), 25.0, 30.0, 20.0, 26.0, 26.0, 10000); check_entry(&entries[1], datetime::make_date(2021, 2, 25), 26.1, 31.0, 22.0, 24.0, 24.0, 9000); check_entry(&entries[2], datetime::make_date(2021, 2, 26), 24.9, 32.0, 24.0, 28.0, 28.0, 11000); } #[test] fn test_history_parse_csv_with_header() { let csv = "Date,Open,High,Low,Close,Adj Close,Volume\n\ 2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ 2021-02-25,26.1,31.0,22.0,24.0,24.0,9000\n\ 2021-02-26,24.9,32.0,24.0,28.0,28.0,11000"; let hist = History::parse_csv("AAPL", &csv).unwrap(); assert_eq!(hist.symbol(), "AAPL"); assert_eq!(hist.count(), 3); let entries = hist.entries(); check_entry(&entries[0], datetime::make_date(2021, 2, 24), 25.0, 
30.0, 20.0, 26.0, 26.0, 10000); check_entry(&entries[1], datetime::make_date(2021, 2, 25), 26.1, 31.0, 22.0, 24.0, 24.0, 9000); check_entry(&entries[2], datetime::make_date(2021, 2, 26), 24.9, 32.0, 24.0, 28.0, 28.0, 11000); } #[test] fn test_history_parse_csv_with_empty_lines() { let csv = "\n\ 2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ \n\ 2021-02-25,26.1,31.0,22.0,24.0,24.0,9000\n\ \n\ 2021-02-26,24.9,32.0,24.0,28.0,28.0,11000\n\ \n"; let hist = History::parse_csv("AAPL", &csv).unwrap(); assert_eq!(hist.symbol(), "AAPL"); assert_eq!(hist.count(), 3); let entries = hist.entries(); check_entry(&entries[0], datetime::make_date(2021, 2, 24), 25.0, 30.0, 20.0, 26.0, 26.0, 10000); check_entry(&entries[1], datetime::make_date(2021, 2, 25), 26.1, 31.0, 22.0, 24.0, 24.0, 9000); check_entry(&entries[2], datetime::make_date(2021, 2, 26), 24.9, 32.0, 24.0, 28.0, 28.0, 11000); } #[test] fn test_history_parse_filter_csv() { let csv = "2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ 2021-02-25,26.1,31.0,22.0,24.0,24.0,9000\n\ 2021-02-26,24.9,32.0,24.0,28.0,28.0,11000"; let hist = History::parse_filter_csv("AAPL", &csv, |entry| entry.date > datetime::make_date(2021, 2, 24)).unwrap(); assert_eq!(hist.symbol(), "AAPL"); assert_eq!(hist.count(), 2); let entries = hist.entries(); check_entry(&entries[0], datetime::make_date(2021, 2, 25), 26.1, 31.0, 22.0, 24.0, 24.0, 9000); check_entry(&entries[1], datetime::make_date(2021, 2, 26), 24.9, 32.0, 24.0, 28.0, 28.0, 11000); } #[test] fn test_history_parse_filter_csv_with_header() { let csv = "Date,Open,High,Low,Close,Adj Close,Volume\n\ 2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ 2021-02-25,26.1,31.0,22.0,24.0,24.0,9000\n\ 2021-02-26,24.9,32.0,24.0,28.0,28.0,11000"; let hist = History::parse_filter_csv("AAPL", &csv, |entry| entry.date > datetime::make_date(2021, 2, 24)).unwrap(); assert_eq!(hist.symbol(), "AAPL"); assert_eq!(hist.count(), 2); let entries = hist.entries(); check_entry(&entries[0], datetime::make_date(2021, 2, 25), 26.1, 
31.0, 22.0, 24.0, 24.0, 9000); check_entry(&entries[1], datetime::make_date(2021, 2, 26), 24.9, 32.0, 24.0, 28.0, 28.0, 11000); } #[test] fn test_check_csv() { let csv = "2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ 2021-02-25,26.1,31.0,22.0,24.0,24.0,9000\n\ 2021-02-26,24.9,32.0,24.0,28.0,28.0,11000"; assert!(History::check_csv(&csv).is_ok()); let csv = "2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ 2021-02-24,26.1,31.0,22.0,24.0,24.0,9000\n\ 2021-02-26,24.9,32.0,24.0,28.0,28.0,11000"; match History::check_csv(&csv) { Ok(_) => assert!(false), Err(err) => assert_eq!(&format!("{}", err), "Duplicate date 2021-02-24") }; let csv = "2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ 2021-02-25,26.1,31.0,22.0,24.0,24.0,9000\n\ 2021-02-23,24.9,32.0,24.0,28.0,28.0,11000"; match History::check_csv(&csv) { Ok(_) => assert!(false), Err(err) => assert_eq!(&format!("{}", err), "Earlier date 2021-02-23") }; } fn check_entry(entry: &HistoryEntry, date: LocalDate, open: Price, high: Price, low: Price, close: Price, adj_close: Price, volume: u64) { assert_eq!(entry.date, date); assert_eq!(entry.open, open); assert_eq!(entry.high, high); assert_eq!(entry.low, low); assert_eq!(entry.close, close); assert_eq!(entry.adj_close, adj_close); assert_eq!(entry.volume, volume); } }
use std::error::Error; use crate::util::datetime; use crate::util::datetime::LocalDate; use crate::util::price_type::PriceType; use crate::datastore::datastore::DataStore; pub type Price = PriceType; #[inline(always)] pub fn tag() -> &'static str { &"history" } pub struct HistoryEntry { pub date: LocalDate, pub open: Price, pub high: Price, pub low: Price, pub close: Price, pub adj_close: Price, pub volume: u64 } impl HistoryEntry { pub fn new(date: LocalDate, open: Price, high: Price, low: Price, close: Price, adj_close: Price, volume: u64) -> Self { HistoryEntry { date: date, open: open, high: high, low: low, close: close, adj_close: adj_close, volume: volume } } pub fn parse_csv(csv: &str) -> Result<Self, Box<dyn Error>> { let values: Vec<&str> = csv.split(',').map(|field| field.trim()).collect(); if values.len() == HistoryEntry::number_of_fields() { Ok(HistoryEntry { date: datetime::parse_date(&values[0])?, open: values[1].parse::<Price>()?, high: values[2].parse::<Price>()?, low: values[3].parse::<Price>()?, close: values[4].parse::<Price>()?, adj_close: values[5].parse::<Price>()?, volume: values[6].parse::<u64>()? }) } else { Err(format!("HistoryEntry: Invalid csv data length={} expected=7", values.len()).into()) } } #[inline(always)] pub fn number_of_fields() -> usize { return 7 } } pub struct History { symbol: String, entries: Vec<HistoryEntry> } impl History { pub fn new(symbol: &str) -> Self { History { symbol: String::from(symbol), entries: Vec::new() } } pub fn parse_csv(symbol: &str, csv: &str) -> Result<Self, Box<dyn Error>> { let mut hist = History::new(symbol); for line in csv.split('\n') { if line.is_empty() || line.starts_with(char::is_alphabetic) { continue; } hist.entries.push(HistoryEntry::parse_csv(line)?); } Ok(hist) } pub fn parse_filter_csv(symbol: &str, csv: &str, pred: impl Fn(&HistoryEntry) -> bool) -> Result<Self, Box<dyn Error>> { let mut hist = History::new(symbol); for line in csv.split('\n') { if line.is_empty(
hist = History::parse_filter_csv("AAPL", &csv, |entry| entry.date > datetime::make_date(2021, 2, 24)).unwrap(); assert_eq!(hist.symbol(), "AAPL"); assert_eq!(hist.count(), 2); let entries = hist.entries(); check_entry(&entries[0], datetime::make_date(2021, 2, 25), 26.1, 31.0, 22.0, 24.0, 24.0, 9000); check_entry(&entries[1], datetime::make_date(2021, 2, 26), 24.9, 32.0, 24.0, 28.0, 28.0, 11000); } #[test] fn test_check_csv() { let csv = "2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ 2021-02-25,26.1,31.0,22.0,24.0,24.0,9000\n\ 2021-02-26,24.9,32.0,24.0,28.0,28.0,11000"; assert!(History::check_csv(&csv).is_ok()); let csv = "2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ 2021-02-24,26.1,31.0,22.0,24.0,24.0,9000\n\ 2021-02-26,24.9,32.0,24.0,28.0,28.0,11000"; match History::check_csv(&csv) { Ok(_) => assert!(false), Err(err) => assert_eq!(&format!("{}", err), "Duplicate date 2021-02-24") }; let csv = "2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ 2021-02-25,26.1,31.0,22.0,24.0,24.0,9000\n\ 2021-02-23,24.9,32.0,24.0,28.0,28.0,11000"; match History::check_csv(&csv) { Ok(_) => assert!(false), Err(err) => assert_eq!(&format!("{}", err), "Earlier date 2021-02-23") }; } fn check_entry(entry: &HistoryEntry, date: LocalDate, open: Price, high: Price, low: Price, close: Price, adj_close: Price, volume: u64) { assert_eq!(entry.date, date); assert_eq!(entry.open, open); assert_eq!(entry.high, high); assert_eq!(entry.low, low); assert_eq!(entry.close, close); assert_eq!(entry.adj_close, adj_close); assert_eq!(entry.volume, volume); } }
) || line.starts_with(char::is_alphabetic) { continue; } let entry = HistoryEntry::parse_csv(line)?; if pred(&entry) { hist.entries.push(entry); } } Ok(hist) } pub fn ds_select_all(ds: &DataStore, symbol: &str) -> Result<Self, Box<dyn Error>> { let content = ds.select_symbol(tag(), symbol)?; History::parse_csv(symbol, &content) } pub fn ds_select_if(ds: &DataStore, symbol: &str, pred: impl Fn(&HistoryEntry) -> bool) -> Result<Self, Box<dyn Error>> { let content = ds.select_symbol(tag(), symbol)?; History::parse_filter_csv(symbol, &content, pred) } pub fn ds_select_last(ds: &DataStore, symbol: &str) -> Result<Self, Box<dyn Error>> { let content = ds.select_last(tag(), symbol)?; History::parse_csv(symbol, &content) } pub fn check_csv(csv: &str) -> Result<(), Box<dyn Error>> { let hist = History::parse_csv("history_check", csv)?; let cnt = hist.count(); if cnt > 0 { let entries = hist.entries; let mut last_date = entries[0].date; for i in 1..cnt { let curr_date = entries[i].date; datetime::check_dup_or_back_gap(&last_date, &curr_date)?; last_date = curr_date; } } Ok(()) } #[inline(always)] pub fn symbol(&self) -> &str { &self.symbol } #[inline(always)] pub fn entries(&self) -> &Vec<HistoryEntry> { &self.entries } #[inline(always)] pub fn count(&self) -> usize { self.entries.len() } } #[cfg(test)] mod tests { use super::*; #[test] fn test_history_entry() { let csv = "2021-02-25,26.1,31.0,22.0,24.0,24.0,9000"; let entry = HistoryEntry::parse_csv(&csv).unwrap(); check_entry(&entry, datetime::make_date(2021, 2, 25), 26.1, 31.0, 22.0, 24.0, 24.0, 9000); } #[test] fn test_history_entry_with_whitespace() { let csv = "2021-02-25, 26.1,31.0 ,22.0, 24.0 ,24.0,9000\n"; let entry = HistoryEntry::parse_csv(&csv).unwrap(); check_entry(&entry, datetime::make_date(2021, 2, 25), 26.1, 31.0, 22.0, 24.0, 24.0, 9000); } #[test] fn test_history_entry_error() { let csv = "2021-02-25,26.1,31.0,22.0,24.0,24.0"; assert!(HistoryEntry::parse_csv(&csv).is_err()); let csv = 
"2021-02-25,26.1,31.0,22.0,24.0,24.0,9000,123"; assert!(HistoryEntry::parse_csv(&csv).is_err()); } #[test] fn test_history_parse_csv() { let csv = "2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ 2021-02-25,26.1,31.0,22.0,24.0,24.0,9000\n\ 2021-02-26,24.9,32.0,24.0,28.0,28.0,11000"; let hist = History::parse_csv("AAPL", &csv).unwrap(); assert_eq!(hist.symbol(), "AAPL"); assert_eq!(hist.count(), 3); let entries = hist.entries(); check_entry(&entries[0], datetime::make_date(2021, 2, 24), 25.0, 30.0, 20.0, 26.0, 26.0, 10000); check_entry(&entries[1], datetime::make_date(2021, 2, 25), 26.1, 31.0, 22.0, 24.0, 24.0, 9000); check_entry(&entries[2], datetime::make_date(2021, 2, 26), 24.9, 32.0, 24.0, 28.0, 28.0, 11000); } #[test] fn test_history_parse_csv_with_header() { let csv = "Date,Open,High,Low,Close,Adj Close,Volume\n\ 2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ 2021-02-25,26.1,31.0,22.0,24.0,24.0,9000\n\ 2021-02-26,24.9,32.0,24.0,28.0,28.0,11000"; let hist = History::parse_csv("AAPL", &csv).unwrap(); assert_eq!(hist.symbol(), "AAPL"); assert_eq!(hist.count(), 3); let entries = hist.entries(); check_entry(&entries[0], datetime::make_date(2021, 2, 24), 25.0, 30.0, 20.0, 26.0, 26.0, 10000); check_entry(&entries[1], datetime::make_date(2021, 2, 25), 26.1, 31.0, 22.0, 24.0, 24.0, 9000); check_entry(&entries[2], datetime::make_date(2021, 2, 26), 24.9, 32.0, 24.0, 28.0, 28.0, 11000); } #[test] fn test_history_parse_csv_with_empty_lines() { let csv = "\n\ 2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ \n\ 2021-02-25,26.1,31.0,22.0,24.0,24.0,9000\n\ \n\ 2021-02-26,24.9,32.0,24.0,28.0,28.0,11000\n\ \n"; let hist = History::parse_csv("AAPL", &csv).unwrap(); assert_eq!(hist.symbol(), "AAPL"); assert_eq!(hist.count(), 3); let entries = hist.entries(); check_entry(&entries[0], datetime::make_date(2021, 2, 24), 25.0, 30.0, 20.0, 26.0, 26.0, 10000); check_entry(&entries[1], datetime::make_date(2021, 2, 25), 26.1, 31.0, 22.0, 24.0, 24.0, 9000); check_entry(&entries[2], 
datetime::make_date(2021, 2, 26), 24.9, 32.0, 24.0, 28.0, 28.0, 11000); } #[test] fn test_history_parse_filter_csv() { let csv = "2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ 2021-02-25,26.1,31.0,22.0,24.0,24.0,9000\n\ 2021-02-26,24.9,32.0,24.0,28.0,28.0,11000"; let hist = History::parse_filter_csv("AAPL", &csv, |entry| entry.date > datetime::make_date(2021, 2, 24)).unwrap(); assert_eq!(hist.symbol(), "AAPL"); assert_eq!(hist.count(), 2); let entries = hist.entries(); check_entry(&entries[0], datetime::make_date(2021, 2, 25), 26.1, 31.0, 22.0, 24.0, 24.0, 9000); check_entry(&entries[1], datetime::make_date(2021, 2, 26), 24.9, 32.0, 24.0, 28.0, 28.0, 11000); } #[test] fn test_history_parse_filter_csv_with_header() { let csv = "Date,Open,High,Low,Close,Adj Close,Volume\n\ 2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\n\ 2021-02-25,26.1,31.0,22.0,24.0,24.0,9000\n\ 2021-02-26,24.9,32.0,24.0,28.0,28.0,11000"; let
random
[ { "content": "pub fn update_stock_from_csv(stock: &mut Stock, csv: &str) -> Result<bool, Box<dyn Error>> {\n\n let hist = History::parse_csv(&stock.symbol, csv)?;\n\n if hist.count() > 0 {\n\n let latest = &hist.entries()[hist.count() - 1];\n\n if latest.adj_close > 0.0 {\n\n stock.set_latest_price(latest.adj_close, latest.date.clone());\n\n return Ok(true)\n\n }\n\n }\n\n Ok(false)\n\n}\n\n\n", "file_path": "src/sp_lib/portfolio/stocks_update.rs", "rank": 1, "score": 277924.6495941565 }, { "content": "// Moving Volume Weighted Average Price\n\npub fn entries_mvwap(entries: &[HistoryEntry], days: usize) -> Result<Vec<(LocalDate, Price)>, Box<dyn Error>> {\n\n if days < 1 {\n\n return Err(format!(\"entries_mvwap: days < 1\").into())\n\n }\n\n if days > entries.len() {\n\n return Err(format!(\"entries_mvwap: days > len\").into())\n\n }\n\n\n\n let base = days - 1;\n\n let size = entries.len();\n\n\n\n let mut notional: Price = 0.0;\n\n let mut volume: u64 = 0;\n\n for i in 0..base {\n\n notional += entries[i].adj_close * entries[i].volume as Price;\n\n volume += entries[i].volume;\n\n }\n\n\n\n let mut prices: Vec<(LocalDate, Price)> = Vec::with_capacity(size - base);\n\n for i in base..size {\n", "file_path": "src/sp_lib/stats/hist_ftns.rs", "rank": 2, "score": 267721.11541135225 }, { "content": "// Rate of Change\n\npub fn entries_roc(entries: &[HistoryEntry], days: usize) -> Result<Vec<(LocalDate, Price)>, Box<dyn Error>> {\n\n if days < 1 {\n\n return Err(format!(\"entries_roc: days < 1\").into())\n\n }\n\n if days > entries.len() {\n\n return Err(format!(\"entries_roc: days > len\").into())\n\n }\n\n\n\n let size = entries.len();\n\n let mut rocs: Vec<(LocalDate, Price)> = Vec::with_capacity(size - days);\n\n for i in days..size {\n\n let p0 = entries[i - days].adj_close;\n\n if price_zero(p0) {\n\n return Err(format!(\"entries_roc: Cannot divide by zero price\").into())\n\n }\n\n rocs.push((entries[i].date.clone(), 100.0 * (entries[i].adj_close - p0) / 
p0));\n\n }\n\n\n\n Ok(rocs)\n\n}\n\n\n", "file_path": "src/sp_lib/stats/hist_ftns.rs", "rank": 3, "score": 267713.5680952244 }, { "content": "#[inline(always)]\n\npub fn hist_roc(hist: &History, days: usize) -> Result<Vec<(LocalDate, Price)>, Box<dyn Error>> {\n\n entries_roc(hist.entries(), days)\n\n}\n\n\n\n// --------------------------------------------------------------------------------\n\n// Unit Tests\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::util::price_type::price_eql;\n\n use crate::util::datetime::{make_date, date_plus_days, is_weekend};\n\n\n\n #[test]\n\n fn test_entries_vwap() {\n\n let hist = hist_data();\n\n let entries = hist.entries();\n\n assert!(price_eql(entries_vwap(&entries).unwrap(), 145.282762));\n\n assert!(price_eql(entries_vwap(&entries[3..8]).unwrap(), 142.228875));\n\n }\n", "file_path": "src/sp_lib/stats/hist_ftns.rs", "rank": 4, "score": 265800.27802596276 }, { "content": "#[inline(always)]\n\npub fn hist_mvwap(hist: &History, days: usize) -> Result<Vec<(LocalDate, Price)>, Box<dyn Error>> {\n\n entries_mvwap(hist.entries(), days)\n\n}\n\n\n", "file_path": "src/sp_lib/stats/hist_ftns.rs", "rank": 5, "score": 265800.27802596276 }, { "content": "#[inline(always)]\n\npub fn tag() -> &'static str {\n\n &\"dividends\"\n\n}\n\n\n\n// --------------------------------------------------------------------------------\n\n// Dividend Entry\n\n\n\npub struct DividendEntry {\n\n pub date: LocalDate,\n\n pub price: Price\n\n}\n\n\n\nimpl DividendEntry {\n\n pub fn new(date: LocalDate, price: Price) -> Self {\n\n DividendEntry {\n\n date: date,\n\n price: price\n\n }\n\n }\n\n\n", "file_path": "src/sp_lib/datastore/dividends.rs", "rank": 6, "score": 261991.5602990865 }, { "content": "pub fn export_symbol(ds: &datastore::DataStore, symbol: &str, filename: &str) -> Result<usize, Box<dyn Error>> {\n\n let hist_data = history::History::ds_select_all(ds, symbol)?;\n\n let div_data =\n\n if ds.symbol_exists(dividends::tag(), 
symbol) {\n\n dividends::Dividends::ds_select_all(ds, symbol)?\n\n } else {\n\n dividends::Dividends::new(symbol)\n\n };\n\n\n\n let mut file = File::create(&filename)?;\n\n write!(file, \"date,open,high,low,close,adj_close,volume,dividend\\n\")?;\n\n\n\n let div_entries = div_data.entries();\n\n let div_size = div_entries.len();\n\n let mut idx = 0;\n\n\n\n let mut count: usize = 0;\n\n\n\n for hist_entry in hist_data.entries() {\n\n count += 1;\n", "file_path": "src/sp_lib/datastore/export.rs", "rank": 7, "score": 252553.42883647536 }, { "content": "// Volume Weighted Average Price\n\npub fn entries_vwap(entries: &[HistoryEntry]) -> Result<Price, Box<dyn Error>> {\n\n let mut notional: Price = 0.0;\n\n let mut volume: u64 = 0;\n\n for h in entries {\n\n notional += h.adj_close * h.volume as Price;\n\n volume += h.volume;\n\n }\n\n if volume == 0 {\n\n return Err(format!(\"entries_vwap: Cannot divide by zero total volume\").into())\n\n }\n\n Ok(notional / volume as Price)\n\n}\n\n\n", "file_path": "src/sp_lib/stats/hist_ftns.rs", "rank": 8, "score": 252130.59388592845 }, { "content": "#[inline(always)]\n\npub fn hist_vwap(hist: &History) -> Result<Price, Box<dyn Error>> {\n\n entries_vwap(hist.entries())\n\n}\n\n\n", "file_path": "src/sp_lib/stats/hist_ftns.rs", "rank": 9, "score": 250416.05348338146 }, { "content": "pub fn create_file(name: &str, data: &str) -> bool {\n\n let path_buf = make_path(&name);\n\n let path = path_buf.as_path();\n\n\n\n if remove_path(path) {\n\n if let Ok(mut file) = fs::File::create(path) {\n\n if !data.is_empty() {\n\n if let Err(_) = write!(file, \"{}\", data) {\n\n return false;\n\n }\n\n }\n\n return true;\n\n }\n\n }\n\n\n\n return false;\n\n}\n\n\n", "file_path": "src/sp_lib/util/temp_file.rs", "rank": 10, "score": 243001.43856902956 }, { "content": "pub fn sort_stocks(stocks: &mut StockList, order_by: &str, desc: bool) -> Result<(), Box<dyn Error>> {\n\n match (order_by, desc) {\n\n (\"symbol\", false) => stocks.sort_by(|lhs, 
rhs| lhs.symbol.cmp(&rhs.symbol)),\n\n (\"symbol\", true) => stocks.sort_by(|lhs, rhs| rhs.symbol.cmp(&lhs.symbol)),\n\n\n\n (\"date\", false) => stocks.sort_by(|lhs, rhs| lhs.date.cmp(&rhs.date)),\n\n (\"date\", true) => stocks.sort_by(|lhs, rhs| rhs.date.cmp(&lhs.date)),\n\n\n\n (\"value\", false) => stocks.sort_by(|lhs, rhs| price_type::price_cmp(lhs.latest_notional(), rhs.latest_notional())),\n\n (\"value\", true) => stocks.sort_by(|lhs, rhs| price_type::price_cmp(rhs.latest_notional(), lhs.latest_notional())),\n\n\n\n (\"price\", false) => stocks.sort_by(|lhs, rhs| price_type::price_cmp(lhs.latest_price, rhs.latest_price)),\n\n (\"price\", true) => stocks.sort_by(|lhs, rhs| price_type::price_cmp(rhs.latest_price, lhs.latest_price)),\n\n\n\n (\"net\", false) => stocks.sort_by(|lhs, rhs| price_type::price_cmp(lhs.net_price(), rhs.net_price())),\n\n (\"net\", true) => stocks.sort_by(|lhs, rhs| price_type::price_cmp(rhs.net_price(), lhs.net_price())),\n\n\n\n (\"size\", false) => stocks.sort_by(|lhs, rhs| lhs.quantity.cmp(&rhs.quantity)),\n\n (\"size\", true) => stocks.sort_by(|lhs, rhs| rhs.quantity.cmp(&lhs.quantity)),\n\n\n", "file_path": "src/sp_lib/portfolio/algorithms.rs", "rank": 11, "score": 224013.91686276064 }, { "content": "pub fn stocktype2str(st: StockType) -> &'static str {\n\n match st {\n\n StockType::Stock => \"stock\",\n\n StockType::ETF => \"etf\"\n\n }\n\n}\n\n\n", "file_path": "src/sp_lib/portfolio/stock_type.rs", "rank": 12, "score": 220796.44798597167 }, { "content": "pub fn events2str(evt: Events) -> &'static str {\n\n match evt {\n\n Events::History => \"history\",\n\n Events::Dividend => \"div\",\n\n Events::Split => \"split\"\n\n }\n\n}\n\n\n\n// --------------------------------------------------------------------------------\n\n// Interval\n\n\n\n#[derive(Copy, Clone)]\n\npub enum Interval {\n\n Daily,\n\n Weekly,\n\n Monthly\n\n}\n\n\n", "file_path": "src/sp_lib/yfinance/types.rs", "rank": 13, "score": 218314.65181530843 }, { 
"content": "pub fn interval2str(int: Interval) -> &'static str {\n\n match int {\n\n Interval::Daily => \"1d\",\n\n Interval::Weekly => \"1wk\",\n\n Interval::Monthly => \"1mo\"\n\n }\n\n}\n\n\n\n// --------------------------------------------------------------------------------\n\n// Unit Tests\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_events() {\n\n assert_eq!(events2str(Events::History), \"history\");\n\n assert_eq!(events2str(Events::Dividend), \"div\");\n\n assert_eq!(events2str(Events::Split), \"split\");\n\n }\n\n\n\n #[test]\n\n fn test_interval() {\n\n assert_eq!(interval2str(Interval::Daily), \"1d\");\n\n assert_eq!(interval2str(Interval::Weekly), \"1wk\");\n\n assert_eq!(interval2str(Interval::Monthly), \"1mo\");\n\n }\n\n}\n", "file_path": "src/sp_lib/yfinance/types.rs", "rank": 14, "score": 218314.65181530846 }, { "content": "#[inline(always)]\n\npub fn price_zero(px: PriceType) -> bool {\n\n px.abs() < 0.000001\n\n}\n\n\n", "file_path": "src/sp_lib/util/price_type.rs", "rank": 15, "score": 217279.29910743784 }, { "content": "#[inline(always)]\n\npub fn maybe_letter_s(sz: usize) -> &'static str {\n\n if sz == 1 { \"\" } else { \"s\" }\n\n}\n\n\n", "file_path": "src/sp_lib/util/misc.rs", "rank": 16, "score": 214684.7354696059 }, { "content": "pub fn parse_date(date_str: &str) -> Result<LocalDate, Box<dyn Error>> {\n\n match NaiveDate::parse_from_str(&date_str, \"%Y-%m-%d\") {\n\n Ok(dt) => Ok(chrono::Local.ymd(dt.year(), dt.month(), dt.day())),\n\n Err(e) => Err(format!(\"parse_date: {}\", e).into())\n\n }\n\n}\n\n\n", "file_path": "src/sp_lib/util/datetime.rs", "rank": 17, "score": 212293.954380262 }, { "content": "pub fn prices_eql(lhs: &[PriceType], rhs: &[PriceType]) -> bool {\n\n if lhs.len() != rhs.len() {\n\n return false;\n\n }\n\n\n\n for i in 0..lhs.len() {\n\n if !price_eql(lhs[i], rhs[i]) {\n\n return false;\n\n }\n\n }\n\n\n\n return true;\n\n}\n\n\n\n// 
--------------------------------------------------------------------------------\n\n// Unit Tests\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "src/sp_lib/util/price_type.rs", "rank": 18, "score": 210455.7553696229 }, { "content": "#[inline(always)]\n\npub fn price_eql(lhs: PriceType, rhs: PriceType) -> bool {\n\n price_zero(lhs - rhs)\n\n}\n\n\n", "file_path": "src/sp_lib/util/price_type.rs", "rank": 19, "score": 210455.7553696229 }, { "content": "pub fn update_stock_from_ds(stock: &mut Stock, ds: &DataStore) -> Result<bool, Box<dyn Error>> {\n\n let hist = History::ds_select_last(ds, &stock.symbol)?;\n\n if hist.count() != 1 {\n\n return Err(format!(\"Failed to find last history for {} in datastore {}\", stock.symbol, ds).into())\n\n }\n\n\n\n if ds.symbol_exists(dividends::tag(), &stock.symbol) {\n\n let div = dividends::Dividends::ds_select_if(ds, &stock.symbol, |entry| entry.date > stock.date)?;\n\n stock.cum_dividend = stock.quantity as Price * div.entries().iter().fold(0.0, |cum, d| cum + d.price);\n\n }\n\n\n\n let entry = &hist.entries()[0];\n\n if entry.adj_close > 0.0 {\n\n stock.set_latest_price(entry.adj_close, entry.date.clone());\n\n return Ok(true)\n\n }\n\n Ok(false)\n\n}\n\n\n", "file_path": "src/sp_lib/portfolio/stocks_update.rs", "rank": 20, "score": 208650.9844356277 }, { "content": "#[inline(always)]\n\npub fn count_format(sz: usize, item: &str) -> String {\n\n format!(\"{} {}{}\", sz, item, maybe_letter_s(sz))\n\n}\n\n\n", "file_path": "src/sp_lib/util/misc.rs", "rank": 21, "score": 207385.75127093017 }, { "content": "pub fn filter_stocks(stocks: &mut StockList, filter_expr: &str, keep: bool) {\n\n if let Ok(stock_type) = stock_type::str2stocktype(&filter_expr) {\n\n stocks.retain(|stock| (stock.stype == stock_type) == keep);\n\n }\n\n else {\n\n let symbol_set: HashSet<&str> = filter_expr.split(',').map(|name| name.trim()).collect();\n\n stocks.retain(|stock| symbol_set.contains(stock.symbol.as_str()) == keep);\n\n 
}\n\n}\n\n\n", "file_path": "src/sp_lib/portfolio/algorithms.rs", "rank": 22, "score": 206885.55492439354 }, { "content": "#[inline(always)]\n\npub fn is_monday(date: &LocalDate) -> bool {\n\n date.weekday() == Weekday::Mon\n\n}\n\n\n", "file_path": "src/sp_lib/util/datetime.rs", "rank": 23, "score": 205115.86298005242 }, { "content": "#[inline(always)]\n\npub fn is_weekend(date: &LocalDate) -> bool {\n\n date.weekday() == Weekday::Sat || date.weekday() == Weekday::Sun\n\n}\n\n\n", "file_path": "src/sp_lib/util/datetime.rs", "rank": 24, "score": 205115.86298005242 }, { "content": "#[inline(always)]\n\npub fn is_friday(date: &LocalDate) -> bool {\n\n date.weekday() == Weekday::Fri\n\n}\n\n\n", "file_path": "src/sp_lib/util/datetime.rs", "rank": 25, "score": 205115.86298005242 }, { "content": "pub fn update_stock(stock: &mut Stock) -> Result<bool, Box<dyn Error>> {\n\n let today = datetime::today();\n\n let back_delta =\n\n if datetime::is_monday(&today) {\n\n -3\n\n } else if datetime::is_weekend(&today) {\n\n -2\n\n } else {\n\n -1\n\n };\n\n let mut query = HistoryQuery::new(\n\n stock.symbol.to_string(),\n\n datetime::date_plus_days(&today, back_delta),\n\n datetime::date_plus_days(&today, 1),\n\n Interval::Daily,\n\n Events::History);\n\n\n\n query.execute()?;\n\n match update_stock_from_csv(stock, &query.result) {\n\n Ok(updated) => Ok(updated),\n\n Err(e) => Err(format!(\"Failed to update {} - {}\", stock.symbol, e).into())\n\n }\n\n}\n\n\n", "file_path": "src/sp_lib/portfolio/stocks_update.rs", "rank": 26, "score": 203377.50670716062 }, { "content": "pub fn str2stocktype(ststr: &str) -> Result<StockType, Box<dyn Error>> {\n\n match ststr.to_lowercase().as_str() {\n\n \"stock\" => Ok(StockType::Stock),\n\n \"etf\" => Ok(StockType::ETF),\n\n _ => Err(format!(\"Unknown stock type '{}'\", ststr).into())\n\n }\n\n}\n\n\n\nimpl fmt::Display for StockType {\n\n fn fmt(self: &StockType, f: &mut fmt::Formatter) -> fmt::Result {\n\n write!(f, \"{}\", 
stocktype2str(*self))\n\n }\n\n}\n\n\n\n// --------------------------------------------------------------------------------\n\n// Unit Tests\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "src/sp_lib/portfolio/stock_type.rs", "rank": 27, "score": 199962.14033025526 }, { "content": "pub fn app_version() -> &'static str {\n\n \"0.3.0\"\n\n}\n\n\n\n// --------------------------------------------------------------------------------\n\n// Common Help Text\n\n\n", "file_path": "src/sp_lib/util/common_args.rs", "rank": 28, "score": 199351.17283992958 }, { "content": "fn make_stock(sym: &str, stype: StockType, date: LocalDate, qty: u32, base: Price, latest: Price) -> Stock {\n\n let symbol = String::from(sym);\n\n let mut stock = Stock::new(symbol, stype, date, qty, base);\n\n stock.set_latest_price(latest, today_plus_days(0));\n\n stock\n\n}\n", "file_path": "tests/test_portfolio.rs", "rank": 29, "score": 197502.34205450927 }, { "content": "pub fn ds_root_help() -> &'static str {\n\n \"Datastore root path (default: value of HOME environment variable)\"\n\n}\n\n\n", "file_path": "src/sp_lib/util/common_args.rs", "rank": 30, "score": 196354.9504415834 }, { "content": "pub fn ds_name_help() -> &'static str {\n\n \"Datastore name (default: sp_datastore)\"\n\n}\n\n\n", "file_path": "src/sp_lib/util/common_args.rs", "rank": 31, "score": 196354.9504415834 }, { "content": "pub fn stocks_file_help() -> &'static str {\n\n \"CSV file containing stocks in portfolio, formatted as 'symbol,type,date,quantity,base_price' including a header line. 
\\\n\n Supported type values include stock and etf\"\n\n}\n\n\n\n// --------------------------------------------------------------------------------\n\n// Common Arguments\n\n\n", "file_path": "src/sp_lib/util/common_args.rs", "rank": 32, "score": 196354.9504415834 }, { "content": "pub fn stocks_file(required: bool) -> Arg<'static, 'static> {\n\n Arg::with_name(\"stocks_file\")\n\n .short(\"s\")\n\n .long(\"stocks\")\n\n .help(stocks_file_help())\n\n .required(required)\n\n .takes_value(true)\n\n}\n\n\n\n// --------------------------------------------------------------------------------\n\n// Common Parsed Matches\n\n\n", "file_path": "src/sp_lib/util/common_args.rs", "rank": 33, "score": 195754.4192770028 }, { "content": "fn sp_ds_symbol() -> &'static str {\n\n &\"TEST\"\n\n}\n\n\n", "file_path": "tests/test_datastore.rs", "rank": 34, "score": 193513.4489815228 }, { "content": "pub fn remove_file(name: &str) -> bool {\n\n let path_buf = make_path(&name);\n\n let path = path_buf.as_path();\n\n remove_path(&path)\n\n}\n\n\n", "file_path": "src/sp_lib/util/temp_file.rs", "rank": 35, "score": 191270.99109442273 }, { "content": "fn url_request(url: &String, result: &mut String) -> Result<(), curl::Error> {\n\n let mut handle = easy::Easy::new();\n\n\n\n handle.url(url)?;\n\n\n\n let mut transfer = handle.transfer();\n\n transfer.write_function(|new_data| {\n\n let txt = str::from_utf8(new_data).unwrap_or(\"\");\n\n result.push_str(txt);\n\n Ok(txt.len())\n\n })?;\n\n\n\n transfer.perform()?;\n\n\n\n Ok(())\n\n}\n", "file_path": "src/sp_lib/yfinance/query.rs", "rank": 36, "score": 190300.64692701263 }, { "content": "pub fn update_stocks_from_ds(stocks: &mut StockList, ds: &DataStore) -> Result<usize, Box<dyn Error>> {\n\n let mut count: usize = 0;\n\n for stock in stocks.iter_mut() {\n\n if update_stock_from_ds(stock, ds)? 
{\n\n count += 1;\n\n }\n\n }\n\n Ok(count)\n\n}\n\n\n", "file_path": "src/sp_lib/portfolio/stocks_update.rs", "rank": 37, "score": 189170.78929733176 }, { "content": "fn sp_ds_data(which: &str, idx: i32) -> &'static str {\n\n if which == history::tag() {\n\n if idx == 1 {\n\n return &\"Date,Open,High,Low,Close,Adj Close,Volume\\n\\\n\n 2021-02-22,10.0,12.0,8.0,11.0,11.0,10000\\n\\\n\n 2021-02-23,11.0,12.5,8.5,11.5,11.5,9000\";\n\n }\n\n else if idx == 2 {\n\n return &\"Date,Open,High,Low,Close,Adj Close,Volume\\n\\\n\n 2021-02-24,11.5,14.0,11.0,12.5,12.5,11000\\n\\\n\n 2021-02-25,12.5,13.5,10.5,12.0,12.0,10000\\n\\\n\n 2021-02-26,12.0,14.0,11.0,14.0,14.0,12000\";\n\n }\n\n\n\n }\n\n else if which == dividends::tag() {\n\n if idx == 1 {\n\n return &\"Date,Dividends\\n\\\n\n 2021-02-23,1.2\";\n\n }\n\n }\n\n &\"\"\n\n}\n\n\n", "file_path": "tests/test_datastore.rs", "rank": 38, "score": 186065.54373235934 }, { "content": "pub fn value_export(stocks: &StockList, filename: &str) -> Result<(), Box<dyn Error>> {\n\n let mut file = File::create(&filename)?;\n\n write!(file, \"Symbol,Buy Date,Upd Date,Days Held,Size,Base,Cur,Net,Pct,Base Value,Cur Value,Net Value,Cum Div\\n\")?;\n\n for stock in stocks.iter() {\n\n write!(file, \"{},{},{},{},{},{:.2},{:.2},{:.2},{:.2},{:.2},{:.2},{:.2},{:.2}\\n\",\n\n stock.symbol,\n\n stock.date.format(\"%Y-%m-%d\"),\n\n stock.latest_date.format(\"%Y-%m-%d\"),\n\n stock.days_held,\n\n stock.quantity,\n\n stock.base_price,\n\n stock.latest_price,\n\n stock.net_price(),\n\n stock.pct_change(),\n\n stock.base_notional(),\n\n stock.latest_notional(),\n\n stock.net_notional(),\n\n stock.cum_dividend)?;\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/sp_lib/portfolio/reports.rs", "rank": 39, "score": 184343.3455846879 }, { "content": "pub fn update_stocks(stocks: &mut StockList) -> Result<usize, Box<dyn Error>> {\n\n let mut count: usize = 0;\n\n for stock in stocks.iter_mut() {\n\n if update_stock(stock)? 
{\n\n count += 1;\n\n }\n\n }\n\n Ok(count)\n\n}\n\n\n", "file_path": "src/sp_lib/portfolio/stocks_update.rs", "rank": 40, "score": 182416.06262128148 }, { "content": "#[inline(always)]\n\npub fn direntry_filename(entry: &fs::DirEntry) -> String {\n\n match entry.file_name().to_str() {\n\n Some(fname) => String::from(fname),\n\n None => String::from(\"?\")\n\n }\n\n}\n\n\n\n// --------------------------------------------------------------------------------\n\n// Unit Tests\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::env;\n\n use crate::util::temp_file;\n\n\n\n #[test]\n\n fn test_maybe_letter_s() {\n\n assert_eq!(maybe_letter_s(0), \"s\");\n\n assert_eq!(maybe_letter_s(1), \"\");\n", "file_path": "src/sp_lib/util/misc.rs", "rank": 41, "score": 172622.59717769184 }, { "content": "type Price = history::Price;\n\n\n", "file_path": "tests/test_datastore.rs", "rank": 42, "score": 169660.3144733676 }, { "content": "// Group by stock symbol, and calcuate aggregate quantity and current value.\n\npub fn stock_aggregate(stocks: &StockList) -> HashMap<String, (u32, Price)> {\n\n stock_groupby(\n\n stocks,\n\n |_| (0, 0.0),\n\n |stock, size_price| {\n\n let sp = *size_price;\n\n (sp.0 + stock.quantity, sp.1 + stock.latest_notional())\n\n })\n\n}\n\n\n", "file_path": "src/sp_lib/portfolio/algorithms.rs", "rank": 43, "score": 169506.17570534348 }, { "content": "pub fn update_stocks_with_cache(stocks: &mut StockList, cache_file: &Path) -> Result<usize, Box<dyn Error>> {\n\n let today = datetime::today();\n\n let mut cache = StocksCache::from_cache_file(cache_file)?;\n\n let mut count: usize = 0;\n\n for stock in stocks.iter_mut() {\n\n match cache.get_mut(&stock.symbol) {\n\n Some(cache_entry) => {\n\n if cache_entry.is_updated(&today) {\n\n stock.set_latest_price(cache_entry.latest_price, cache_entry.latest_date.clone());\n\n count += 1;\n\n }\n\n else {\n\n if update_stock(stock)? 
{\n\n count += 1;\n\n cache_entry.update(stock.latest_price, &stock.latest_date);\n\n }\n\n }\n\n },\n\n None => {\n\n if update_stock(stock)? {\n", "file_path": "src/sp_lib/portfolio/stocks_update.rs", "rank": 44, "score": 167122.6458063267 }, { "content": "pub fn price_cmp(lhs: PriceType, rhs: PriceType) -> Ordering {\n\n if lhs < rhs {\n\n return Ordering::Less;\n\n }\n\n else if lhs > rhs {\n\n return Ordering::Greater;\n\n }\n\n return Ordering::Equal;\n\n}\n\n\n", "file_path": "src/sp_lib/util/price_type.rs", "rank": 45, "score": 164199.0245913549 }, { "content": "pub fn value_report(stocks: &StockList, groupby: bool) {\n\n println!(\"Stocks Value Report\");\n\n println!(\"-------------------\");\n\n println!(\" Date: {}\", datetime::today().format(\"%Y-%m-%d\"));\n\n println!(\"Number of Stocks: {}\", stocks.len());\n\n println!(\" Base Value: {:.2}\", algorithms::base_notional(&stocks));\n\n println!(\" Latest Value: {:.2}\", algorithms::latest_notional(&stocks));\n\n println!(\" Net Value: {:.2}\", algorithms::net_notional(&stocks));\n\n println!(\" Percent Change: {:.2}\", algorithms::pct_change(&stocks));\n\n println!(\" Cum Dividend: {:.2}\", algorithms::cumulative_dividend(&stocks));\n\n println!(\"\");\n\n\n\n println!(\"{:8} {:10} {:10} {:6} {:8} {:8} {:8} {:8} {:8} {:12} {:12} {:10} {:8}\",\n\n \"Symbol\",\n\n \"Buy Date\",\n\n \"Upd Date\",\n\n \"Days\",\n\n \"Size\",\n\n \"Base\",\n\n \"Cur\",\n", "file_path": "src/sp_lib/portfolio/reports.rs", "rank": 46, "score": 162567.14229436775 }, { "content": "fn sp_ds_name() -> &'static str {\n\n &\"test_datastore\"\n\n}\n\n\n", "file_path": "tests/test_datastore.rs", "rank": 47, "score": 162124.6671620651 }, { "content": "pub fn parsed_ds_root(parsed_args: &ArgMatches) -> Result<String, Box<dyn Error>> {\n\n match parsed_args.value_of(\"ds_root\") {\n\n Some(value) => Ok(String::from(value)),\n\n None => Ok(env::var(\"HOME\")?)\n\n }\n\n}\n\n\n", "file_path": "src/sp_lib/util/common_args.rs", "rank": 48, 
"score": 159212.71690349863 }, { "content": "pub fn ds_name() -> Arg<'static, 'static> {\n\n Arg::with_name(\"ds_name\")\n\n .short(\"n\")\n\n .long(\"name\")\n\n .help(ds_name_help())\n\n .takes_value(true)\n\n}\n\n\n", "file_path": "src/sp_lib/util/common_args.rs", "rank": 49, "score": 158235.83123641447 }, { "content": "pub fn ds_root() -> Arg<'static, 'static> {\n\n Arg::with_name(\"ds_root\")\n\n .short(\"r\")\n\n .long(\"root\")\n\n .help(ds_root_help())\n\n .takes_value(true)\n\n}\n\n\n", "file_path": "src/sp_lib/util/common_args.rs", "rank": 50, "score": 158235.83123641447 }, { "content": "pub fn stock_base_dates(stocks: &StockList) -> HashMap<String, datetime::LocalDate> {\n\n stock_groupby(\n\n stocks,\n\n |stock| stock.date.clone(),\n\n |stock, cur_date| if stock.date < *cur_date { stock.date.clone() } else { *cur_date })\n\n}\n", "file_path": "src/sp_lib/portfolio/algorithms.rs", "rank": 51, "score": 154757.46612576884 }, { "content": "#[inline(always)]\n\npub fn date2timestamp(date: &LocalDate) -> i64 {\n\n date.and_hms(0, 0, 0).timestamp()\n\n}\n\n\n", "file_path": "src/sp_lib/util/datetime.rs", "rank": 52, "score": 152945.07058425498 }, { "content": "pub fn check_dup_or_back_gap(old_date: &LocalDate, new_date: &LocalDate) -> Result<(), Box<dyn Error>> {\n\n if old_date == new_date {\n\n return Err(format!(\"Duplicate date {}\", new_date.format(\"%Y-%m-%d\")).into())\n\n }\n\n else if new_date < old_date {\n\n return Err(format!(\"Earlier date {}\", new_date.format(\"%Y-%m-%d\")).into())\n\n }\n\n Ok(())\n\n}\n\n\n\n// --------------------------------------------------------------------------------\n\n// Unit Tests\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_today() {\n\n let date = today();\n", "file_path": "src/sp_lib/util/datetime.rs", "rank": 53, "score": 150906.6245036175 }, { "content": "#[inline(always)]\n\npub fn date_plus_days(date: &LocalDate, days: i64) -> LocalDate {\n\n *date + 
Duration::days(days)\n\n}\n\n\n", "file_path": "src/sp_lib/util/datetime.rs", "rank": 54, "score": 145458.43067039605 }, { "content": "pub fn cumulative_dividend(stocks: &StockList) -> Price {\n\n stocks.iter().map(|stock| stock.cum_dividend).sum()\n\n}\n\n\n", "file_path": "src/sp_lib/portfolio/algorithms.rs", "rank": 55, "score": 140671.83772195177 }, { "content": "pub fn net_notional(stocks: &StockList) -> Price {\n\n stocks.iter().map(|stock| stock.net_notional()).sum()\n\n}\n\n\n", "file_path": "src/sp_lib/portfolio/algorithms.rs", "rank": 56, "score": 140671.83772195177 }, { "content": "pub fn base_notional(stocks: &StockList) -> Price {\n\n stocks.iter().map(|stock| stock.base_notional()).sum()\n\n}\n\n\n", "file_path": "src/sp_lib/portfolio/algorithms.rs", "rank": 57, "score": 140671.83772195177 }, { "content": "pub fn latest_notional(stocks: &StockList) -> Price {\n\n stocks.iter().map(|stock| stock.latest_notional()).sum()\n\n}\n\n\n", "file_path": "src/sp_lib/portfolio/algorithms.rs", "rank": 58, "score": 140671.83772195177 }, { "content": "pub fn make_path(name: &str) -> PathBuf {\n\n let mut pbuf = env::temp_dir();\n\n pbuf.push(name);\n\n pbuf\n\n}\n\n\n", "file_path": "src/sp_lib/util/temp_file.rs", "rank": 59, "score": 137753.39574811907 }, { "content": "fn check_prices(actual: &Vec<&str>, expect: &Vec<&str>) {\n\n assert_eq!(actual.len(), 7);\n\n assert_eq!(expect.len(), 5);\n\n\n\n for i in 1..6 {\n\n let px = format!(\"{:.2}\", actual[i].parse().unwrap_or(0.0));\n\n assert_eq!(px, expect[i-1]);\n\n }\n\n}\n", "file_path": "tests/test_yfinance.rs", "rank": 60, "score": 136135.64678625597 }, { "content": "#[inline(always)]\n\npub fn earliest_date() -> LocalDate {\n\n chrono::Local.ymd(1970, 1, 1)\n\n}\n\n\n", "file_path": "src/sp_lib/util/datetime.rs", "rank": 61, "score": 135942.6861434771 }, { "content": "pub fn parsed_ds_name(parsed_args: &ArgMatches) -> String {\n\n String::from(\n\n match parsed_args.value_of(\"ds_name\") {\n\n Some(value) => 
value,\n\n None => \"sp_datastore\"\n\n })\n\n}\n\n\n", "file_path": "src/sp_lib/util/common_args.rs", "rank": 62, "score": 134948.12262950046 }, { "content": "type Table = HashMap<String, CacheEntry>;\n\n\n\npub struct StocksCache {\n\n table: Table\n\n}\n\n\n\nimpl StocksCache {\n\n pub fn new() -> StocksCache {\n\n StocksCache {\n\n table: Table::new()\n\n }\n\n }\n\n\n\n #[inline(always)]\n\n pub fn add(self: &mut StocksCache, symbol: String, entry: CacheEntry) {\n\n self.table.insert(symbol, entry);\n\n }\n\n\n\n #[allow(dead_code)]\n\n #[inline(always)]\n", "file_path": "src/sp_lib/portfolio/stocks_cache.rs", "rank": 63, "score": 134550.21055520666 }, { "content": "#[inline(always)]\n\npub fn max<Entry>(entries: &[Entry],\n\n extract: impl Fn(&Entry) -> f64) -> ReduceResult {\n\n reduce(&entries[1..],\n\n extract(&entries[0]),\n\n |ret, entry| {\n\n let value = extract(entry);\n\n Ok(if value > ret { value } else { ret })\n\n })\n\n}\n\n\n", "file_path": "src/sp_lib/stats/reduce_ftns.rs", "rank": 64, "score": 133981.83120739134 }, { "content": "#[inline(always)]\n\npub fn sum<Entry>(entries: &[Entry],\n\n extract: impl Fn(&Entry) -> f64) -> ReduceResult {\n\n reduce(entries,\n\n 0.0,\n\n |ret, entry| Ok(ret + extract(entry)))\n\n}\n\n\n", "file_path": "src/sp_lib/stats/reduce_ftns.rs", "rank": 65, "score": 133981.83120739134 }, { "content": "#[inline(always)]\n\npub fn min<Entry>(entries: &[Entry],\n\n extract: impl Fn(&Entry) -> f64) -> ReduceResult {\n\n reduce(&entries[1..],\n\n extract(&entries[0]),\n\n |ret, entry| {\n\n let value = extract(entry);\n\n Ok(if value < ret { value } else { ret })\n\n })\n\n}\n\n\n", "file_path": "src/sp_lib/stats/reduce_ftns.rs", "rank": 66, "score": 133981.83120739134 }, { "content": "#[inline(always)]\n\npub fn mean<Entry>(entries: &[Entry],\n\n extract: impl Fn(&Entry) -> f64) -> ReduceResult {\n\n Ok(sum(entries, extract)? 
/ entries.len() as f64)\n\n}\n\n\n", "file_path": "src/sp_lib/stats/reduce_ftns.rs", "rank": 67, "score": 133981.83120739134 }, { "content": "pub fn variance<Entry>(entries: &[Entry],\n\n extract: impl Fn(&Entry) -> f64) -> ReduceResult {\n\n let avg = mean(entries, &extract)?;\n\n let mut variance = reduce(entries,\n\n 0.0,\n\n |v, e| Ok(v + (extract(e) - avg).powf(2.0)))?;\n\n if entries.len() > 1 {\n\n variance = variance / (entries.len() - 1) as f64;\n\n }\n\n Ok(variance)\n\n}\n\n\n", "file_path": "src/sp_lib/stats/reduce_ftns.rs", "rank": 68, "score": 133981.83120739134 }, { "content": "#[inline(always)]\n\npub fn stddev<Entry>(entries: &[Entry],\n\n extract: impl Fn(&Entry) -> f64) -> ReduceResult {\n\n Ok(variance(entries, extract)?.sqrt())\n\n}\n\n\n\n// --------------------------------------------------------------------------------\n\n// Unit Tests\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::datastore::history::History;\n\n\n\n #[test]\n\n fn test_reduce() {\n\n let data = get_data();\n\n let entries = data.entries();\n\n let result = reduce(entries, 0.0, |r, e| Ok(r + e.open)).unwrap();\n\n assert_eq!(result, 53.75);\n\n }\n", "file_path": "src/sp_lib/stats/reduce_ftns.rs", "rank": 69, "score": 133981.83120739134 }, { "content": "pub fn reduce<Entry>(entries: &[Entry],\n\n init: f64,\n\n ftn: impl Fn(f64, &Entry) -> ReduceResult) -> ReduceResult {\n\n let mut ret = init;\n\n for entry in entries {\n\n ret = ftn(ret, entry)?;\n\n }\n\n Ok(ret)\n\n}\n\n\n", "file_path": "src/sp_lib/stats/reduce_ftns.rs", "rank": 70, "score": 133981.83120739134 }, { "content": "pub fn parsed_stocks_file(parsed_args: &ArgMatches) -> Option<String> {\n\n match parsed_args.value_of(\"stocks_file\") {\n\n Some(value) => Some(String::from(value)),\n\n None => None\n\n }\n\n}\n", "file_path": "src/sp_lib/util/common_args.rs", "rank": 71, "score": 129849.05835635692 }, { "content": "#[inline(always)]\n\npub fn today() -> LocalDate {\n\n 
Local::today()\n\n}\n\n\n", "file_path": "src/sp_lib/util/datetime.rs", "rank": 72, "score": 128854.11003918755 }, { "content": "#[inline(always)]\n\npub fn count_days(from_date: &LocalDate, to_date: &LocalDate) -> i64 {\n\n to_date.signed_duration_since(from_date.clone()).num_days()\n\n}\n\n\n", "file_path": "src/sp_lib/util/datetime.rs", "rank": 73, "score": 125624.19639564349 }, { "content": "#[inline(always)]\n\npub fn today_plus_days(days: i64) -> LocalDate {\n\n Local::today() + Duration::days(days)\n\n}\n\n\n", "file_path": "src/sp_lib/util/datetime.rs", "rank": 74, "score": 114926.74895467116 }, { "content": "#[test]\n\nfn test_stock_update_from_csv_no_data() {\n\n let csv = \"Date,Open,High,Low,Close,Adj Close,Volume\";\n\n let mut stock = Stock::new(String::from(\"STCK\"), StockType::Stock, make_date(2021, 2, 1), 100, 24.0);\n\n assert!(!update_stock_from_csv(&mut stock, &csv).unwrap());\n\n assert!(price_eql(stock.latest_price, 0.00));\n\n assert_eq!(stock.latest_date, earliest_date());\n\n}\n\n\n", "file_path": "tests/test_portfolio.rs", "rank": 75, "score": 112683.29251366144 }, { "content": "#[inline(always)]\n\npub fn make_date(year: i32, month: u32, day: u32) -> LocalDate {\n\n chrono::Local.ymd(year, month, day)\n\n}\n\n\n", "file_path": "src/sp_lib/util/datetime.rs", "rank": 76, "score": 112330.07751826834 }, { "content": "fn sp_ds_root() -> String {\n\n format!(\"{}\", env::temp_dir().display())\n\n}\n\n\n", "file_path": "tests/test_datastore.rs", "rank": 77, "score": 110583.29367897706 }, { "content": "#[test]\n\nfn test_stock_update_from_csv_incomplete_data() {\n\n let csv = \"Date,Open,High,Low,Close,Adj Close,Volume\\n\\\n\n 2021-02-24,25.0,30.0\";\n\n let mut stock = Stock::new(String::from(\"STCK\"), StockType::Stock, make_date(2021, 2, 1), 100, 24.0);\n\n assert!(update_stock_from_csv(&mut stock, &csv).is_err());\n\n assert!(price_eql(stock.latest_price, 0.00));\n\n assert_eq!(stock.latest_date, earliest_date());\n\n}\n\n\n", "file_path": 
"tests/test_portfolio.rs", "rank": 78, "score": 109997.50353814848 }, { "content": "#[test]\n\nfn test_stock_update_from_csv_zero_price() {\n\n let csv = \"Date,Open,High,Low,Close,Adj Close,Volume\\n\\\n\n 2021-02-24,25.0,30.0,20.0,26.0,26.0,10000\\n\\\n\n 2021-02-25,26.10,31.0,22.0,24.0,24.0,9000\\n\\\n\n 2021-02-26,24.90,32.0,24.0,28.0,0.00,11000\";\n\n let mut stock = Stock::new(String::from(\"STCK\"), StockType::Stock, make_date(2021, 2, 1), 100, 24.0);\n\n assert!(!update_stock_from_csv(&mut stock, &csv).unwrap());\n\n assert!(price_eql(stock.latest_price, 0.00));\n\n assert_eq!(stock.latest_date, earliest_date());\n\n}\n\n\n", "file_path": "tests/test_portfolio.rs", "rank": 79, "score": 109773.8548641563 }, { "content": "fn sp_ds_drop(which: &str) {\n\n let ds = datastore::DataStore::new(&sp_ds_root(), sp_ds_name());\n\n assert!(ds.exists());\n\n assert!(ds.drop_symbol(which, sp_ds_symbol()).is_ok());\n\n}\n", "file_path": "tests/test_datastore.rs", "rank": 80, "score": 106152.49734538863 }, { "content": "type FtnResult = Result<(), Box<dyn Error>>;\n\n\n\npub struct DataStore {\n\n root: PathBuf,\n\n name: String,\n\n base_path: PathBuf\n\n}\n\n\n\nimpl DataStore {\n\n pub fn new(root: &str, name: &str) -> Self {\n\n DataStore {\n\n root: PathBuf::from(root),\n\n name: String::from(name),\n\n base_path: DataStore::make_base_path(&root, &name)\n\n }\n\n }\n\n\n\n #[inline(always)]\n\n pub fn root(&self) -> &Path {\n\n &self.root\n", "file_path": "src/sp_lib/datastore/datastore.rs", "rank": 81, "score": 104550.89824263088 }, { "content": "fn sp_ds_insert(which: &str, idx: i32) {\n\n let ds = datastore::DataStore::new(&sp_ds_root(), sp_ds_name());\n\n assert!(ds.exists());\n\n assert!(ds.insert_symbol(which, sp_ds_symbol(), sp_ds_data(which, idx)).is_ok());\n\n}\n\n\n", "file_path": "tests/test_datastore.rs", "rank": 82, "score": 97431.70195464857 }, { "content": "fn remove_path(path: &Path) -> bool {\n\n if path.exists() {\n\n if let Err(_) = 
fs::remove_file(path) {\n\n return false;\n\n }\n\n }\n\n return true;\n\n}\n\n\n\n// --------------------------------------------------------------------------------\n\n// Unit Tests\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use std::fs::File;\n\n use std::io::BufReader;\n\n\n\n #[test]\n\n fn test_temp_file_empty() {\n", "file_path": "src/sp_lib/util/temp_file.rs", "rank": 83, "score": 97399.51286920656 }, { "content": "pub fn stock_groupby<T>(stocks: &StockList,\n\n init: fn (&Stock) -> T,\n\n ftn: fn(&Stock, &T) -> T) -> HashMap<String, T> {\n\n let mut groupby: HashMap<String, T> = HashMap::new();\n\n for stock in stocks.iter() {\n\n let entry = groupby.entry(stock.symbol.to_string()).or_insert(init(stock));\n\n *entry = ftn(stock, entry);\n\n }\n\n groupby\n\n}\n\n\n", "file_path": "src/sp_lib/portfolio/algorithms.rs", "rank": 84, "score": 89236.3549426533 }, { "content": "pub fn pct_change(stocks: &StockList) -> f64 {\n\n let base: Price = stocks.iter().map(|stock| stock.base_price).sum();\n\n let net: Price = stocks.iter().map(|stock| stock.net_price()).sum();\n\n 100.0 * net / base\n\n}\n\n\n", "file_path": "src/sp_lib/portfolio/algorithms.rs", "rank": 85, "score": 89236.3549426533 }, { "content": "#[test]\n\nfn test_value_export() {\n\n let mut list = StockList::new();\n\n list.push(make_stock(\"DELL\", StockType::Stock, today_plus_days(-2), 100, 75.50, 80.0));\n\n list.push(make_stock(\"AAPL\", StockType::Stock, today_plus_days(-3), 100, 120.25, 125.25));\n\n list.push(make_stock(\"ICLN\", StockType::ETF, today_plus_days(0), 100, 24.10, 24.15));\n\n\n\n let temp_name = \"sp_test_value_export.csv\";\n\n let csv_filename = temp_file::make_path(&temp_name);\n\n value_export(&list, &csv_filename.to_str().unwrap()).unwrap();\n\n\n\n let csv_content = fs::read_to_string(&csv_filename).unwrap();\n\n let today_str = today_plus_days(0).format(\"%Y-%m-%d\");\n\n let expected = format!(\"Symbol,Buy Date,Upd Date,Days Held,Size,Base,Cur,Net,Pct,Base 
Value,Cur Value,Net Value,Cum Div\\n\\\n\n DELL,{},{},2,100,75.50,80.00,4.50,5.96,7550.00,8000.00,450.00,0.00\\n\\\n\n AAPL,{},{},3,100,120.25,125.25,5.00,4.16,12025.00,12525.00,500.00,0.00\\n\\\n\n ICLN,{},{},0,100,24.10,24.15,0.05,0.21,2410.00,2415.00,5.00,0.00\\n\",\n\n today_plus_days(-2).format(\"%Y-%m-%d\"),\n\n today_str,\n\n today_plus_days(-3).format(\"%Y-%m-%d\"),\n\n today_str,\n\n today_str,\n\n today_str);\n\n assert_eq!(csv_content, expected);\n\n\n\n assert!(temp_file::remove_file(&temp_name));\n\n}\n\n\n", "file_path": "tests/test_portfolio.rs", "rank": 86, "score": 85297.29353425355 }, { "content": "#[test]\n\nfn test_history_query() {\n\n let start = datetime::make_date(2021, 2, 11);\n\n let end = datetime::make_date(2021, 2, 13);\n\n let mut query = HistoryQuery::new(String::from(\"AAPL\"), start, end, Interval::Daily, Events::History);\n\n\n\n assert_eq!(query.execute(), Ok(()));\n\n assert!(query.result.len() > 0);\n\n\n\n let result_vec: Vec<&str> = query.result.lines().collect();\n\n assert_eq!(result_vec.len(), 3);\n\n assert_eq!(result_vec[0], \"Date,Open,High,Low,Close,Adj Close,Volume\");\n\n\n\n let prices_vec: Vec<&str> = result_vec[1].split(\",\").collect();\n\n assert_eq!(prices_vec.len(), 7);\n\n assert_eq!(prices_vec[0], \"2021-02-11\");\n\n assert!(prices_vec[6].parse::<u32>().unwrap() >= 64000000);\n\n check_prices(&prices_vec, &vec![\"135.90\", \"136.39\", \"133.77\", \"135.13\", \"134.50\"]);\n\n\n\n let prices_vec: Vec<&str> = result_vec[2].split(\",\").collect();\n\n assert_eq!(prices_vec.len(), 7);\n\n assert_eq!(prices_vec[0], \"2021-02-12\");\n\n assert!(prices_vec[6].parse::<u32>().unwrap() >= 60000000);\n\n check_prices(&prices_vec, &vec![\"134.35\", \"135.53\", \"133.69\", \"135.37\", \"134.74\"]);\n\n}\n\n\n", "file_path": "tests/test_yfinance.rs", "rank": 87, "score": 84877.16191352645 }, { "content": "fn sp_ds_export_symbol() {\n\n let ds = datastore::DataStore::new(&sp_ds_root(), sp_ds_name());\n\n 
assert!(ds.exists());\n\n\n\n let csv_name = \"sp_ds_export_symbol.csv\";\n\n let csv_filename = temp_file::make_path(&csv_name);\n\n\n\n temp_file::remove_file(&csv_name);\n\n\n\n assert!(export::export_symbol(&ds, sp_ds_symbol(), &csv_filename.to_str().unwrap()).is_ok());\n\n\n\n let expect = \"date,open,high,low,close,adj_close,volume,dividend\\n\\\n\n 2021-02-22,10.00,12.00,8.00,11.00,11.00,10000,0.00\\n\\\n\n 2021-02-23,11.00,12.50,8.50,11.50,11.50,9000,1.20\\n\\\n\n 2021-02-24,11.50,14.00,11.00,12.50,12.50,11000,0.00\\n\\\n\n 2021-02-25,12.50,13.50,10.50,12.00,12.00,10000,0.00\\n\\\n\n 2021-02-26,12.00,14.00,11.00,14.00,14.00,12000,0.00\\n\";\n\n let actual = fs::read_to_string(&csv_filename).unwrap();\n\n assert_eq!(actual, expect);\n\n\n\n assert!(temp_file::remove_file(&csv_name));\n\n}\n\n\n", "file_path": "tests/test_datastore.rs", "rank": 88, "score": 83275.52805042973 }, { "content": "#[test]\n\nfn test_stock_update_from_csv() {\n\n let csv = \"Date,Open,High,Low,Close,Adj Close,Volume\\n\\\n\n 2021-02-26,24.90,32.0,24.0,28.0,28.25,11000\";\n\n let mut stock = Stock::new(String::from(\"STCK\"), StockType::Stock, make_date(2021, 2, 1), 100, 24.0);\n\n assert!(update_stock_from_csv(&mut stock, &csv).unwrap());\n\n assert!(price_eql(stock.latest_price, 28.25));\n\n assert_eq!(stock.latest_date, make_date(2021, 2, 26));\n\n}\n\n\n", "file_path": "tests/test_portfolio.rs", "rank": 89, "score": 83258.85152690051 }, { "content": "#[test]\n\nfn test_stock_base_dates() {\n\n fn test_dates(list: &StockList) {\n\n let sym_dates = stock_base_dates(&list);\n\n assert_eq!(sym_dates.len(), 3);\n\n assert_eq!(*sym_dates.get(\"AAPL\").unwrap(), today_plus_days(-3));\n\n assert_eq!(*sym_dates.get(\"DELL\").unwrap(), today_plus_days(-2));\n\n assert_eq!(*sym_dates.get(\"ICLN\").unwrap(), today_plus_days(0));\n\n }\n\n\n\n let mut list = StockList::new();\n\n list.push(make_stock(\"DELL\", StockType::Stock, today_plus_days(-2), 100, 79.21, 79.71));\n\n 
list.push(make_stock(\"AAPL\", StockType::Stock, today_plus_days(-3), 200, 120.25, 125.25));\n\n list.push(make_stock(\"ICLN\", StockType::ETF, today_plus_days(0), 300, 24.10, 24.12));\n\n list.push(make_stock(\"AAPL\", StockType::Stock, today_plus_days(0), 100, 125.50, 125.75));\n\n test_dates(&list);\n\n\n\n let mut list = StockList::new();\n\n list.push(make_stock(\"DELL\", StockType::Stock, today_plus_days(-2), 100, 79.21, 79.71));\n\n list.push(make_stock(\"AAPL\", StockType::Stock, today_plus_days(0), 100, 125.50, 125.75));\n\n list.push(make_stock(\"ICLN\", StockType::ETF, today_plus_days(0), 300, 24.10, 24.12));\n\n list.push(make_stock(\"AAPL\", StockType::Stock, today_plus_days(-3), 200, 120.25, 125.25));\n\n test_dates(&list);\n\n}\n\n\n", "file_path": "tests/test_portfolio.rs", "rank": 90, "score": 83023.67897025401 }, { "content": "fn sp_ds_select_history() {\n\n let ds = datastore::DataStore::new(&sp_ds_root(), sp_ds_name());\n\n assert!(ds.exists());\n\n\n\n fn check_history(entry: &history::HistoryEntry, csv: &str) {\n\n let values: Vec<&str> = csv.split(',').collect();\n\n assert_eq!(values.len(), 7);\n\n assert_eq!(entry.date, datetime::parse_date(&values[0]).unwrap());\n\n assert_eq!(entry.open, values[1].parse::<Price>().unwrap());\n\n assert_eq!(entry.high, values[2].parse::<Price>().unwrap());\n\n assert_eq!(entry.low, values[3].parse::<Price>().unwrap());\n\n assert_eq!(entry.close, values[4].parse::<Price>().unwrap());\n\n assert_eq!(entry.adj_close, values[5].parse::<Price>().unwrap());\n\n assert_eq!(entry.volume, values[6].parse::<u64>().unwrap());\n\n }\n\n\n\n // No Filter\n\n match history::History::ds_select_all(&ds, sp_ds_symbol()) {\n\n Ok(hist) => {\n\n assert_eq!(hist.symbol(), sp_ds_symbol());\n", "file_path": "tests/test_datastore.rs", "rank": 91, "score": 82885.2042278648 }, { "content": "use std::cmp::Ordering;\n\n\n\n// TODO: Use fixed point number to represent price.\n\npub type PriceType = f64;\n\n\n", "file_path": 
"src/sp_lib/util/price_type.rs", "rank": 92, "score": 66207.9735701514 }, { "content": " fn test_price_eql() {\n\n assert!(price_eql(0.0, 0.0));\n\n assert!(price_eql(0.000001, 0.000001));\n\n assert!(price_eql(-0.000001, -0.000001));\n\n\n\n assert!(!price_eql(1.0, 1.1));\n\n assert!(!price_eql(0.000001, 0.000002));\n\n assert!(!price_eql(-0.000001, -0.000002));\n\n\n\n assert!(price_eql(0.000001, 0.0000009));\n\n assert!(price_eql(-0.000001, -0.0000009));\n\n }\n\n\n\n #[test]\n\n fn test_prices_eql() {\n\n let lhs: Vec<PriceType> = vec![0.1, 2.1, 3.7, 0.05];\n\n let rhs1: Vec<PriceType> = vec![0.1, 2.1, 3.7, 0.05];\n\n let rhs2: Vec<PriceType> = vec![0.1, 2.1, 3.7, 0.25];\n\n let rhs3: Vec<PriceType> = vec![0.1, 2.1, 3.7];\n\n\n", "file_path": "src/sp_lib/util/price_type.rs", "rank": 93, "score": 66202.36907784731 }, { "content": "\n\n #[test]\n\n fn test_price_cmp() {\n\n assert_eq!(price_cmp(10.50, 1.0), Ordering::Greater);\n\n assert_eq!(price_cmp(1.0, 10.50), Ordering::Less);\n\n assert_eq!(price_cmp(1.0, 1.0), Ordering::Equal);\n\n }\n\n\n\n #[test]\n\n fn test_price_zero() {\n\n assert!(price_zero(0.0));\n\n assert!(price_zero(0.0000009));\n\n assert!(price_zero(-0.0000009));\n\n\n\n assert!(!price_zero(1.0));\n\n assert!(!price_zero(0.000001));\n\n assert!(!price_zero(-0.000001));\n\n }\n\n\n\n #[test]\n", "file_path": "src/sp_lib/util/price_type.rs", "rank": 94, "score": 66198.42717773547 }, { "content": " assert!(prices_eql(&lhs, &rhs1));\n\n assert!(!prices_eql(&lhs, &rhs2));\n\n assert!(!prices_eql(&lhs, &rhs3));\n\n\n\n assert!(prices_eql(&lhs[1..3], &rhs1[1..3]));\n\n assert!(prices_eql(&lhs[1..3], &rhs2[1..3]));\n\n assert!(prices_eql(&lhs[1..3], &rhs3[1..3]));\n\n }\n\n}\n", "file_path": "src/sp_lib/util/price_type.rs", "rank": 95, "score": 66198.23979865153 }, { "content": "struct StatAgg {\n\n tot_size: u64,\n\n hist_size: u64,\n\n div_size: u64,\n\n hist_count: usize,\n\n div_count: usize\n\n}\n\n\n\npub struct Application {\n\n args: 
Arguments,\n\n sym_dates: HashMap<String, datetime::LocalDate>,\n\n ds: datastore::DataStore\n\n}\n\n\n\nimpl Application {\n\n pub fn new() -> Self {\n\n let args = Arguments::new();\n\n let ds = datastore::DataStore::new(args.ds_root(), args.ds_name());\n\n Application {\n\n args: args,\n", "file_path": "src/sp_dstool/application.rs", "rank": 96, "score": 64980.91456708222 }, { "content": "def history_tag() -> str:\n", "file_path": "scripts/python/sp_util.py", "rank": 97, "score": 58867.03034060495 }, { "content": "fn main() {\n\n let mut app = Application::new();\n\n if let Err(err) = app.run() {\n\n eprintln!(\"Error: {}\", err);\n\n process::exit(1);\n\n }\n\n}\n", "file_path": "src/sp_dstool/main.rs", "rank": 98, "score": 52923.444515964366 }, { "content": "fn main() {\n\n let mut app = Application::new();\n\n if let Err(err) = app.run() {\n\n eprintln!(\"Error: {}\", err);\n\n process::exit(1);\n\n }\n\n}\n", "file_path": "src/sp_report/main.rs", "rank": 99, "score": 52923.444515964366 } ]
Rust
src/main.rs
miyachan/lnx
a0fed83e43df6b898ff4d85dfb0f6ecd844af04c
#[macro_use] extern crate log; #[macro_use] extern crate serde_json; use std::fs::File; use std::io::BufReader; use std::sync::Arc; use anyhow::{Error, Result}; use axum::handler::{delete, get, post, Handler}; use axum::http::header; use axum::Router; use fern::colors::{Color, ColoredLevelConfig}; use hyper::http::HeaderValue; use hyper::server::conn::Http; use log::LevelFilter; use structopt::StructOpt; use tokio::net::TcpListener; use tokio_rustls::rustls::internal::pemfile::{certs, pkcs8_private_keys}; use tokio_rustls::rustls::{NoClientAuth, ServerConfig}; use tokio_rustls::TlsAcceptor; use tower::ServiceBuilder; use tower_http::auth::RequireAuthorizationLayer; use tower_http::set_header::SetResponseHeaderLayer; mod auth; mod responders; mod routes; use engine::SearchEngine; use tower::util::MapResponseLayer; use tower_http::add_extension::AddExtensionLayer; #[derive(Debug, StructOpt)] #[structopt(name = "lnx", about = "A ultra-fast, adaptable search engine.")] struct Settings { #[structopt(long, default_value = "info", env)] log_level: LevelFilter, #[structopt(long, env)] pretty_logs: Option<bool>, #[structopt(long, short, default_value = "127.0.0.1", env)] host: String, #[structopt(long, short, default_value = "8000", env)] port: u16, #[structopt(long, env)] tls_key_file: Option<String>, #[structopt(long, env)] tls_cert_file: Option<String>, #[structopt(long, short = "auth", env, hide_env_values = true)] authentication_key: Option<String>, #[structopt(long, short = "threads", env)] runtime_threads: Option<usize>, #[structopt(long, env)] log_file: Option<String>, #[structopt(long, env, takes_value = false)] enable_fast_fuzzy: bool, } fn main() { let settings = match setup() { Ok(s) => s, Err(e) => { eprintln!("error during server setup: {:?}", e); return; } }; let threads = settings.runtime_threads.unwrap_or_else(|| num_cpus::get()); info!("starting runtime with {} threads", threads); let maybe_runtime = tokio::runtime::Builder::new_multi_thread() 
.worker_threads(threads) .enable_all() .build(); let result = match maybe_runtime { Ok(runtime) => runtime.block_on(start(settings)), Err(e) => { error!("error during runtime creation: {:?}", e); return; } }; if let Err(e) = result { error!("error during server runtime: {:?}", e); } } fn setup_logger(level: LevelFilter, log_file: &Option<String>, pretty: bool) -> Result<()> { let mut colours = ColoredLevelConfig::new(); if pretty { colours = colours .info(Color::Green) .warn(Color::Yellow) .error(Color::BrightRed) .debug(Color::Magenta) .trace(Color::Cyan); } let mut builder = fern::Dispatch::new() .format(move |out, message, record| { out.finish(format_args!( "{} | {} | {:<5} - {}", chrono::Local::now().format("[%Y-%m-%d][%H:%M:%S]"), record.target(), colours.color(record.level()), message, )) }) .level(level) .level_for( "sqlx", if level == LevelFilter::Info { LevelFilter::Warn } else { level }, ) .chain(std::io::stdout()); if let Some(file) = log_file { builder = builder.chain(fern::log_file(file)?); } builder.apply()?; Ok(()) } fn setup() -> Result<Settings> { let config: Settings = Settings::from_args(); setup_logger( config.log_level, &config.log_file, config.pretty_logs.unwrap_or(true), )?; Ok(config) } async fn start(settings: Settings) -> Result<()> { info!("checking tls files"); let tls = check_tls_files(&settings)?; info!("setting up the authorization manager"); let (authorization_manager, tokens) = auth::AuthManager::connect("./lnx-data/data").await?; let authorization_manager = Arc::new(authorization_manager); info!("setting up the search engine"); let engine = Arc::new(SearchEngine::create("./lnx-data/meta", settings.enable_fast_fuzzy).await?); let super_user_middleware = ServiceBuilder::new() .layer(RequireAuthorizationLayer::custom( auth::SuperUserAuthIfEnabled::bearer( settings .authentication_key .as_ref() .map(|v| v.as_str()) .unwrap_or_else(|| ""), settings.authentication_key.is_some(), &"Missing token bearer authorization header.", )?, )) 
.layer( SetResponseHeaderLayer::<HeaderValue, hyper::Body>::overriding( header::SERVER, HeaderValue::from_static("lnx"), ), ) .layer(AddExtensionLayer::new(authorization_manager)) .layer(MapResponseLayer::new(routes::map_status)) .into_inner(); let super_user_app = Router::new() .route("/tokens/revoke", post(routes::revoke_token)) .route("/tokens/permissions", post(routes::modify_permissions)) .route("/tokens/create", post(routes::create_token)) .route("/tokens/clear", delete(routes::revoke_all)) .layer(super_user_middleware); let search_auth = auth::UserAuthIfEnabled::bearer( tokens.clone(), auth::AuthFlags::SEARCH, settings.authentication_key.is_some(), &"Invalid permissions or missing token bearer", )?; let documents_auth = auth::UserAuthIfEnabled::bearer( tokens.clone(), auth::AuthFlags::MODIFY_DOCUMENTS, settings.authentication_key.is_some(), &"Invalid permissions or missing token bearer", )?; let index_auth = auth::UserAuthIfEnabled::bearer( tokens.clone(), auth::AuthFlags::MODIFY_INDEXES, settings.authentication_key.is_some(), &"Invalid permissions or missing token bearer", )?; let index_middleware = ServiceBuilder::new() .layer( SetResponseHeaderLayer::<HeaderValue, hyper::Body>::overriding( header::SERVER, HeaderValue::from_static("lnx"), ), ) .layer(AddExtensionLayer::new(engine)) .layer(MapResponseLayer::new(routes::map_status)) .into_inner(); let app = Router::new() .route( "/indexes/:index_name/search", get(routes::search_index.layer(RequireAuthorizationLayer::custom(search_auth.clone()))) .post( routes::search_index_json.layer(RequireAuthorizationLayer::custom(search_auth)), ), ) .route( "/indexes/:index_name/commit", post( routes::commit_index_changes .layer(RequireAuthorizationLayer::custom(documents_auth.clone())), ), ) .route( "/indexes/:index_name/rollback", post( routes::rollback_index_changes .layer(RequireAuthorizationLayer::custom(documents_auth.clone())), ), ) .route( "/indexes/:index_name", delete( 
routes::delete_index.layer(RequireAuthorizationLayer::custom(index_auth.clone())), ), ) .route( "/indexes", post(routes::create_index.layer(RequireAuthorizationLayer::custom(index_auth.clone()))), ) .route( "/indexes/:index_name/documents", post( routes::add_document .layer(RequireAuthorizationLayer::custom(documents_auth.clone())), ) .delete( routes::delete_documents .layer(RequireAuthorizationLayer::custom(documents_auth.clone())), ), ) .route( "/indexes/:index_name/documents/:document_id", get(routes::get_document .layer(RequireAuthorizationLayer::custom(documents_auth.clone()))), ) .route( "/indexes/:index_name/documents/clear", delete( routes::delete_all_documents .layer(RequireAuthorizationLayer::custom(documents_auth.clone())), ), ) .layer(index_middleware) .nest("/admin", super_user_app); let addr = format!("{}:{}", &settings.host, settings.port); let handle = match tls { Some(tls) => tokio::spawn(async move { info!("starting https server @ https://{}", addr); let acceptor = TlsAcceptor::from(tls); let listener = TcpListener::bind(&addr).await?; loop { let (stream, _addr) = listener.accept().await?; let acceptor = acceptor.clone(); let ap = app.clone(); tokio::spawn(async move { if let Ok(stream) = acceptor.accept(stream).await { if let Err(e) = Http::new().serve_connection(stream, ap).await { warn!("failed to serve connection: {:?}", e); }; } }); } }), None => tokio::spawn(async move { info!("starting http server @ http://{}", addr); axum::Server::bind(&addr.parse()?) 
.serve(app.into_make_service()) .await?; Ok::<(), Error>(()) }), }; tokio::signal::ctrl_c().await?; info!("shutting down server..."); handle.abort(); Ok(()) } fn check_tls_files(settings: &Settings) -> Result<Option<Arc<ServerConfig>>> { match (&settings.tls_key_file, &settings.tls_cert_file) { (Some(fp1), Some(fp2)) => Ok(Some(tls_server_config(fp1, fp2)?)), (None, None) => Ok(None), _ => { return Err(Error::msg( "missing a required TLS field, both key and cert must be provided.", )) } } } fn tls_server_config(key: &str, cert: &str) -> Result<Arc<ServerConfig>> { let mut config = ServerConfig::new(NoClientAuth::new()); let mut key_reader = BufReader::new(File::open(key)?); let mut cert_reader = BufReader::new(File::open(cert)?); let key = pkcs8_private_keys(&mut key_reader) .map_err(|_| Error::msg("failed to extract private keys"))? .remove(0); let certs = certs(&mut cert_reader).map_err(|_| Error::msg("failed to extract certificates"))?; config.set_single_cert(certs, key)?; config.set_protocols(&[b"h2".to_vec(), b"http/1.1".to_vec()]); Ok(Arc::new(config)) }
#[macro_use] extern crate log; #[macro_use] extern crate serde_json; use std::fs::File; use std::io::BufReader; use std::sync::Arc; use anyhow::{Error, Result}; use axum::handler::{delete, get, post, Handler}; use axum::http::header; use axum::Router; use fern::colors::{Color, ColoredLevelConfig}; use hyper::http::HeaderValue; use hyper::server::conn::Http; use log::LevelFilter; use structopt::StructOpt; use tokio::net::TcpListener; use tokio_rustls::rustls::internal::pemfile::{certs, pkcs8_private_keys}; use tokio_rustls::rustls::{NoClientAuth, ServerConfig}; use tokio_rustls::TlsAcceptor; use tower::ServiceBuilder; use tower_http::auth::RequireAuthorizationLayer; use tower_http::set_header::SetResponseHeaderLayer; mod auth; mod responders; mod routes; use engine::SearchEngine; use tower::util::MapResponseLayer; use tower_http::add_extension::AddExtensionLayer; #[derive(Debug, StructOpt)] #[structopt(name = "lnx", about = "A ultra-fast, adaptable search engine.")] struct Settings { #[structopt(long, default_value = "info", env)] log_level: LevelFilter, #[structopt(long, env)] pretty_logs: Option<bool>, #[structopt(long, short, default_value = "127.0.0.1", env)] host: String, #[structopt(long, short, default_value = "8000", env)] port: u16, #[structopt(long, env)] tls_key_file: Option<String>, #[structopt(long, env)] tls_cert_file: Option<String>, #[structopt(long, short = "auth", env, hide_env_values = true)] authentication_key: Option<String>, #[structopt(long, short = "threads", env)] runtime_threads: Option<usize>, #[structopt(long, env)] log_file: Option<String>, #[structopt(long, env, takes_value = false)] enable_fast_fuzzy: bool, } fn main() { let settings = match setup() { Ok(s) => s, Err(e) => { eprintln!("error during server setup: {:?}", e); return; } }; let threads = settings.runtime_threads.unwrap_or_else(|| num_cpus::get()); info!("starting runtime with {} threads", threads); let maybe_runtime = tokio::runtime::Builder::new_multi_thread() 
.worker_threads(threads) .enable_all() .build(); let result = match maybe_runtime { Ok(runtime) => runtime.block_on(start(settings)), Err(e) => { error!("error during runtime creation: {:?}", e); return; } }; if let Err(e) = result { error!("error during server runtime: {:?}", e); } } fn setup_logger(level: LevelFilter, log_file: &Option<String>, pretty: bool) -> Result<()> { let mut colours = ColoredLevelConfig::new(); if pretty { colours = colours .info(Color::Green) .warn(Color::Yellow) .error(Color::BrightRed) .debug(Color::Magenta) .trace(Color::Cyan); } let mut builder = fern::Dispatch::new() .format(move |out, message, record| { out.finish(format_args!( "{} | {} | {:<5} - {}", chrono::Local::now().format("[%Y-%m-%d][%H:%M:%S]"), record.target(), colours.color(record.level()), message, )) }) .level(level) .level_for( "sqlx", if level == LevelFilter::Info { LevelFilter::Warn } else { level }, ) .chain(std::io::stdout()); if let Some(file) = log_file { builder = builder.chain(fern::log_file(file)?); } builder.apply()?; Ok(()) } fn setup() -> Result<Settings> { let config: Settings = Settings::from_args(); setup_logger( config.log_level, &config.log_file, config.pretty_logs.unwrap_or(true), )?; Ok(config) } async fn start(settings: Settings) -> Result<()> { info!("checking tls files"); let tls = check_tls_files(&settings)?; info!("setting up the authorization manager"); let (authorization_manager, tokens) = auth::AuthManager::connect("./lnx-data/data").await?; let authorization_manager = Arc::new(authorization_manager); info!("setting up the search engine"); let engine = Arc::new(SearchEngine::create("./lnx-data/meta", settings.enable_fast_fuzzy).await?); let super_user_middleware = ServiceBuilder::new() .layer(RequireAuthorizationLayer::custom( auth::SuperUserAuthIfEnabled::bearer( settings .authentication_key .as_ref() .map(|v| v.as_str()) .unwrap_or_else(|| ""), settings.authentication_key.is_some(), &"Missing token bearer authorization header.", )?, )) 
.layer( SetResponseHeaderLayer::<HeaderValue, hyper::Body>::overriding( header::SERVER, HeaderValue::from_static("lnx"), ), ) .layer(AddExtensionLayer::new(authorization_manager)) .layer(MapResponseLayer::new(routes::map_status)) .into_inner(); let super_user_app = Router::new() .route("/tokens/revoke", post(routes::revoke_token)) .route("/tokens/permissions", post(routes::modify_permissions)) .route("/tokens/create", post(routes::create_token)) .route("/tokens/clear", delete(routes::revoke_all)) .layer(super_user_middleware); let search_auth = auth::UserAuthIfEnabled::bearer( tokens.clone(), auth::AuthFlags::SEARCH, settings.authentication_key.is_some(), &"Invalid permissions or missing token bearer", )?; let documents_auth = auth::UserAuthIfEnabled::bearer( tokens.clone(), auth::AuthFlags::MODIFY_DOCUMENTS, settings.authentication_key.is_some(), &"Invalid permissions or missing token bearer", )?; let index_auth = auth::UserAuthIfEnabled::bearer( tokens.clone(), auth::AuthFlags::MODIFY_INDEXES, settings.authentication_key.is_some(), &"Invalid permissions or missing token bearer", )?; let index_middleware = ServiceBuilder::new() .layer( SetResponseHeaderLayer::<HeaderValue, hyper::Body>::overriding( header::SERVER, HeaderValue::from_static("lnx"), ), ) .layer(AddExtensionLayer::new(engine)) .layer(MapResponseLayer::new(routes::map_status)) .into_inner(); let app = Router::new() .route( "/indexes/:index_name/search", get(routes::search_index.layer(RequireAuthorizationLayer::custom(search_auth.clone()))) .post( routes::search_index_json.layer(RequireAuthorizationLayer::custom(search_auth)), ), ) .route( "/indexes/:index_name/commit", post( routes::commit_index_changes .layer(RequireAuthorizationLayer::custom(documents_auth.clone())), ), ) .route( "/indexes/:index_name/rollback", post( routes::rollback_index_changes .layer(RequireAuthorizationLayer::custom(documents_auth.clone())), ), ) .route( "/indexes/:index_name", delete( 
routes::delete_index.layer(RequireAuthorizationLayer::custom(index_auth.clone())), ), ) .route( "/indexes", post(routes::create_index.layer(RequireAuthorizationLayer::custom(index_auth.clone()))), ) .route( "/indexes/:index_name/documents", post( routes::add_document .layer(RequireAuthorizationLayer::custom(documents_auth.clone())), ) .delete( routes::delete_documents .layer(RequireAuthorizationLayer::custom(documents_auth.clone())), ), ) .route( "/indexes/:index_name/documents/:document_id", get(routes::get_document .layer(RequireAuthorizationLayer::custom(documents_auth.clone()))), ) .route( "/indexes/:index_name/documents/clear", delete( routes::delete_all_documents .layer(RequireAuthorizationLayer::custom(documents_auth.clone())), ), ) .layer(index_middleware) .nest("/admin", super_user_app); let addr = format!("{}:{}", &settings.host, settings.port); let handle = match tls { Some(tls) => tokio::spawn(async move { info!("starting https server @ https://{}", addr); let acceptor = TlsAcceptor::from(tls); let listener = TcpListener::bind(&addr).await?; loop { let (stream, _addr) = listener.accept().await?; let acceptor = acceptor.clone(); let ap = app.clone(); tokio::spawn(async move { if let Ok(stream) = acceptor.accept(stream).await { if let Err(e) = Http::new().serve_connection(stream, ap).await { warn!("failed to serve connection: {:?}", e); }; } }); } }), None => tokio::spawn(async move { info!("starting http server @ http://{}", addr); axum::Server::bind(&addr.parse()?) .serve(app.into_make_service()) .await?; Ok::<(), Error>(()) }), }; tokio::signal::ctrl_c().await?; info!("shutting down server..."); handle.abort(); Ok(()) } fn check_tls_
fn tls_server_config(key: &str, cert: &str) -> Result<Arc<ServerConfig>> { let mut config = ServerConfig::new(NoClientAuth::new()); let mut key_reader = BufReader::new(File::open(key)?); let mut cert_reader = BufReader::new(File::open(cert)?); let key = pkcs8_private_keys(&mut key_reader) .map_err(|_| Error::msg("failed to extract private keys"))? .remove(0); let certs = certs(&mut cert_reader).map_err(|_| Error::msg("failed to extract certificates"))?; config.set_single_cert(certs, key)?; config.set_protocols(&[b"h2".to_vec(), b"http/1.1".to_vec()]); Ok(Arc::new(config)) }
files(settings: &Settings) -> Result<Option<Arc<ServerConfig>>> { match (&settings.tls_key_file, &settings.tls_cert_file) { (Some(fp1), Some(fp2)) => Ok(Some(tls_server_config(fp1, fp2)?)), (None, None) => Ok(None), _ => { return Err(Error::msg( "missing a required TLS field, both key and cert must be provided.", )) } } }
function_block-function_prefixed
[ { "content": "fn main() -> Result<()> {\n\n // Tell Cargo that if the given file changes, to rerun this build script.\n\n println!(\"cargo:rerun-if-changed=./datasets\");\n\n\n\n let _ = fs::remove_dir_all(\"./_dist\");\n\n fs::create_dir_all(\"./_dist\")?;\n\n\n\n compress_frequency_dicts()?;\n\n compress_stop_words()?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "engine/build.rs", "rank": 1, "score": 199504.893613458 }, { "content": "/// Executes a search for a given query with a given searcher, limit and schema.\n\n///\n\n/// This will process and time the execution time to build into the exportable\n\n/// data.\n\nfn search(\n\n query: Box<dyn Query>,\n\n searcher: LeasedItem<Searcher>,\n\n executor: &Executor,\n\n limit: usize,\n\n offset: usize,\n\n schema: Schema,\n\n order_by: Option<Field>,\n\n) -> Result<QueryResults> {\n\n let start = std::time::Instant::now();\n\n\n\n let collector = TopDocs::with_limit(limit).and_offset(offset);\n\n\n\n let (hits, count) = if let Some(field) = order_by {\n\n match schema.get_field_entry(field).field_type() {\n\n FieldType::I64(_) => {\n\n let out: (Vec<(i64, DocAddress)>, usize) =\n\n order_and_search!(searcher, collector, field, &query, executor)?;\n\n (process_search!(searcher, schema, out.0), out.1)\n\n },\n", "file_path": "engine/src/index/reader.rs", "rank": 5, "score": 149207.60526371418 }, { "content": "fn main() -> anyhow::Result<()> {\n\n let _ = std::env::set_var(\"RUST_LOG\", \"info\");\n\n pretty_env_logger::init();\n\n\n\n let cmd: Commands = Commands::from_args();\n\n\n\n match cmd {\n\n Commands::Bench {\n\n address,\n\n target,\n\n mode,\n\n data_file,\n\n concurrency,\n\n threads,\n\n output_dir,\n\n search_terms,\n\n no_prep,\n\n index,\n\n } => {\n\n let ctx = benchmark::Context {\n", "file_path": "lnxcli/src/main.rs", "rank": 6, "score": 139652.59708314782 }, { "content": "fn compress_frequency_dicts() -> Result<()> {\n\n if !path::Path::new(\"./datasets/dictionaries\").exists() {\n\n return 
Ok(());\n\n }\n\n\n\n let mut data = vec![];\n\n for entry in fs::read_dir(\"./datasets/dictionaries\")? {\n\n let entry = entry?;\n\n let path = entry.path();\n\n if path.is_dir() {\n\n continue;\n\n }\n\n\n\n let mut file = fs::read(path)?;\n\n data.append(&mut file);\n\n }\n\n\n\n let mut encoder = GzEncoder::new(Vec::new(), Compression::best());\n\n encoder.write_all(&data)?;\n\n let data = encoder.finish()?;\n\n\n\n fs::write(\"./_dist/dictionary\", &data)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "engine/build.rs", "rank": 7, "score": 139599.6266781818 }, { "content": "fn compress_stop_words() -> Result<()> {\n\n if !path::Path::new(\"./datasets/stop_words\").exists() {\n\n return Ok(());\n\n }\n\n\n\n let mut data = vec![];\n\n for entry in fs::read_dir(\"./datasets/stop_words\")? {\n\n let entry = entry?;\n\n let path = entry.path();\n\n if path.is_dir() {\n\n continue;\n\n }\n\n\n\n let mut file = fs::read(path)?;\n\n data.append(&mut file);\n\n }\n\n\n\n let mut encoder = GzEncoder::new(Vec::new(), Compression::best());\n\n encoder.write_all(&data)?;\n\n let data = encoder.finish()?;\n\n\n\n fs::write(\"./_dist/stop_words\", &data)?;\n\n\n\n Ok(())\n\n}\n", "file_path": "engine/build.rs", "rank": 8, "score": 139599.6266781818 }, { "content": "fn get_client_and_addr(address: Arc<String>, index: &str) -> (RequestClient, TargetUri) {\n\n let search_addr = Arc::new(format!(\"{}/indexes/{}/search\", address, index));\n\n let client = reqwest::Client::new();\n\n\n\n (client, search_addr)\n\n}\n\n\n\npub(crate) async fn start_standard<T: Future<Output = Result<u16>>>(\n\n address: Arc<String>,\n\n mut sample: SamplerHandle,\n\n terms: Vec<String>,\n\n index: &str,\n\n callback: fn(RequestClient, TargetUri, Query) -> T,\n\n) -> Result<()> {\n\n let (client, search_addr) = get_client_and_addr(address, index);\n\n sample.start_timing();\n\n\n\n for term in terms.iter() {\n\n let start = Instant::now();\n\n let status = callback(client.clone(), search_addr.clone(), 
term.clone()).await?;\n", "file_path": "lnxcli/benchmark/src/shared.rs", "rank": 9, "score": 125042.19204177582 }, { "content": "pub fn correct_doc_fields(doc: &mut structures::Document, indexed_text_fields: &Vec<String>) {\n\n let mut changes = vec![];\n\n\n\n for target in indexed_text_fields {\n\n let id = hash(target);\n\n\n\n let maybe_values = doc.0.get(target);\n\n if let Some(values) = maybe_values {\n\n match values {\n\n DocumentItem::Single(value) => {\n\n if let DocumentValue::Text(ref data) = value {\n\n let corrected = correct_sentence(data, 1);\n\n changes.push((format!(\"_{}\", id), DocumentItem::Single(DocumentValue::Text(corrected))));\n\n }\n\n },\n\n DocumentItem::Multi(values) => {\n\n let mut local_changes = vec![];\n\n for val in values {\n\n if let DocumentValue::Text(ref data) = val {\n\n let corrected = correct_sentence(data, 1);\n", "file_path": "engine/src/helpers.rs", "rank": 10, "score": 120959.78782634079 }, { "content": "fn add_value(key: &String, field: Field, field_type: &FieldType, value: DocumentValue, doc: &mut InternalDocument) -> Result<()> {\n\n match (value, field_type) {\n\n (DocumentValue::I64(v), FieldType::I64(_)) => doc.add_i64(field, v),\n\n (DocumentValue::U64(v), FieldType::U64(_)) => doc.add_u64(field, v),\n\n (DocumentValue::F64(v), FieldType::F64(_)) => doc.add_f64(field, v),\n\n (DocumentValue::Text(v), FieldType::Str(_)) => doc.add_text(field, v),\n\n (DocumentValue::Datetime(v), FieldType::Str(_)) => doc.add_text(field, v.to_string()),\n\n (DocumentValue::Datetime(v), FieldType::Date(_)) => doc.add_date(field, &v),\n\n (DocumentValue::I64(v), FieldType::Date(_)) => {\n\n match chrono::NaiveDateTime::from_timestamp_opt(v, 0) {\n\n Some(dt) => {\n\n let dt = chrono::DateTime::from_utc(dt, Utc);\n\n doc.add_date(field, &dt)\n\n },\n\n None =>\n\n return Err(Error::msg(format!(\"filed {:?} is type {:?} in schema but did not get a valid value (invalid timestamp)\", &key, field_type))),\n\n }\n\n },\n\n 
(DocumentValue::U64(v), FieldType::Date(_)) => {\n\n match chrono::NaiveDateTime::from_timestamp_opt(v as i64, 0) {\n", "file_path": "engine/src/structures.rs", "rank": 12, "score": 112180.15092844318 }, { "content": "type FieldSearch = HashMap<String, String>;\n\n\n\n/// Extracts a leased index or returns a json response\n\n/// with a 400 status code.\n\nmacro_rules! get_index_or_reject {\n\n ($engine:expr, $name:expr) => {{\n\n match $engine.get_index($name).await {\n\n None => {\n\n warn!(\"rejected request due to unknown index {:?}\", $name);\n\n return json_response(\n\n StatusCode::BAD_REQUEST,\n\n &format!(\"no index exists with name '{}\", $name),\n\n );\n\n }\n\n Some(index) => index,\n\n }\n\n }};\n\n}\n\n\n\n/// Checks for any errors in the given operation.\n", "file_path": "src/routes.rs", "rank": 14, "score": 104773.40066456782 }, { "content": "/// Generates a MoreLikeThisQuery which matches similar documents\n\n/// as the given reference document.\n\nfn parse_more_like_this(ref_document: DocAddress) -> Result<Box<dyn Query>> {\n\n let query = MoreLikeThisQuery::builder()\n\n .with_min_doc_frequency(1)\n\n .with_max_doc_frequency(10)\n\n .with_min_term_frequency(1)\n\n .with_min_word_length(2)\n\n .with_max_word_length(18)\n\n .with_boost_factor(1.0)\n\n .with_stop_words(crate::stop_words::get_stop_words()?)\n\n .with_document(ref_document);\n\n\n\n Ok(Box::new(query))\n\n}\n\n\n\n/// Represents a single query result.\n\n#[derive(Serialize)]\n\npub struct QueryHit {\n\n /// The address of the given document, this can be used for\n\n /// 'more like this' queries.\n\n pub(super) document_id: String,\n", "file_path": "engine/src/index/reader.rs", "rank": 15, "score": 97954.6811996439 }, { "content": "/// Generates a query from any of the 3 possible systems to\n\n/// query documents.\n\nfn parse_query(\n\n index: &tantivy::Index,\n\n parser: Arc<QueryParser>,\n\n search_fields: Arc<Vec<(Field, Score)>>,\n\n query: Option<Either<String, HashMap<String, 
String>>>,\n\n ref_document: Option<DocAddress>,\n\n mode: QueryMode,\n\n use_fast_fuzzy: bool,\n\n strip_stop_words: bool,\n\n) -> Result<Box<dyn Query>> {\n\n let start = std::time::Instant::now();\n\n let out = match (mode, &query, ref_document) {\n\n (QueryMode::Normal, None, _) => Err(Error::msg(\n\n \"query mode was `Normal` but query string is `None`\",\n\n )),\n\n (QueryMode::Normal, Some(Either::Left(query)), _) => Ok(parser.parse_query(query)?),\n\n (QueryMode::Normal, Some(Either::Right(query)), _) => {\n\n let queries = query.iter().map(|(field, query)| {\n\n let field = match index.schema().get_field(field) {\n\n Some(f) => vec![f],\n", "file_path": "engine/src/index/reader.rs", "rank": 16, "score": 94630.36851597395 }, { "content": "/// Uses the fast fuzzy system to match similar documents with\n\n/// typo tolerance.\n\n///\n\n/// Unlike the standard fuzzy query which uses Levenshtein distance\n\n/// this system uses pre-computation via symspell which is considerably\n\n/// quicker than the standard method.\n\n///\n\n/// However, this requires additional private fields in the schema to\n\n/// be affective with relevancy as names often get corrected to dictionary\n\n/// words which alters the behaviour of the ranking.\n\n/// To counter act this, the system runs the same correction on indexed\n\n/// text fields to counter act this name handling issue.\n\nfn parse_fast_fuzzy_query(\n\n query: &str,\n\n search_fields: Arc<Vec<(Field, Score)>>,\n\n strip_stop_words: bool,\n\n) -> Result<Box<dyn Query>> {\n\n debug!(\"using fast fuzzy system for {}\", &query);\n\n if query.is_empty() {\n\n return Ok(Box::new(EmptyQuery {}));\n\n }\n\n\n\n let stop_words = crate::stop_words::get_hashset_words()?;\n\n let mut parts: Vec<(Occur, Box<dyn Query>)> = Vec::new();\n\n let sentence = correct_sentence(query, 1);\n\n let words: Vec<&str> = sentence.split(\" \").collect();\n\n let mut ignore_stop_words = false;\n\n if strip_stop_words && words.len() > 1 {\n\n for word 
in words.iter() {\n\n if !stop_words.contains(*word) {\n\n ignore_stop_words = true;\n\n break;\n", "file_path": "engine/src/index/reader.rs", "rank": 17, "score": 88156.82450923612 }, { "content": " /// Whether or not to use fast fuzzy searching.\n\n use_fast_fuzzy: bool,\n\n}\n\n\n\nimpl IndexHandler {\n\n /// Gets a tantivy Index either from an existing directory or\n\n /// makes a new system.\n\n async fn get_index_from_loader(loader: &LoadedIndex) -> Result<(Index, Option<String>)> {\n\n if let IndexStorageType::FileSystem = &loader.storage_type {\n\n let path = format!(\"{}/{}\", INDEX_DATA_PATH, &loader.name);\n\n if std::path::Path::new(&path).exists() {\n\n info!(\n\n \"[ SETUP @ {} ] using existing schema metadata\",\n\n &loader.name\n\n );\n\n return Ok((Index::open_in_dir(&path)?, Some(path.clone())));\n\n }\n\n }\n\n\n\n let index = IndexBuilder::default().schema(loader.schema.clone());\n", "file_path": "engine/src/index/mod.rs", "rank": 18, "score": 82881.53440594254 }, { "content": " /// This uses a concurrency permit while completing the operation.\n\n pub async fn get_doc(&self, doc_address: u64) -> Result<QueryHit> {\n\n let mut doc = self.reader.get_doc(doc_address).await?;\n\n\n\n let id = doc.0.remove(\"_id\").ok_or_else(|| {\n\n Error::msg(\n\n \"document has been missed labeled (missing identifier tag), the dataset is invalid\",\n\n )\n\n })?;\n\n\n\n if let Value::U64(v) = id[0] {\n\n Ok(QueryHit {\n\n document_id: format!(\"{}\", v),\n\n doc,\n\n ratio: serde_json::json!(100.0),\n\n })\n\n } else {\n\n Err(Error::msg(\n\n \"document has been missed labeled (missing identifier tag), the dataset is invalid\",\n\n ))\n", "file_path": "engine/src/index/mod.rs", "rank": 19, "score": 82879.49957679708 }, { "content": " (index.open_or_create(dir)?, Some(path.clone()))\n\n },\n\n };\n\n\n\n Ok(out)\n\n }\n\n\n\n /// Creates a new index handler from a given loaded index.\n\n ///\n\n /// This constructs both the Tantivy index, thread pool and worker 
thread.\n\n ///\n\n /// ### Important note about performance:\n\n /// The concurrency limit should be set according to the machine\n\n /// this system is being deployed on hence being a required field.\n\n /// The amount of threads spawned is equal the the (`max_concurrency` * `reader_threads`) + `1`\n\n /// as well as the tokio runtime threads.\n\n pub(crate) async fn build_loaded(loader: LoadedIndex) -> Result<Self> {\n\n let (index, dir) = Self::get_index_from_loader(&loader).await?;\n\n let schema_copy = index.schema();\n\n\n", "file_path": "engine/src/index/mod.rs", "rank": 20, "score": 82878.72507775023 }, { "content": "\n\nstatic INDEX_DATA_PATH: &str = \"./lnx/index-data\";\n\n\n\n/// A search engine index.\n\n///\n\n/// Each index maintains a rayon thread pool which searches are executed\n\n/// as well as an worker thread which is used to interact with the index writer.\n\n///\n\n/// The amount of threads `n` is determined by the the `max_concurrency` parameter.\n\n///\n\n/// ### Mutating the index behaviour:\n\n/// This system simple schedules the operations in the order they are invoked\n\n/// however, this system does not wait for the operation to be completed.\n\n/// This essentially follows the behaviour of eventual consistency; The operations\n\n/// are guaranteed to be applied within some time in the near future.\n\npub struct IndexHandler {\n\n /// The name of the index.\n\n pub(crate) name: String,\n\n\n\n /// The internal tantivy index.\n", "file_path": "engine/src/index/mod.rs", "rank": 21, "score": 82873.22203905243 }, { "content": "use std::sync::Arc;\n\n\n\nuse anyhow::{Error, Result};\n\nuse parking_lot::Mutex;\n\nuse tantivy::directory::MmapDirectory;\n\nuse tantivy::query::QueryParser;\n\nuse tantivy::schema::{Schema, Value, FieldType};\n\nuse tantivy::{Document, Index, IndexBuilder, ReloadPolicy, Term};\n\nuse tokio::fs;\n\nuse tokio::task::JoinHandle;\n\n\n\nuse crate::correction;\n\nuse crate::helpers::{self, hash};\n\nuse 
crate::index::reader::QueryHit;\n\nuse crate::structures::{self, IndexStorageType, LoadedIndex, QueryPayload, DocumentValue};\n\nuse chrono::Utc;\n\n\n\npub(super) mod reader;\n\npub(super) mod writer;\n\npub(super) mod executor;\n", "file_path": "engine/src/index/mod.rs", "rank": 22, "score": 82872.98477940749 }, { "content": "\n\n let out = match &loader.storage_type {\n\n IndexStorageType::TempDir => {\n\n info!(\n\n \"[ SETUP @ {} ] creating index in a temporary directory\",\n\n &loader.name\n\n );\n\n (index.create_from_tempdir()?, None)\n\n },\n\n IndexStorageType::Memory => {\n\n info!(\"[ SETUP @ {} ] creating index in memory\", &loader.name);\n\n (index.create_in_ram()?, None)\n\n },\n\n IndexStorageType::FileSystem => {\n\n info!(\"[ SETUP @ {} ] creating index in directory\", &loader.name);\n\n\n\n let path = format!(\"{}/{}\", INDEX_DATA_PATH, &loader.name);\n\n fs::create_dir_all(&path).await?;\n\n\n\n let dir = MmapDirectory::open(&path)?;\n", "file_path": "engine/src/index/mod.rs", "rank": 23, "score": 82871.72380555731 }, { "content": " drop(tx);\n\n\n\n for handle in handles {\n\n let documents = handle.await??;\n\n for mut doc in documents {\n\n let id = uuid::Uuid::new_v4();\n\n doc.add_u64(field, hash(&id));\n\n\n\n self.writer\n\n .send_op(writer::WriterOp::AddDocument(doc))\n\n .await?;\n\n }\n\n }\n\n\n\n Ok(())\n\n }\n\n\n\n /// Submits the delete all operation to the index writer.\n\n ///\n\n /// This will delete all documents in the index which were\n", "file_path": "engine/src/index/mod.rs", "rank": 24, "score": 82870.52963888897 }, { "content": " /// Any additions and deletions will become visible to readers once\n\n /// the operation is complete.\n\n pub async fn commit(&self) -> Result<()> {\n\n self.writer.send_op(writer::WriterOp::Commit).await\n\n }\n\n\n\n /// Submits the rollback operation to the index writer.\n\n ///\n\n /// This will undo / drop any changes made between the last commit\n\n /// and the rollback operation.\n\n pub 
async fn rollback(&self) -> Result<()> {\n\n self.writer.send_op(writer::WriterOp::Rollback).await\n\n }\n\n\n\n /// Searches the index with the given query.\n\n pub async fn search(&self, payload: QueryPayload) -> Result<reader::QueryResults> {\n\n self.reader.search(payload).await\n\n }\n\n\n\n /// Clears all documents from the index and commits.\n", "file_path": "engine/src/index/mod.rs", "rank": 25, "score": 82870.0876258854 }, { "content": " }\n\n\n\n #[inline]\n\n pub fn schema(&self) -> Schema {\n\n self.schema.clone()\n\n }\n\n\n\n #[inline]\n\n pub fn indexed_fields(&self) -> &Vec<String> {\n\n &self.indexed_text_fields\n\n }\n\n\n\n /// Builds a `Term` from a given field and value.\n\n ///\n\n /// This assumes that the value type matches up with the field type.\n\n pub fn get_term(&self, field: &str, value: DocumentValue) -> Result<Term> {\n\n let field = self\n\n .schema\n\n .get_field(field)\n\n .map(|v| Ok(v))\n", "file_path": "engine/src/index/mod.rs", "rank": 26, "score": 82870.05881757627 }, { "content": " fields, &ref_field\n\n )));\n\n },\n\n };\n\n }\n\n\n\n let mut parser = QueryParser::for_index(&index, raw_search_fields);\n\n if loader.set_conjunction_by_default {\n\n parser.set_conjunction_by_default();\n\n }\n\n\n\n for (field, boost) in search_fields.iter() {\n\n if *boost != 0.0f32 {\n\n parser.set_field_boost(*field, *boost);\n\n }\n\n }\n\n\n\n let writer = index.writer_with_num_threads(loader.writer_threads, loader.writer_buffer)?;\n\n info!(\n\n \"[ WRITER @ {} ] index writer has been allocated with {} threads and {} byte allocation\",\n", "file_path": "engine/src/index/mod.rs", "rank": 27, "score": 82870.03823853735 }, { "content": " pub async fn clear_and_commit(&self) -> Result<()> {\n\n self.clear_documents().await?;\n\n self.writer.send_op(writer::WriterOp::Commit).await?;\n\n\n\n Ok(())\n\n }\n\n\n\n /// Shuts down the index system cleaning up all pools.\n\n pub async fn shutdown(&self) -> Result<()> {\n\n 
self.writer.send_op(writer::WriterOp::__Shutdown).await?;\n\n\n\n debug!(\"[ ENGINE ] waiting on reader shutdown...\");\n\n self.reader.shutdown().await?;\n\n\n\n debug!(\"[ ENGINE ] waiting on writer shutdown...\");\n\n self.alive.recv().await?;\n\n\n\n let item = self._index.lock().take();\n\n drop(item); // lets see if this closes the dir?\n\n\n\n debug!(\"[ ENGINE ] cleaning up directory\");\n\n if let Some(dir) = self.dir.as_ref() {\n\n fs::remove_dir_all(dir).await?;\n\n }\n\n Ok(())\n\n }\n\n}\n", "file_path": "engine/src/index/mod.rs", "rank": 28, "score": 82869.52194350895 }, { "content": " }\n\n }\n\n\n\n /// Submits a document to be processed by the index writer.\n\n pub async fn add_document(&self, mut document: structures::Document) -> Result<()> {\n\n let field = self.schema.get_field(\"_id\").ok_or_else(|| {\n\n Error::msg(\n\n \"system has not correctly initialised this schema,\\\n\n are you upgrading from a older version? If yes, you need to re-create the schema.\",\n\n )\n\n })?;\n\n\n\n if correction::enabled() && self.use_fast_fuzzy {\n\n helpers::correct_doc_fields(&mut document, self.indexed_fields());\n\n }\n\n\n\n let mut doc = document.parse_into_document(&self.schema)?;\n\n\n\n let id = uuid::Uuid::new_v4();\n\n doc.add_u64(field, hash(&id));\n", "file_path": "engine/src/index/mod.rs", "rank": 29, "score": 82869.49989691918 }, { "content": " _index: Mutex<Option<Index>>,\n\n\n\n /// The internal tantivy schema.\n\n schema: Schema,\n\n\n\n /// A writer actor to handle the index writer.\n\n writer: writer::IndexWriterHandler,\n\n\n\n /// The index reader handler\n\n reader: reader::IndexReaderHandler,\n\n\n\n /// An indicator if the system is still alive or not\n\n alive: async_channel::Receiver<()>,\n\n\n\n /// The optional storage directory of the index.\n\n dir: Option<String>,\n\n\n\n /// The set of fields which are indexed.\n\n indexed_text_fields: Vec<String>,\n\n\n", "file_path": "engine/src/index/mod.rs", "rank": 30, "score": 
82869.48156852125 }, { "content": " }\n\n },\n\n (DocumentValue::U64(v), FieldType::Date(_)) => {\n\n match chrono::NaiveDateTime::from_timestamp_opt(v as i64, 0) {\n\n Some(dt) => {\n\n let dt = chrono::DateTime::from_utc(dt, Utc);\n\n Term::from_field_date(field, &dt)\n\n },\n\n None =>\n\n return Err(Error::msg(format!(\"filed {:?} is type {:?} in schema but did not get a valid value (invalid timestamp)\", &field, field_type))),\n\n }\n\n },\n\n _ => return Err(Error::msg(format!(\"filed {:?} is type {:?} in schema but did not get a valid value\", &field, field_type)))\n\n };\n\n\n\n Ok(term)\n\n }\n\n\n\n /// Gets a document with a given document address.\n\n ///\n", "file_path": "engine/src/index/mod.rs", "rank": 31, "score": 82869.2447660211 }, { "content": " /// added since the last commit.\n\n pub async fn clear_documents(&self) -> Result<()> {\n\n self.writer.send_op(writer::WriterOp::DeleteAll).await\n\n }\n\n\n\n /// Submits the delete term operation to the index writer.\n\n ///\n\n /// This will delete all documents matching the term which were\n\n /// added since the last commit.\n\n pub async fn delete_documents_with_term(&self, term: Term) -> Result<()> {\n\n self.writer\n\n .send_op(writer::WriterOp::DeleteTerm(term))\n\n .await\n\n }\n\n\n\n /// Submits the commit operation to the index writer.\n\n ///\n\n /// This will finalize any operations and save the changes, flushing them\n\n /// to disk.\n\n ///\n", "file_path": "engine/src/index/mod.rs", "rank": 32, "score": 82869.1350531709 }, { "content": "\n\n self.writer\n\n .send_op(writer::WriterOp::AddDocument(doc))\n\n .await\n\n }\n\n\n\n /// Submits many documents to the index writer.\n\n ///\n\n /// This does have significant performance improvements when using\n\n /// the fast fuzzy system, as this does parallel correction vs\n\n /// linear.\n\n ///\n\n /// If fast fuzzy is not enabled however, this just calls add_docs in a loop.\n\n pub async fn add_many_documents(&self, documents: 
Vec<structures::Document>) -> Result<()> {\n\n let field = self.schema.get_field(\"_id\").ok_or_else(|| {\n\n Error::msg(\n\n \"system has not correctly initialised this schema,\\\n\n are you upgrading from a older version? If yes, you need to re-create the schema.\",\n\n )\n\n })?;\n", "file_path": "engine/src/index/mod.rs", "rank": 33, "score": 82868.79613432499 }, { "content": " reader,\n\n loader.reader_threads as usize,\n\n parser,\n\n loader.fuzzy_search_fields,\n\n schema_copy,\n\n loader.use_fast_fuzzy,\n\n loader.strip_stop_words,\n\n )?;\n\n\n\n Ok(Self {\n\n name: loader.name,\n\n schema: index.schema(),\n\n _index: Mutex::new(Some(index)),\n\n writer: worker_handler,\n\n reader: reader_handler,\n\n alive: receiver,\n\n dir,\n\n indexed_text_fields: loader.indexed_text_fields,\n\n use_fast_fuzzy: loader.use_fast_fuzzy,\n\n })\n", "file_path": "engine/src/index/mod.rs", "rank": 34, "score": 82868.42702455404 }, { "content": "\n\n if !(correction::enabled() && self.use_fast_fuzzy) {\n\n debug!(\n\n \"[ PRE-PROCESSING @ {} ] using default fuzzy mode, ignoring pre-processing.\",\n\n &self.name\n\n );\n\n for doc in documents {\n\n self.add_document(doc).await?;\n\n }\n\n return Ok(());\n\n }\n\n debug!(\n\n \"[ PRE-PROCESSING @ {} ] running spell correction documents\",\n\n &self.name\n\n );\n\n\n\n let fields = Arc::new(self.indexed_fields().clone());\n\n let schema = self.schema.clone();\n\n let (tx, rx) = crossbeam::channel::unbounded();\n\n let handles: Vec<JoinHandle<Result<Vec<Document>>>> = (0..num_cpus::get())\n", "file_path": "engine/src/index/mod.rs", "rank": 35, "score": 82868.3746047528 }, { "content": " search_fields.push((field, 0.0f32));\n\n };\n\n },\n\n (Some(field), None) => {\n\n if let Some(boost) = loader.boost_fields.get(&ref_field) {\n\n debug!(\"boosting field for query parser {} {}\", &ref_field, boost);\n\n search_fields.push((field, *boost));\n\n } else {\n\n search_fields.push((field, 0.0f32));\n\n };\n\n },\n\n (None, _) => {\n\n 
let fields: Vec<String> = schema_copy\n\n .fields()\n\n .map(|(_, v)| v.name().to_string())\n\n .collect();\n\n\n\n return Err(Error::msg(format!(\n\n \"you defined the schema with the following fields: {:?} \\\n\n and declared the a search_field {:?} but this does not exist in the defined fields.\",\n", "file_path": "engine/src/index/mod.rs", "rank": 36, "score": 82867.68751359911 }, { "content": " &loader.name ,loader.writer_threads, loader.writer_buffer\n\n );\n\n\n\n let reader = index\n\n .reader_builder()\n\n .num_searchers(loader.max_concurrency as usize)\n\n .reload_policy(ReloadPolicy::OnCommit)\n\n .try_into()?;\n\n info!(\n\n \"[ READER @ {} ] index reader has been allocated with {} searchers\",\n\n &loader.name, loader.max_concurrency\n\n );\n\n\n\n let (sender, receiver) = async_channel::bounded(1);\n\n let worker_handler =\n\n writer::IndexWriterHandler::create(loader.name.clone(), writer, sender);\n\n\n\n let reader_handler = reader::IndexReaderHandler::create(\n\n loader.name.clone(),\n\n loader.max_concurrency as usize,\n", "file_path": "engine/src/index/mod.rs", "rank": 37, "score": 82867.06430475164 }, { "content": " // We need to extract out the fields from name to id.\n\n let mut raw_search_fields = vec![];\n\n let mut search_fields = vec![];\n\n for ref_field in loader.search_fields {\n\n let id = format!(\"_{}\", hash(&ref_field));\n\n\n\n // This checks if a search field is a indexed text field (it has a private field)\n\n // that's used internally, since we pre-compute the correction behaviour before\n\n // hand, we want to actually target those fields not the inputted fields.\n\n match (\n\n schema_copy.get_field(&ref_field),\n\n schema_copy.get_field(&id),\n\n ) {\n\n (Some(_), Some(field)) => {\n\n raw_search_fields.push(field);\n\n\n\n if let Some(boost) = loader.boost_fields.get(&ref_field) {\n\n debug!(\"boosting field for query parser {} {}\", &ref_field, boost);\n\n search_fields.push((field, *boost));\n\n } else {\n", "file_path": 
"engine/src/index/mod.rs", "rank": 38, "score": 82866.32964031809 }, { "content": " .unwrap_or_else(|| Err(Error::msg(\"unknown field\")))?;\n\n\n\n let entry = self.schema.get_field_entry(field);\n\n let field_type = entry.field_type();\n\n\n\n let term = match (value, field_type) {\n\n (DocumentValue::I64(v), FieldType::I64(_)) => Term::from_field_i64(field, v),\n\n (DocumentValue::U64(v), FieldType::U64(_)) => Term::from_field_u64(field, v),\n\n (DocumentValue::F64(v), FieldType::F64(_)) => Term::from_field_f64(field, v),\n\n (DocumentValue::Text(v), FieldType::Str(_)) => Term::from_field_text(field, &v),\n\n (DocumentValue::Datetime(v), FieldType::Str(_)) => Term::from_field_text(field, &v.to_string()),\n\n (DocumentValue::Datetime(v), FieldType::Date(_)) => Term::from_field_date(field, &v),\n\n (DocumentValue::I64(v), FieldType::Date(_)) => {\n\n match chrono::NaiveDateTime::from_timestamp_opt(v, 0) {\n\n Some(dt) => {\n\n let dt = chrono::DateTime::from_utc(dt, Utc);\n\n Term::from_field_date(field, &dt)\n\n },\n\n None =>\n\n return Err(Error::msg(format!(\"filed {:?} is type {:?} in schema but did not get a valid value (invalid timestamp)\", &field, field_type))),\n", "file_path": "engine/src/index/mod.rs", "rank": 39, "score": 82864.43928131017 }, { "content": " .map(|_| {\n\n let fields = fields.clone();\n\n let schema = schema.clone();\n\n let receiver = rx.clone();\n\n tokio::task::spawn_blocking(move || -> Result<Vec<Document>> {\n\n let mut processed_documents = vec![];\n\n while let Ok(mut doc) = receiver.recv() {\n\n helpers::correct_doc_fields(&mut doc, fields.as_ref());\n\n let doc = doc.parse_into_document(&schema)?;\n\n processed_documents.push(doc);\n\n }\n\n\n\n Ok(processed_documents)\n\n })\n\n })\n\n .collect();\n\n\n\n for doc in documents {\n\n let _ = tx.send(doc)?;\n\n }\n", "file_path": "engine/src/index/mod.rs", "rank": 40, "score": 82863.28850594368 }, { "content": "type SharedEngine = Arc<SearchEngine>;\n", "file_path": 
"src/routes.rs", "rank": 41, "score": 81228.24386736957 }, { "content": "/// Creates a fuzzy matching query, this allows for an element\n\n/// of fault tolerance with spelling. This is the default\n\n/// config as it its the most plug and play setup.\n\nfn parse_fuzzy_query(query: &str, search_fields: Arc<Vec<(Field, Score)>>) -> Box<dyn Query> {\n\n debug!(\"using default fuzzy system for {}\", &query);\n\n let mut parts: Vec<(Occur, Box<dyn Query>)> = Vec::new();\n\n\n\n for search_term in query.to_lowercase().split(\" \") {\n\n debug!(\"making fuzzy term for {}\", &search_term);\n\n if search_term.is_empty() {\n\n continue;\n\n }\n\n\n\n for (field, boost) in search_fields.iter() {\n\n let query = Box::new(FuzzyTermQuery::new_prefix(\n\n Term::from_field_text(*field, search_term),\n\n 1,\n\n true,\n\n ));\n\n\n\n if *boost > 0.0f32 {\n\n parts.push((Occur::Should, Box::new(BoostQuery::new(query, *boost))));\n\n continue;\n\n }\n\n\n\n parts.push((Occur::Should, query))\n\n }\n\n }\n\n\n\n Box::new(BooleanQuery::from(parts))\n\n}\n\n\n", "file_path": "engine/src/index/reader.rs", "rank": 42, "score": 73938.56205456109 }, { "content": "pub fn run(ctx: Context) -> anyhow::Result<()> {\n\n info!(\"starting runtime with {} threads\", ctx.threads);\n\n let runtime = tokio::runtime::Builder::new_multi_thread()\n\n .enable_all()\n\n .worker_threads(ctx.threads)\n\n .build()?;\n\n\n\n runtime.block_on(start(ctx))\n\n}\n\n\n\nasync fn start(ctx: Context) -> anyhow::Result<()> {\n\n let mut sample_system = sampler::Sampler::new(ctx.output.clone());\n\n let ctx = Arc::new(ctx);\n\n\n\n let target = ctx.target;\n\n let mode = ctx.mode;\n\n\n\n if !ctx.no_prep {\n\n prep_systems(target, &ctx.address, &ctx.data_file, &ctx.index).await?;\n\n }\n", "file_path": "lnxcli/benchmark/src/lib.rs", "rank": 43, "score": 70292.52048914353 }, { "content": "pub fn run(ctx: Context) -> anyhow::Result<()> {\n\n let runtime = tokio::runtime::Builder::new_multi_thread()\n\n .enable_all()\n\n 
.build()?;\n\n\n\n runtime.block_on(start(ctx))\n\n}\n\n\n\nasync fn start(ctx: Context) -> anyhow::Result<()> {\n\n if !ctx.target_server.starts_with(\"http\") {\n\n return Err(Error::msg(\"target server must include the http protocol.\"));\n\n }\n\n\n\n if !ctx.no_prep {\n\n prep(&ctx).await?;\n\n }\n\n\n\n let _ = routes::TARGET_URL.set(format!(\n\n \"{}/indexes/{}/search\",\n\n &ctx.target_server, &ctx.index\n", "file_path": "lnxcli/demo/src/lib.rs", "rank": 44, "score": 70292.52048914353 }, { "content": "use std::io::Write;\n\nuse std::{fs, path};\n\n\n\nuse anyhow::Result;\n\nuse flate2::write::GzEncoder;\n\nuse flate2::Compression;\n\n\n", "file_path": "engine/build.rs", "rank": 45, "score": 62013.53901382555 }, { "content": "/// Converts an arbitrary Response<Body> into Response<BoxBody>\n\nfn to_box_body(resp: Response<Body>) -> Response<BoxBody> {\n\n let (parts, body) = resp.into_parts();\n\n let body = box_body(body);\n\n\n\n Response::from_parts(parts, body)\n\n}\n\n\n", "file_path": "src/routes.rs", "rank": 46, "score": 56259.295001065235 }, { "content": " Ok(())\n\n }\n\n\n\n /// Searches the index with a given query.\n\n ///\n\n /// The index will use fuzzy matching based on levenshtein distance\n\n /// if set to true.\n\n pub(super) async fn search(&self, payload: QueryPayload) -> Result<QueryResults> {\n\n let _permit = self.limiter.acquire().await?;\n\n\n\n let (resolve, waiter) = oneshot::channel();\n\n\n\n let doc_id = match (self.schema.get_field(\"_id\"), payload.document) {\n\n (None, _) => Err(Error::msg(\n\n \"missing a required private field, this is a bug.\",\n\n )),\n\n (_, None) => Ok(None),\n\n (Some(field), Some(doc_id)) => Ok(Some(Term::from_field_u64(field, doc_id))),\n\n }?;\n\n\n", "file_path": "engine/src/index/reader.rs", "rank": 47, "score": 54055.60154275264 }, { "content": " None => {\n\n return Ok(None)\n\n }\n\n };\n\n\n\n let mut parser = QueryParser::for_index(index, field);\n\n parser.set_conjunction_by_default();\n\n 
match parser.parse_query(query) {\n\n Ok(q) => Ok(Some(q)),\n\n Err(err) => Err(anyhow::Error::new(err))\n\n }\n\n })\n\n .filter_map(|s| s.transpose())\n\n .collect::<Result<Vec<_>, _>>()?;\n\n Ok(Box::new(BooleanQuery::intersection(queries)) as Box<dyn Query>)\n\n },\n\n (QueryMode::Fuzzy, None, _) => Err(Error::msg(\n\n \"query mode was `Fuzzy` but query string is `None`\",\n\n )),\n\n (QueryMode::Fuzzy, Some(Either::Left(query)), _) => {\n", "file_path": "engine/src/index/reader.rs", "rank": 48, "score": 54045.91896189418 }, { "content": " }\n\n\n\n // Unlock waiters so that they dont deadlock the system.\n\n while let Some(waiter) = self.waiters.pop() {\n\n let _ = waiter.send(());\n\n }\n\n\n\n let _ = self.shutdown.try_send(());\n\n info!(\"[ WRITER @ {} ] shutdown complete!\", &self.index_name);\n\n }\n\n\n\n /// Purges all pending operations from the receiver.\n\n fn process_messages(&mut self) -> bool {\n\n while let Ok(msg) = self.rx.try_recv() {\n\n debug!(\n\n \"[ WRITER @ {} ] handling operation {:?}\",\n\n &self.index_name, msg\n\n );\n\n match self.handle_msg(msg) {\n\n Err(e) => error!(\n", "file_path": "engine/src/index/writer.rs", "rank": 49, "score": 54044.65786379583 }, { "content": " \"[ WRITER @ {} ] failed handling writer operation on index due to error: {:?}\",\n\n &self.index_name, e,\n\n ),\n\n Ok(true) => return true,\n\n _ => {},\n\n }\n\n }\n\n\n\n false\n\n }\n\n\n\n fn handle_msg(&mut self, op: WriterOp) -> Result<bool> {\n\n let (transaction_id, type_) = match op {\n\n WriterOp::__Shutdown => return Ok(true),\n\n WriterOp::Commit => (self.writer.commit()?, \"COMMIT\"),\n\n WriterOp::Rollback => (self.writer.rollback()?, \"ROLLBACK\"),\n\n WriterOp::AddDocument(docs) => (self.writer.add_document(docs), \"ADD-DOCUMENT\"),\n\n WriterOp::DeleteAll => (self.writer.delete_all_documents()?, \"DELETE-ALL\"),\n\n WriterOp::DeleteTerm(term) => (self.writer.delete_term(term), \"DELETE-TERM\"),\n\n };\n", "file_path": 
"engine/src/index/writer.rs", "rank": 50, "score": 54043.93655097363 }, { "content": " /// Deletes any documents matching the given term.\n\n DeleteTerm(Term),\n\n\n\n /// Removes all documents from the index.\n\n DeleteAll,\n\n\n\n /// Shutdown the handler.\n\n __Shutdown,\n\n}\n\n\n\n/// A background task that applies write operations to the index.\n\n///\n\n/// This system uses the actor model receiving a stream of messages\n\n/// and processes them in order of being sent.\n\n///\n\n/// Messages are ran in a new thread.\n\npub struct IndexWriterWorker {\n\n index_name: String,\n\n writer: IndexWriter,\n\n waiters: Arc<SegQueue<oneshot::Sender<()>>>,\n", "file_path": "engine/src/index/writer.rs", "rank": 51, "score": 54043.32150035774 }, { "content": "\n\n /// Sends a message to the writer worker\n\n ///\n\n /// If there is space in the queue this will complete immediately\n\n /// otherwise this will wait until it's woken up again.\n\n pub(super) async fn send_op(&self, op: WriterOp) -> anyhow::Result<()> {\n\n let mut op = op;\n\n loop {\n\n op = match self.writer_sender.try_send(op) {\n\n Ok(()) => return Ok(()),\n\n Err(channel::TrySendError::Disconnected(_)) => {\n\n return Err(Error::msg(\"writer worker has shutdown\"))\n\n },\n\n Err(channel::TrySendError::Full(v)) => v,\n\n };\n\n\n\n debug!(\n\n \"[ WRITER @ {} ] operation queue full, waiting for wakeup\",\n\n &self.index_name\n\n );\n\n\n\n let (resolve, waiter) = oneshot::channel();\n\n self.writer_waiters.push(resolve);\n\n let _ = waiter.await;\n\n }\n\n }\n\n}\n", "file_path": "engine/src/index/writer.rs", "rank": 52, "score": 54043.102752477746 }, { "content": "\n\n\n\nuse crate::correction::{self, correct_sentence};\n\nuse crate::structures::{QueryMode, QueryPayload};\n\nuse crate::index::executor::ExecutorPool;\n\nuse std::borrow::Borrow;\n\n\n\n/// Attempts to get a document otherwise sending an error\n\n/// back to the resolve channel.\n\nmacro_rules! 
try_get_doc {\n\n ($resolve:expr, $searcher:expr, $doc:expr, $executor:expr) => {{\n\n let res = $searcher.search_with_executor(\n\n &TermQuery::new($doc, IndexRecordOption::Basic),\n\n &TopDocs::with_limit(1),\n\n $executor,\n\n );\n\n\n\n let res: Vec<(f32, DocAddress)> = match res {\n\n Err(e) => {\n\n let _ = $resolve.send(Err(Error::from(e)));\n", "file_path": "engine/src/index/reader.rs", "rank": 53, "score": 54042.76241544285 }, { "content": " ///\n\n /// This counts as a concurrent action.\n\n pub(super) async fn get_doc(&self, doc_address: u64) -> Result<NamedFieldDocument> {\n\n let _permit = self.limiter.acquire().await?;\n\n\n\n let (resolve, waiter) = oneshot::channel();\n\n let searcher = self.reader.searcher();\n\n let executor = self.executor_pool.acquire()?;\n\n let field = self\n\n .schema\n\n .get_field(\"_id\")\n\n .ok_or_else(|| Error::msg(\"missing a required private field, this is a bug.\"))?;\n\n\n\n self.thread_pool.spawn(move || {\n\n let term = Term::from_field_u64(field, doc_address);\n\n let doc = try_get_doc!(resolve, searcher, term, executor.borrow());\n\n let doc = searcher.doc(doc).map_err(Error::from);\n\n let _ = resolve.send(doc);\n\n });\n\n\n", "file_path": "engine/src/index/reader.rs", "rank": 54, "score": 54041.049539696636 }, { "content": " let result = waiter.await??;\n\n let doc = self.schema.to_named_doc(&result);\n\n\n\n Ok(doc)\n\n }\n\n\n\n /// Shuts down the thread pools and acquires all permits\n\n /// shutting the index down.\n\n ///\n\n /// Thread pools are shutdown asynchronously via Rayon's handling.\n\n pub(super) async fn shutdown(&self) -> Result<()> {\n\n // Wait till all searches have been completed.\n\n let _ = self\n\n .limiter\n\n .acquire_many(self.max_concurrency as u32)\n\n .await?;\n\n self.limiter.close();\n\n\n\n self.executor_pool.shutdown();\n\n\n", "file_path": "engine/src/index/reader.rs", "rank": 55, "score": 54040.55348453233 }, { "content": "\n\n let start = std::time::Instant::now();\n\n 
self.thread_pool.spawn(move || {\n\n let ref_document = match doc_id {\n\n None => None,\n\n Some(doc) => {\n\n let doc = try_get_doc!(resolve, searcher, doc, executor.borrow());\n\n Some(doc)\n\n },\n\n };\n\n\n\n let query = match parse_query(\n\n searcher.index(),\n\n parser,\n\n search_fields,\n\n match (payload.query.is_some(), payload.map.is_empty()) {\n\n (true, _) => Some(Either::Left(payload.query.unwrap())),\n\n (_, false) => Some(Either::Right(payload.map)),\n\n _ => None\n\n },\n", "file_path": "engine/src/index/reader.rs", "rank": 56, "score": 54040.29032162795 }, { "content": " schema_copy: Schema,\n\n use_fast_fuzzy: bool,\n\n strip_stop_words: bool,\n\n ) -> Result<Self> {\n\n if use_fast_fuzzy {\n\n warn!(\"[ READER @ {} ] 'Normal' queries will behave differently with TEXT type fields due to fast-fuzzy.\", &index_name);\n\n }\n\n\n\n let limiter = Semaphore::new(max_concurrency);\n\n\n\n let name = index_name.clone();\n\n let thread_pool = {\n\n rayon::ThreadPoolBuilder::new()\n\n .num_threads(max_concurrency)\n\n .thread_name(move |n| format!(\"index-{}-worker-{}\", name.clone(), n))\n\n .build()?\n\n };\n\n\n\n let executor_pool = ExecutorPool::create(\n\n &index_name,\n", "file_path": "engine/src/index/reader.rs", "rank": 57, "score": 54040.03670039557 }, { "content": " use_fast_fuzzy: bool,\n\n\n\n /// Whether or not to strip out stop words in fuzzy queries.\n\n ///\n\n /// This only applies to the fast-fuzzy query system.\n\n strip_stop_words: bool,\n\n}\n\n\n\nimpl IndexReaderHandler {\n\n /// Creates a new reader handler from an existing tantivy index reader.\n\n ///\n\n /// This will spawn a thread pool with `n` amount of threads equal\n\n /// to the set `max_concurrency`.\n\n pub(super) fn create(\n\n index_name: String,\n\n max_concurrency: usize,\n\n reader: IndexReader,\n\n reader_threads: usize,\n\n parser: QueryParser,\n\n search_fields: Vec<(Field, Score)>,\n", "file_path": "engine/src/index/reader.rs", "rank": 58, "score": 
54039.80059046625 }, { "content": " );\n\n Executor::multi_thread(threads_per_reader, \"index-reader-\")?\n\n } else {\n\n info!(\n\n \"[ READER {} @ {} ] executor startup, mode: single-threaded (no-op)\",\n\n i, index_name,\n\n );\n\n Executor::single_thread()\n\n };\n\n\n\n if let Err(_) = executors.push(executor) {\n\n panic!(\"executor pool was full yet executor was not added on creation, this is a bug.\")\n\n };\n\n }\n\n\n\n let executors = Arc::new(executors);\n\n\n\n Ok(Self {\n\n executors,\n\n index_name: Arc::new(index_name.to_string())\n", "file_path": "engine/src/index/executor.rs", "rank": 59, "score": 54038.29705899792 }, { "content": " ref_document,\n\n payload.mode,\n\n use_fast_fuzzy,\n\n strip_stop_words,\n\n ) {\n\n Err(e) => {\n\n info!(\"rejecting parse\");\n\n let _ = resolve.send(Err(e));\n\n return;\n\n },\n\n Ok(q) => q,\n\n };\n\n\n\n let res = search(query, searcher, executor.borrow(), limit, offset, schema, order_by);\n\n let _ = resolve.send(res);\n\n });\n\n\n\n let mut res = waiter.await??;\n\n let time_taken = start.elapsed();\n\n info!(\n", "file_path": "engine/src/index/reader.rs", "rank": 60, "score": 54038.07061491585 }, { "content": "use anyhow::{Result, Error};\n\nuse std::sync::Arc;\n\nuse std::borrow::Borrow;\n\nuse crossbeam::queue::ArrayQueue;\n\nuse tantivy::Executor;\n\n\n\n#[derive(Clone)]\n\npub(super) struct ExecutorPool {\n\n index_name: Arc<String>,\n\n executors: Arc<ArrayQueue<Executor>>,\n\n}\n\n\n\nimpl ExecutorPool {\n\n pub(super) fn create(index_name: &str, pool_size: usize, threads_per_reader: usize) -> Result<Self> {\n\n let executors = ArrayQueue::new(pool_size);\n\n for i in 0..pool_size {\n\n let executor = if threads_per_reader > 1 {\n\n info!(\n\n \"[ READER {} @ {} ] executor startup, mode: multi-threaded, threads: {}\",\n\n i, index_name, threads_per_reader\n", "file_path": "engine/src/index/executor.rs", "rank": 61, "score": 54037.57635509249 }, { "content": "\n\n trace!(\n\n \"[ WRITER @ {} ][ 
TRANSACTION {} ] completed operation {}\",\n\n &self.index_name, transaction_id, type_\n\n );\n\n\n\n Ok(false)\n\n }\n\n}\n\n\n\n/// A simple wrapper handler around a set of queues and a worker.\n\n///\n\n/// This manages creating the waiters and scheduling the operations\n\n/// in a new thread.\n\npub(super) struct IndexWriterHandler {\n\n index_name: String,\n\n writer_waiters: Arc<SegQueue<oneshot::Sender<()>>>,\n\n writer_sender: crossbeam::channel::Sender<WriterOp>,\n\n}\n\n\n", "file_path": "engine/src/index/writer.rs", "rank": 62, "score": 54037.384067076 }, { "content": " let qry = if use_fast_fuzzy {\n\n parse_fast_fuzzy_query(query, search_fields, strip_stop_words)?\n\n } else {\n\n parse_fuzzy_query(query, search_fields)\n\n };\n\n Ok(qry)\n\n },\n\n (QueryMode::Fuzzy, Some(Either::Right(_)), _) => Err(Error::msg(\n\n \"query mode was `Fuzzy` but query string is `None`\",\n\n )),\n\n (QueryMode::MoreLikeThis, _, None) => Err(Error::msg(\n\n \"query mode was `MoreLikeThis` but reference document is `None`\",\n\n )),\n\n (QueryMode::MoreLikeThis, _, Some(ref_document)) => Ok(parse_more_like_this(ref_document)?),\n\n\n\n };\n\n\n\n debug!(\n\n \"constructing query {:?} or ref_doc {:?} with mode={:?} took {:?}\",\n\n query,\n\n ref_document,\n\n &mode,\n\n start.elapsed(),\n\n );\n\n\n\n return out;\n\n}\n\n\n", "file_path": "engine/src/index/reader.rs", "rank": 63, "score": 54037.089207710815 }, { "content": "/// threads, when setting the `max_concurrency` and `reader_threads` the total\n\n/// will result in `max_concurrency` * `reader_threads` threads spawned.\n\npub(super) struct IndexReaderHandler {\n\n /// The name of the index the handler belongs to.\n\n name: String,\n\n\n\n /// The internal tantivy index reader.\n\n reader: IndexReader,\n\n\n\n /// The reader thread pool executors.\n\n ///\n\n /// This creates n amount of executors equal to the max_concurrency\n\n /// **WARNING:** THIS CAN CAUSE AN *INSANE* AMOUNT OF THREADS TO BE SPAWNED.\n\n 
///\n\n /// If the number of reader threads is > 1 this is a MultiThreaded executor\n\n /// otherwise it's SingleThreaded.\n\n executor_pool: ExecutorPool,\n\n\n\n /// A concurrency semaphore.\n\n limiter: Semaphore,\n", "file_path": "engine/src/index/reader.rs", "rank": 64, "score": 54035.763816487604 }, { "content": " max_concurrency,\n\n reader_threads,\n\n )?;\n\n\n\n Ok(Self {\n\n name: index_name,\n\n reader,\n\n executor_pool,\n\n limiter,\n\n max_concurrency,\n\n thread_pool,\n\n parser: Arc::new(parser),\n\n search_fields: Arc::new(search_fields),\n\n schema: schema_copy,\n\n use_fast_fuzzy,\n\n strip_stop_words,\n\n })\n\n }\n\n\n\n /// Gets a document with a given address.\n", "file_path": "engine/src/index/reader.rs", "rank": 65, "score": 54035.38317492471 }, { "content": " };\n\n\n\n let elapsed = start.elapsed();\n\n\n\n debug!(\n\n \"thread runtime took {:?}s with limit: {} and {} results total\",\n\n elapsed, limit, count\n\n );\n\n\n\n Ok(QueryResults {\n\n time_taken: 0f32, // filled in by handler later\n\n hits,\n\n count,\n\n })\n\n}\n", "file_path": "engine/src/index/reader.rs", "rank": 66, "score": 54034.705567983736 }, { "content": " })\n\n }\n\n\n\n pub(super) fn shutdown(&self) {\n\n while let Some(exec) = self.executors.pop() {\n\n drop(exec);\n\n }\n\n }\n\n\n\n pub(super) fn acquire(&self) -> Result<ExecutorHandle> {\n\n debug!(\"[ EXECUTOR-POOL @ {} ] taking executor from pool\", self.index_name.as_ref());\n\n if let Some(executor) = self.executors.pop() {\n\n Ok(ExecutorHandle {\n\n inner: Some(executor),\n\n queue: self.executors.clone(),\n\n index_name: self.index_name.clone(),\n\n })\n\n } else {\n\n Err(Error::msg(\"no executors were available even though the max concurrent was not reached\"))\n\n }\n", "file_path": "engine/src/index/executor.rs", "rank": 67, "score": 54034.65840653265 }, { "content": "\n\n /// The maximum concurrency of searches at one time.\n\n max_concurrency: usize,\n\n\n\n /// The execution thread pool.\n\n 
thread_pool: rayon::ThreadPool,\n\n\n\n /// The configured query parser pre-weighted.\n\n parser: Arc<QueryParser>,\n\n\n\n /// The set of indexed fields to search in a given query.\n\n search_fields: Arc<Vec<(Field, Score)>>,\n\n\n\n /// A cheaply cloneable schema reference.\n\n schema: Schema,\n\n\n\n /// Whether or not to use the fast fuzzy symspell correction system or not.\n\n ///\n\n /// This greatly improves the performance of searching at the cost\n\n /// of document indexing time and memory usage (standard dict set uses 1.2GB generally).\n", "file_path": "engine/src/index/reader.rs", "rank": 68, "score": 54034.613461500216 }, { "content": " };\n\n\n\n std::thread::Builder::new()\n\n .name(format!(\"index-worker-{}\", &index_name))\n\n .spawn(move || {\n\n let id = std::thread::current().id();\n\n info!(\n\n \"[ WRITER @ {} ] writer thread started with id {:?}\",\n\n name, id\n\n );\n\n worker.start()\n\n })\n\n .expect(\"spawn worker thread\");\n\n\n\n Self {\n\n index_name,\n\n writer_sender: tx,\n\n writer_waiters: waiters,\n\n }\n\n }\n", "file_path": "engine/src/index/writer.rs", "rank": 69, "score": 54034.59679737453 }, { "content": "\n\nmacro_rules! order_and_search {\n\n ( $search:expr, $collector:expr, $field:expr, $query:expr, $executor:expr) => {{\n\n let collector = $collector.order_by_fast_field($field);\n\n $search.search_with_executor($query, &(collector, Count), $executor)\n\n }};\n\n}\n\n\n\nmacro_rules! 
process_search {\n\n ( $search:expr, $schema:expr, $top_docs:expr ) => {{\n\n let mut hits = Vec::with_capacity($top_docs.len());\n\n for (ratio, ref_address) in $top_docs {\n\n let retrieved_doc = $search.doc(ref_address)?;\n\n let mut doc = $schema.to_named_doc(&retrieved_doc);\n\n let id = doc.0\n\n .remove(\"_id\")\n\n .ok_or_else(|| Error::msg(\"document has been missed labeled (missing identifier tag), the dataset is invalid\"))?;\n\n\n\n if let Value::U64(v) = id[0] {\n\n hits.push(QueryHit {\n", "file_path": "engine/src/index/reader.rs", "rank": 70, "score": 54033.714282887144 }, { "content": " }\n\n}\n\n\n\npub(super) struct ExecutorHandle {\n\n index_name: Arc<String>,\n\n inner: Option<Executor>,\n\n queue: Arc<ArrayQueue<Executor>>,\n\n}\n\n\n\nimpl Borrow<Executor> for ExecutorHandle {\n\n fn borrow(&self) -> &Executor {\n\n let ref_ = self.inner.as_ref();\n\n ref_.expect(\"extract executor\")\n\n }\n\n}\n\n\n\nimpl Drop for ExecutorHandle {\n\n fn drop(&mut self) {\n\n debug!(\"[ EXECUTOR-POOL @ {} ] returning executor to pool\", self.index_name.as_ref());\n\n if let Some(inner) = self.inner.take() {\n\n let maybe_err = self.queue.push(inner);\n\n if maybe_err.is_err() {\n\n panic!(\"failed to return executor to pool\");\n\n }\n\n };\n\n }\n\n}", "file_path": "engine/src/index/executor.rs", "rank": 71, "score": 54033.12458539712 }, { "content": " document_id: format!(\"{}\", v),\n\n doc,\n\n ratio: serde_json::json!(ratio),\n\n });\n\n } else {\n\n return Err(Error::msg(\"document has been missed labeled (missing identifier tag), the dataset is invalid\"))\n\n }\n\n }\n\n\n\n hits\n\n }};\n\n}\n\n\n", "file_path": "engine/src/index/reader.rs", "rank": 72, "score": 54032.68234529049 }, { "content": " \"[ SEARCH @ {} ] took {:?} with limit={}, mode={} and {} results total\",\n\n &self.name,\n\n time_taken,\n\n limit,\n\n if let QueryMode::Fuzzy = mode {\n\n if use_fast_fuzzy {\n\n \"FastFuzzy\".to_string()\n\n } else {\n\n \"Fuzzy\".to_string()\n\n 
}\n\n } else {\n\n format!(\"{:?}\", mode)\n\n },\n\n res.count\n\n );\n\n\n\n res.time_taken = time_taken.as_secs_f32();\n\n\n\n Ok(res)\n\n }\n\n}\n\n\n", "file_path": "engine/src/index/reader.rs", "rank": 73, "score": 54032.67247815219 }, { "content": "impl IndexWriterHandler {\n\n /// Creates a new writer handler from a given index name and\n\n /// a given index writer.\n\n ///\n\n /// This creates a bounded queue with a capacity of 20 and\n\n /// spawns a worker in a new thread.\n\n pub(super) fn create(\n\n index_name: String,\n\n writer: IndexWriter,\n\n shutdown: async_channel::Sender<()>,\n\n ) -> Self {\n\n let name = index_name.clone();\n\n let waiters = Arc::new(SegQueue::new());\n\n let (tx, rx) = channel::bounded(20);\n\n let worker = IndexWriterWorker {\n\n index_name: index_name.clone(),\n\n writer,\n\n waiters: waiters.clone(),\n\n rx,\n\n shutdown,\n", "file_path": "engine/src/index/writer.rs", "rank": 74, "score": 54032.25017269785 }, { "content": " rx: channel::Receiver<WriterOp>,\n\n shutdown: async_channel::Sender<()>,\n\n}\n\n\n\nimpl IndexWriterWorker {\n\n /// Starts processing messages until a shutdown operation is sent.\n\n ///\n\n /// This processes operations in waves before waking up waiters,\n\n /// this means all operations currently in the queue will be processed\n\n /// first before any waiters are woken up to send more data.\n\n fn start(mut self) {\n\n loop {\n\n if self.process_messages() {\n\n break;\n\n };\n\n\n\n // Wake up waiters once a message has been removed.\n\n while let Some(waiter) = self.waiters.pop() {\n\n let _ = waiter.send(());\n\n }\n", "file_path": "engine/src/index/writer.rs", "rank": 75, "score": 54031.98963741954 }, { "content": "use std::sync::Arc;\n\n\n\nuse anyhow::{Error, Result};\n\nuse serde::Serialize;\n\nuse tantivy::collector::{Count, TopDocs};\n\nuse tantivy::query::{\n\n BooleanQuery,\n\n BoostQuery,\n\n EmptyQuery,\n\n FuzzyTermQuery,\n\n MoreLikeThisQuery,\n\n Occur,\n\n Query,\n\n 
QueryParser,\n\n TermQuery,\n\n};\n\nuse tantivy::schema::{Field, FieldType, IndexRecordOption, NamedFieldDocument, Schema, Value};\n\nuse tantivy::{DocAddress, Executor, IndexReader, LeasedItem, Score, Searcher, Term};\n\nuse tokio::sync::{oneshot, Semaphore};\n\nuse hashbrown::HashMap;\n", "file_path": "engine/src/index/reader.rs", "rank": 76, "score": 54031.7523232126 }, { "content": "use std::sync::Arc;\n\n\n\nuse anyhow::{Error, Result};\n\nuse crossbeam::channel;\n\nuse crossbeam::queue::SegQueue;\n\nuse tantivy::{Document, IndexWriter, Term};\n\nuse tokio::sync::oneshot;\n\n\n\n/// A writing operation to be sent to the `IndexWriterWorker`.\n\n#[derive(Debug)]\n\npub(super) enum WriterOp {\n\n /// Commits the current changes and flushes to storage.\n\n Commit,\n\n\n\n /// Removes any changes since the last commit.\n\n Rollback,\n\n\n\n /// Adds a document to the index.\n\n AddDocument(Document),\n\n\n", "file_path": "engine/src/index/writer.rs", "rank": 77, "score": 54030.8530815709 }, { "content": " let order_by = if let Some(ref field) = payload.order_by {\n\n // We choose to ignore the order by if the field doesnt exist.\n\n // While this may be surprising to be at first as long as it's\n\n // document this should be fine.\n\n self.schema.get_field(field)\n\n } else {\n\n None\n\n };\n\n\n\n let schema = self.schema.clone();\n\n let parser = self.parser.clone();\n\n let limit = payload.limit;\n\n let offset = payload.offset;\n\n let mode = payload.mode;\n\n let use_fast_fuzzy = self.use_fast_fuzzy && correction::enabled();\n\n\n\n let strip_stop_words = self.strip_stop_words;\n\n let search_fields = self.search_fields.clone();\n\n let searcher = self.reader.searcher();\n\n let executor = self.executor_pool.acquire()?;\n", "file_path": "engine/src/index/reader.rs", "rank": 78, "score": 54029.1460277678 }, { "content": " }\n\n }\n\n }\n\n\n\n for search_term in words.iter() {\n\n debug!(\"making fast-fuzzy term for {}\", &search_term);\n\n if 
ignore_stop_words && stop_words.contains(*search_term) {\n\n continue;\n\n }\n\n\n\n for (field, boost) in search_fields.iter() {\n\n let term = Term::from_field_text(*field, *search_term);\n\n let query = Box::new(TermQuery::new(term, IndexRecordOption::WithFreqs));\n\n\n\n if *boost > 0.0f32 {\n\n parts.push((Occur::Should, Box::new(BoostQuery::new(query, *boost))));\n\n continue;\n\n }\n\n\n\n parts.push((Occur::Should, query));\n\n }\n\n }\n\n\n\n Ok(Box::new(BooleanQuery::from(parts)))\n\n}\n\n\n", "file_path": "engine/src/index/reader.rs", "rank": 79, "score": 54028.958069930166 }, { "content": " return;\n\n },\n\n Ok(res) => res,\n\n };\n\n\n\n if res.len() == 0 {\n\n let _ = $resolve.send(Err(Error::msg(\"no document exists with this id\")));\n\n return;\n\n }\n\n\n\n res[0].1\n\n }};\n\n}\n\n\n\n#[derive(Debug)]\n", "file_path": "engine/src/index/reader.rs", "rank": 80, "score": 54028.93474441763 }, { "content": "\n\n /// The content of the document itself.\n\n pub(super) doc: NamedFieldDocument,\n\n\n\n /// The ratio calculated for the search term and doc.\n\n pub(super) ratio: serde_json::Value,\n\n}\n\n\n\n/// Represents the overall query result(s)\n\n#[derive(Serialize)]\n\npub struct QueryResults {\n\n /// The retrieved documents.\n\n hits: Vec<QueryHit>,\n\n\n\n /// The total amount of documents matching the search\n\n count: usize,\n\n\n\n /// The amount of time taken to search in seconds.\n\n time_taken: f32,\n\n}\n", "file_path": "engine/src/index/reader.rs", "rank": 81, "score": 54028.39566028018 }, { "content": " FieldType::U64(_) => {\n\n let out: (Vec<(u64, DocAddress)>, usize) =\n\n order_and_search!(searcher, collector, field, &query, executor)?;\n\n (process_search!(searcher, schema, out.0), out.1)\n\n },\n\n FieldType::F64(_) => {\n\n let out: (Vec<(f64, DocAddress)>, usize) =\n\n order_and_search!(searcher, collector, field, &query, executor)?;\n\n (process_search!(searcher, schema, out.0), out.1)\n\n },\n\n FieldType::Date(_) => {\n\n 
let out: (Vec<(i64, DocAddress)>, usize) =\n\n order_and_search!(searcher, collector, field, &query, executor)?;\n\n (process_search!(searcher, schema, out.0), out.1)\n\n },\n\n _ => return Err(Error::msg(\"field is not a fast field\")),\n\n }\n\n } else {\n\n let (out, count) = searcher.search_with_executor(&query, &(collector, Count), executor)?;\n\n (process_search!(searcher, schema, out), count)\n", "file_path": "engine/src/index/reader.rs", "rank": 82, "score": 54027.21510161867 }, { "content": "/// Modifies select responses.\n\n///\n\n/// If a response has a status code of 404, 405 or 500 a custom\n\n/// response is used.\n\npub fn map_status(resp: Response<BoxBody>) -> Response<BoxBody> {\n\n let status = resp.status();\n\n if status == StatusCode::NOT_FOUND {\n\n return to_box_body(json_response(StatusCode::NOT_FOUND, \"route not found\"));\n\n } else if status == StatusCode::METHOD_NOT_ALLOWED {\n\n return to_box_body(json_response(\n\n StatusCode::METHOD_NOT_ALLOWED,\n\n \"method not allowed\",\n\n ));\n\n } else if status == StatusCode::INTERNAL_SERVER_ERROR {\n\n return to_box_body(json_response(\n\n StatusCode::INTERNAL_SERVER_ERROR,\n\n \"internal server error while handling request\",\n\n ));\n\n }\n\n\n\n resp\n\n}\n", "file_path": "src/routes.rs", "rank": 83, "score": 52193.48706093754 }, { "content": "#[derive(Serialize)]\n\nstruct QueryPayload {\n\n q: String,\n\n}\n\n\n\nasync fn search(client: RequestClient, uri: TargetUri, query: Query) -> anyhow::Result<u16> {\n\n let r = client\n\n .post(uri.as_ref())\n\n .json(&QueryPayload { q: query })\n\n .send()\n\n .await?;\n\n\n\n Ok(r.status().as_u16())\n\n}\n", "file_path": "lnxcli/benchmark/src/meilisearch.rs", "rank": 84, "score": 49274.3683542675 }, { "content": "#[derive(Debug)]\n\nenum Either<A, B> {\n\n Left(A),\n\n Right(B),\n\n}\n\n\n\n/// A async manager around the tantivy index reader.\n\n///\n\n/// This system executes the read operations in a given thread pool\n\n/// managed by rayon 
which will allow a concurrency upto the set\n\n/// `max_concurrency`.\n\n///\n\n/// If the system is at it's maximum concurrency already and search\n\n/// is called again, it will temporarily suspend operations until\n\n/// a reader has been freed.\n\n///\n\n/// This system will also spawn `n` executors with `y` reader threads\n\n/// where `n` is the max concurrency set and `y` is the reader threads.\n\n///\n\n/// #### WARNING: HIGH THREAD USAGE\n\n/// This system has the potential to spawn and incredibly large amount of\n", "file_path": "engine/src/index/reader.rs", "rank": 85, "score": 48909.11789036685 }, { "content": "#[derive(Debug, Deserialize)]\n\nstruct EnqueueResponseData {\n\n #[serde(rename = \"updateId\")]\n\n update_id: usize,\n\n}\n\n\n", "file_path": "lnxcli/benchmark/src/meilisearch.rs", "rank": 86, "score": 48077.57688265258 }, { "content": "/// Produces a standard Response<BoxBody> from a given status code and value.\n\n///\n\n/// The value is expected to implement `Serialize`, this function will\n\n/// panic if the value is unable to be serialized.\n\n///\n\n/// The response is automatically tagged with the `application/json` datatype.\n\npub fn json_response<T: Serialize + ?Sized>(status: StatusCode, value: &T) -> Response<Body> {\n\n let val = &json!({\n\n \"status\": status.as_u16(),\n\n \"data\": value,\n\n });\n\n\n\n let buff = serde_json::to_vec(val).expect(\"serialize data\");\n\n\n\n let mut resp = Response::builder()\n\n .status(status)\n\n .body(axum::body::Body::from(buff))\n\n .unwrap();\n\n\n\n resp.headers_mut().typed_insert(ContentType::json());\n\n\n\n resp\n\n}\n", "file_path": "src/responders.rs", "rank": 87, "score": 46096.51690918356 }, { "content": "def get_random_words(data: list, k=5000) -> list:\n\n titles = []\n\n for item in data:\n\n titles.extend(item['title'].split(\" \"))\n\n\n\n searches = []\n\n for _ in range(k):\n\n searches.append(\" \".join(random.choices(titles, k=random.randint(1, 10))))\n\n\n", "file_path": 
"tests/load/load_movies.py", "rank": 88, "score": 44198.63844121666 }, { "content": "def get_random_words(data: list, k=5000) -> list:\n\n titles = []\n\n for item in data:\n\n titles.extend(item['title'].split(\" \"))\n\n\n\n searches = []\n\n for _ in range(k):\n\n searches.append(\" \".join(random.choices(titles, k=random.randint(1, 10))))\n\n\n", "file_path": "tests/load/generate_samples.py", "rank": 89, "score": 44198.63844121666 }, { "content": "\"\"\"\n\nThis is a really, really, really basic python script to load up\n\nall the documents from /datasets/movies into meilisearch and lnx\n\nso I can get a vague idea on where lnx needs to be tweaked.\n\n\n\n*You should not use this as a benchmark*\n\n\"\"\"\n\n\n\nimport json\n\nimport requests\n\nimport time\n\nimport random\n\n\n\n\n\ndef load_movies() -> list:\n\n with open(\"../../benchmarks/datasets/reference/movies/movies.json\", encoding=\"UTF-8\") as file:\n\n return json.load(file)\n\n\n\n\n\ndef list_all(item: dict):\n\n item = item.copy()\n\n for key in item.keys():\n\n if isinstance(item[key], list):\n\n continue\n\n\n\n item[key] = [item[key]]\n\n\n\n return item\n\n\n\n\n\ndef convert_for_lnx(data):\n\n return list(map(list_all, data))\n\n\n\n\n\ndef chunks(data: list):\n\n return zip(*[iter(data)]*2500)\n\n\n\n\n\ndef get_random_words(data: list, k=5000) -> list:\n\n titles = []\n\n for item in data:\n\n titles.extend(item['title'].split(\" \"))\n\n\n\n searches = []\n\n for _ in range(k):\n\n searches.append(\" \".join(random.choices(titles, k=random.randint(1, 10))))\n\n\n\n return searches\n\n\n\n\n\ndef load_lnx():\n\n data = load_movies()\n\n movies = convert_for_lnx(data)\n\n\n\n r = requests.delete(\"http://127.0.0.1:8000/indexes/movies/documents/clear\")\n\n print(r.json())\n\n r.raise_for_status()\n\n\n\n start = time.perf_counter()\n\n for chunk in chunks(movies):\n\n r = requests.post(\"http://127.0.0.1:8000/indexes/movies/documents\", json=chunk)\n\n r.raise_for_status()\n\n\n\n 
requests.post(\"http://127.0.0.1:8000/indexes/movies/commit\")\n\n stop = time.perf_counter() - start\n\n\n\n print(f\"finished loading data to lnx, took: {stop * 1000}ms overall\")\n\n\n\n session = requests.Session()\n\n samples = get_random_words(data)\n\n\n\n start = time.perf_counter()\n\n for sample in samples:\n\n r = session.get(\"http://127.0.0.1:8000/indexes/movies/search\", params={\"query\": sample})\n\n r.raise_for_status()\n\n stop = time.perf_counter() - start\n\n print(f\"Samples took: {stop * 1000}ms overall, with {(stop * 1000) / len(samples)}ms per iteration\")\n\n\n\n\n\ndef load_meilisearch():\n\n movies = load_movies()\n\n\n\n r = requests.delete(\"http://127.0.0.1:7700/indexes/movies/documents\")\n\n print(r.json())\n\n r.raise_for_status()\n\n\n\n start = time.perf_counter()\n\n for chunk in chunks(movies):\n\n r = requests.post(\"http://127.0.0.1:7700/indexes/movies/documents\", json=chunk)\n\n r.raise_for_status()\n\n\n\n stop = time.perf_counter() - start\n\n print(f\"finished loading data to meili, took: {stop * 1000}ms overall\")\n\n\n\n input(\"wait for meili\")\n\n\n\n session = requests.Session()\n\n samples = get_random_words(movies)\n\n\n\n start = time.perf_counter()\n\n for sample in samples:\n\n r = session.post(\"http://127.0.0.1:7700/indexes/movies/search\", json={\"q\": sample})\n\n r.raise_for_status()\n\n stop = time.perf_counter() - start\n\n print(f\"Samples took: {stop * 1000}ms overall, with {(stop * 1000) / len(samples)}ms per iteration\")\n\n\n\n\n\ndef start_loading():\n\n choice = input(\"load for meilisearch or lnx?\\n>>> \")\n\n\n\n if choice.lower() not in (\"lnx\", \"meilisearch\"):\n\n print(\"invalid choice options: 'meilisearch' or 'lnx'\")\n\n return\n\n\n\n if choice == \"lnx\":\n\n load_lnx()\n\n else:\n\n load_meilisearch()\n\n\n\n\n\nif __name__ == '__main__':\n\n start_loading()\n\n\n", "file_path": "tests/load/load_movies.py", "rank": 90, "score": 40463.5594501233 }, { "content": "import json\n\nimport 
random\n\n\n\n\n\ndef load_movies() -> list:\n\n with open(\"../../benchmarks/datasets/reference/movies/movies.json\", encoding=\"UTF-8\") as file:\n\n return json.load(file)\n\n\n\n\n\ndef get_random_words(data: list, k=5000) -> list:\n\n titles = []\n\n for item in data:\n\n titles.extend(item['title'].split(\" \"))\n\n\n\n searches = []\n\n for _ in range(k):\n\n searches.append(\" \".join(random.choices(titles, k=random.randint(1, 10))))\n\n\n\n return searches\n\n\n\n\n\nif __name__ == '__main__':\n\n movies = load_movies()\n\n random_searches = get_random_words(movies, k=10)\n\n\n\n with open(\"../../benchmarks/datasets/search_samples/samples2.json\", \"w+\", encoding=\"UTF-8\") as file:\n\n json.dump(random_searches, file)\n", "file_path": "tests/load/generate_samples.py", "rank": 91, "score": 40453.83751195699 }, { "content": "import json\n\n\n\n\n\ndef load_movies() -> list:\n\n with open(\"../../benchmarks/datasets/reference/movies/movies.json\", encoding=\"UTF-8\") as file:\n\n return json.load(file)\n\n\n\n\n\ndef list_all(item: dict):\n\n item = item.copy()\n\n for key in item.keys():\n\n if isinstance(item[key], list):\n\n continue\n\n\n\n item[key] = [item[key]]\n\n\n\n return item\n\n\n\n\n\ndef convert_for_lnx(data):\n\n return list(map(list_all, data))\n\n\n\n\n\nif __name__ == '__main__':\n\n movies = load_movies()\n\n movies = convert_for_lnx(movies)\n\n\n\n with open(\"../../benchmarks/datasets/converted/lnx_movies.json\", \"w+\", encoding=\"UTF-8\") as file:\n\n json.dump(movies, file)\n", "file_path": "tests/load/convert_movies.py", "rank": 92, "score": 40453.83751195699 }, { "content": "import json\n\nimport requests\n\n\n\n\n\nfile = open(\"./arxiv-metadata-oai-snapshot.json\", encoding=\"UTF-8\")\n\n\n\ndata = []\n\nwhile True:\n\n while len(data) < 10_000:\n\n doc = json.loads(file.readline())\n\n\n\n del doc['journal-ref']\n\n del doc['doi']\n\n del doc['report-no']\n\n del doc['categories']\n\n del doc['license']\n\n del 
doc['versions']\n\n del doc['authors_parsed']\n\n del doc['update_date']\n\n\n\n doc['id'] = [doc['id'].strip()]\n\n doc['submitter'] = [doc['submitter'].strip()] if doc['submitter'] else []\n\n doc['authors'] = [doc['authors'].strip()] if doc['authors'] else []\n\n doc['title'] = [doc['title'].strip()] if doc['title'] else []\n\n doc['comments'] = [doc['comments'].strip()] if doc['comments'] else []\n\n doc['abstract'] = [doc['abstract'].strip()] if doc['abstract'] else []\n\n\n\n data.append(doc)\n\n\n\n r = requests.post(\"http://127.0.0.1:8000/indexes/demo/documents\", json=data)\n\n r.raise_for_status()\n\n\n\n r = requests.post(\"http://127.0.0.1:8000/indexes/demo/commit\")\n\n r.raise_for_status()\n\n data = []\n\n print(\"added 10k docs\")\n\n\n\n\n\n\n", "file_path": "benchmarks/datasets/reference/arxiv/process.py", "rank": 93, "score": 37274.8409928035 }, { "content": "use std::sync::Arc;\n\n\n\nuse anyhow::{Error, Result};\n\nuse hashbrown::HashMap;\n\nuse tokio::sync::RwLock;\n\n\n\nuse crate::correction::enable_load_dictionaries;\n\nuse crate::index::IndexHandler;\n\nuse crate::storage::StorageManager;\n\nuse crate::structures::IndexDeclaration;\n\n\n\npub type LeasedIndex = Arc<IndexHandler>;\n\n\n\n/// A manager for a collection of indexes.\n\n///\n\n/// This will store index definitions as persistent json files and will\n\n/// load any existing indexes at creation time.\n\npub struct SearchEngine {\n\n storage: StorageManager,\n\n indexes: RwLock<HashMap<String, Arc<IndexHandler>>>,\n", "file_path": "engine/src/engine.rs", "rank": 94, "score": 35645.06537826463 }, { "content": "}\n\n\n\nimpl SearchEngine {\n\n /// Creates a new search engine loading the existing index metadata\n\n /// from the given directory.\n\n pub async fn create(dir: &str, enable_fast_fuzzy: bool) -> Result<Self> {\n\n crate::stop_words::init_stop_words()?;\n\n\n\n if enable_fast_fuzzy {\n\n info!(\"fuzzy search has been enabled! 
Beginning startup procedure.\");\n\n tokio::task::spawn_blocking(move || enable_load_dictionaries()).await??;\n\n }\n\n\n\n let storage = StorageManager::with_directory(dir.to_string()).await?;\n\n let loaded_indexes = storage.load_all().await?;\n\n\n\n // load previously defined indexes on restart.\n\n let mut indexes = HashMap::with_capacity(loaded_indexes.len());\n\n for loader in loaded_indexes {\n\n let name = loader.name.clone();\n", "file_path": "engine/src/engine.rs", "rank": 95, "score": 35643.952329243126 }, { "content": " let index = IndexHandler::build_loaded(loader).await?;\n\n\n\n indexes.insert(name, Arc::new(index));\n\n }\n\n\n\n Ok(Self {\n\n storage,\n\n indexes: RwLock::new(indexes),\n\n })\n\n }\n\n\n\n /// Adds a declared index to the search engine.\n\n ///\n\n /// This will set it in the index storage and then build the index handlers.\n\n pub async fn add_index(&self, index: IndexDeclaration, override_if_exists: bool) -> Result<()> {\n\n let remove = {\n\n let lock = self.indexes.read().await;\n\n if lock.contains_key(&index.name) {\n\n if !override_if_exists {\n\n debug!(\"[ ENGINE ] index already exists, ignoring override\");\n", "file_path": "engine/src/engine.rs", "rank": 96, "score": 35643.71980040488 }, { "content": " /// down the index.\n\n pub async fn remove_index(&self, index_name: &str) -> Result<()> {\n\n let value = { self.indexes.write().await.remove(index_name) };\n\n\n\n if value.is_none() {\n\n return Err(Error::msg(\"this index does not exit\"));\n\n }\n\n\n\n let value = value.unwrap();\n\n\n\n self.storage.remove_index_meta(&value.name).await?;\n\n\n\n // This just shuts down the system, we still require the ref\n\n // count to actually fully drop the index.\n\n value.shutdown().await?;\n\n\n\n Ok(())\n\n }\n\n\n\n /// Gets an index from the search engine.\n", "file_path": "engine/src/engine.rs", "rank": 97, "score": 35642.40424494384 }, { "content": " pub async fn get_index(&self, index_name: &str) -> Option<LeasedIndex> 
{\n\n let lock = self.indexes.read().await;\n\n Some(lock.get(index_name)?.clone())\n\n }\n\n\n\n pub async fn reset(&self) -> Result<()> {\n\n let mut lock = self.indexes.write().await;\n\n\n\n for (name, v) in lock.drain() {\n\n info!(\"[ CONTROLLER ] clearing {}\", &name);\n\n v.shutdown().await?;\n\n }\n\n\n\n self.storage.clear_all().await?;\n\n\n\n Ok(())\n\n }\n\n}\n", "file_path": "engine/src/engine.rs", "rank": 98, "score": 35639.96362515542 }, { "content": " let copy_index = index.clone();\n\n let loaded = copy_index.into_schema();\n\n let name = loaded.name.clone();\n\n let index_handler = Arc::new(IndexHandler::build_loaded(loaded).await?);\n\n\n\n // We must make sure to only save the metadata if the original making succeeded.\n\n self.storage.store_index_meta(&index).await?;\n\n\n\n {\n\n let mut lock = self.indexes.write().await;\n\n lock.insert(name, index_handler);\n\n debug!(\"[ ENGINE ] index re-create success\");\n\n };\n\n\n\n Ok(())\n\n }\n\n\n\n /// Removes an index from the engine.\n\n ///\n\n /// This will wait until all searches are complete before shutting\n", "file_path": "engine/src/engine.rs", "rank": 99, "score": 35638.19661772 } ]
Rust
src/forex.rs
iamsauravsharma/alpha_vantage
80fbfb6ea798c4f80b0151d8c6b85ff4793cbe0d
use std::collections::HashMap; use serde::Deserialize; use crate::{ api::{ApiClient, OutputSize, TimeSeriesInterval}, deserialize::from_str, error::{detect_common_helper_error, Error, Result}, }; #[derive(Debug, Clone, Default)] struct MetaData { information: String, from_symbol: String, to_symbol: String, last_refreshed: String, interval: Option<String>, output_size: Option<String>, time_zone: String, } #[derive(Default, Debug, Clone)] pub struct Entry { time: String, open: f64, high: f64, low: f64, close: f64, } impl Entry { #[must_use] pub fn time(&self) -> &str { &self.time } #[must_use] pub fn open(&self) -> f64 { self.open } #[must_use] pub fn high(&self) -> f64 { self.high } #[must_use] pub fn low(&self) -> f64 { self.low } #[must_use] pub fn close(&self) -> f64 { self.close } } #[derive(Debug, Default)] pub struct Forex { meta_data: MetaData, forex: Vec<Entry>, } impl Forex { #[must_use] pub fn information(&self) -> &str { self.return_meta_string("information") } #[must_use] pub fn symbol_from(&self) -> &str { self.return_meta_string("from symbol") } #[must_use] pub fn symbol_to(&self) -> &str { self.return_meta_string("to symbol") } #[must_use] pub fn last_refreshed(&self) -> &str { self.return_meta_string("last refreshed") } #[must_use] pub fn time_zone(&self) -> &str { self.return_meta_string("time zone") } #[must_use] pub fn interval(&self) -> Option<&str> { self.operate_option_meta_value("interval") } #[must_use] pub fn output_size(&self) -> Option<&str> { self.operate_option_meta_value("output size") } #[must_use] pub fn entry(&self) -> &Vec<Entry> { &self.forex } fn return_meta_string(&self, which_val: &str) -> &str { match which_val { "information" => &self.meta_data.information, "from symbol" => &self.meta_data.from_symbol, "to symbol" => &self.meta_data.to_symbol, "time zone" => &self.meta_data.time_zone, "last refreshed" => &self.meta_data.last_refreshed, _ => "", } } fn operate_option_meta_value(&self, which_val: &str) -> Option<&str> { let 
value = match which_val { "interval" => &self.meta_data.interval, "output size" => &self.meta_data.output_size, _ => &None, }; value.as_deref() } } #[derive(Clone, Debug, Deserialize)] struct EntryHelper { #[serde(rename = "1. open", deserialize_with = "from_str")] open: f64, #[serde(rename = "2. high", deserialize_with = "from_str")] high: f64, #[serde(rename = "3. low", deserialize_with = "from_str")] low: f64, #[serde(rename = "4. close", deserialize_with = "from_str")] close: f64, } #[derive(Debug, Deserialize)] pub(crate) struct ForexHelper { #[serde(rename = "Error Message")] error_message: Option<String>, #[serde(rename = "Information")] information: Option<String>, #[serde(rename = "Note")] note: Option<String>, #[serde(rename = "Meta Data")] meta_data: Option<HashMap<String, String>>, #[serde(flatten)] forex: Option<HashMap<String, HashMap<String, EntryHelper>>>, } impl ForexHelper { pub(crate) fn convert(self) -> Result<Forex> { let mut forex_struct = Forex::default(); detect_common_helper_error(self.information, self.error_message, self.note)?; if self.meta_data.is_none() || self.forex.is_none() { return Err(Error::EmptyResponse); } let meta_data = self.meta_data.unwrap(); let information = &meta_data["1. Information"]; let from_symbol = &meta_data["2. From Symbol"]; let to_symbol = &meta_data["3. To Symbol"]; let last_refreshed = meta_data.get("4. Last Refreshed"); let mut last_refreshed_value = last_refreshed.cloned(); if last_refreshed_value.is_none() { let last_refreshed = meta_data.get("5. Last Refreshed"); last_refreshed_value = last_refreshed.cloned(); } let last_refreshed_value = last_refreshed_value.expect("Last refreshed value contains None"); let time_zone = meta_data.get("5. Time Zone"); let mut time_zone_value = time_zone.cloned(); if time_zone_value.is_none() { let time_zone = meta_data.get("6. Time Zone"); time_zone_value = time_zone.cloned(); } if time_zone_value.is_none() { let time_zone = meta_data.get("7. 
Time Zone"); time_zone_value = time_zone.cloned(); } let time_zone_value = time_zone_value.expect("Time zone contains None value"); let output_size = meta_data.get("4. Output Size"); let mut output_size_value = output_size.cloned(); if output_size_value.is_none() { let output_size = meta_data.get("6. Output Size"); output_size_value = output_size.cloned(); } let interval = meta_data.get("5. Interval"); let interval_value = interval.cloned(); forex_struct.meta_data = MetaData { information: information.to_string(), from_symbol: from_symbol.to_string(), to_symbol: to_symbol.to_string(), last_refreshed: last_refreshed_value, interval: interval_value, output_size: output_size_value, time_zone: time_zone_value, }; let mut forex_entries: Vec<Entry> = Vec::new(); for hash in self.forex.unwrap().values() { for val in hash.keys() { let mut entry = Entry { time: val.to_string(), ..Entry::default() }; let entry_helper = hash .get(val) .expect("Cannot get a val from hash map") .clone(); entry.open = entry_helper.open; entry.high = entry_helper.high; entry.low = entry_helper.low; entry.close = entry_helper.close; forex_entries.push(entry); } } forex_struct.forex = forex_entries; Ok(forex_struct) } } pub trait VecEntry { fn find(&self, time: &str) -> Option<Entry>; fn latest(&self) -> Entry; fn latestn(&self, n: usize) -> Result<Vec<Entry>>; } impl VecEntry for Vec<Entry> { #[must_use] fn find(&self, time: &str) -> Option<Entry> { for entry in self { if entry.time == time { return Some(entry.clone()); } } None } #[must_use] fn latest(&self) -> Entry { let mut latest = Entry::default(); let mut new_time = String::new(); for entry in self { if new_time < entry.time { latest = entry.clone(); new_time = entry.time.clone(); } } latest } fn latestn(&self, n: usize) -> Result<Vec<Entry>> { let mut time_list = Vec::new(); for entry in self { time_list.push(entry.time.clone()); } time_list.sort(); time_list.reverse(); let time_list_count: usize = time_list.len(); let mut full_list = 
Self::new(); for i in 0..n { let time = time_list.get(i); if let Some(time) = time { let entry = self .find(time) .unwrap_or_else(|| panic!("Failed to find time value for index {}", i)); full_list.push(entry); } else { return Err(Error::DesiredNumberOfEntryNotPresent(time_list_count)); } } Ok(full_list) } } pub struct ForexBuilder<'a> { api_client: &'a ApiClient, function: ForexFunction, from_symbol: &'a str, to_symbol: &'a str, interval: Option<TimeSeriesInterval>, output_size: Option<OutputSize>, } impl<'a> ForexBuilder<'a> { #[must_use] pub fn new( api_client: &'a ApiClient, function: ForexFunction, from_symbol: &'a str, to_symbol: &'a str, ) -> Self { Self { api_client, function, from_symbol, to_symbol, interval: None, output_size: None, } } #[must_use] pub fn interval(mut self, interval: TimeSeriesInterval) -> Self { self.interval = Some(interval); self } #[must_use] pub fn output_size(mut self, output_size: OutputSize) -> Self { self.output_size = Some(output_size); self } fn create_url(&self) -> String { let function = match self.function { ForexFunction::IntraDay => "FX_INTRADAY", ForexFunction::Daily => "FX_DAILY", ForexFunction::Weekly => "FX_WEEKLY", ForexFunction::Monthly => "FX_MONTHLY", }; let mut url = format!( "query?function={}&from_symbol={}&to_symbol={}", function, self.from_symbol, self.to_symbol ); if let Some(forex_interval) = self.interval { let interval = match forex_interval { TimeSeriesInterval::OneMin => "1min", TimeSeriesInterval::FiveMin => "5min", TimeSeriesInterval::FifteenMin => "15min", TimeSeriesInterval::ThirtyMin => "30min", TimeSeriesInterval::SixtyMin => "60min", }; url.push_str(&format!("&interval={}", interval)); }; if let Some(forex_output_size) = self.output_size { let size = match forex_output_size { OutputSize::Full => "full", OutputSize::Compact => "compact", }; url.push_str(&format!("&outputsize={}", size)); } url } pub async fn json(&self) -> Result<Forex> { let url = self.create_url(); let forex_helper: ForexHelper = 
self.api_client.get_json(url).await?; forex_helper.convert() } } #[derive(Copy, Clone)] pub enum ForexFunction { IntraDay, Daily, Weekly, Monthly, }
use std::collections::HashMap; use serde::Deserialize; use crate::{ api::{ApiClient, OutputSize, TimeSeriesInterval}, deserialize::from_str, error::{detect_common_helper_error, Error, Result}, }; #[derive(Debug, Clone, Default)] struct MetaData { information: String, from_symbol: String, to_symbol: String, last_refreshed: String, interval: Option<String>, output_size: Option<String>, time_zone: String, } #[derive(Default, Debug, Clone)] pub struct Entry { time: String, open: f64, high: f64, low: f64, close: f64, } impl Entry { #[must_use] pub fn time(&self) -> &str { &self.time } #[must_use] pub fn open(&self) -> f64 { self.open } #[must_use] pub fn high(&self) -> f64 { self.high } #[must_use] pub fn low(&self) -> f64 { self.low } #[must_use] pub fn close(&self) -> f64 { self.close } } #[derive(Debug, Default)] pub struct Forex { meta_data: MetaData, forex: Vec<Entry>, } impl Forex { #[must_use] pub fn information(&self) -> &str { self.return_meta_string("information") } #[must_use] pub fn symbol_from(&self) -> &str { self.return_meta_string("from symbol") } #[must_use] pub fn symbol_to(&self) -> &str { self.return_meta_string("to symbol") } #[must_use] pub fn last_refreshed(&self) -> &str { self.return_meta_string("last refreshed") } #[must_use] pub fn time_zone(&self) -> &str { self.return_meta_string("time zone") } #[must_use] pub fn interval(&self) -> Option<&str> { self.operate_option_meta_value("interval") } #[must_use] pub fn output_size(&self) -> Option<&str> { self.operate_option_meta_value("output size") } #[must_use] pub fn entry(&self) -> &Vec<Entry> { &self.forex } fn return_meta_string(&self, which_val: &str) -> &str { match which_val { "information" => &self.meta_data.information, "from symbol" => &self.meta_data.from_symbol, "to symbol" => &self.meta_data.to_symbol, "time zone" => &self.meta_data.time_zone, "last refreshed" => &self.meta_data.last_refreshed, _ => "", } } fn operate_option_meta_value(&self, which_val: &str) -> Option<&str> { let 
value = match which_val { "interval" => &self.meta_data.interval, "output size" => &self.meta_data.output_size, _ => &None, }; value.as_deref() } } #[derive(Clone, Debug, Deserialize)] struct EntryHelper { #[serde(rename = "1. open", deserialize_with = "from_str")] open: f64, #[serde(rename = "2. high", deserialize_with = "from_str")] high: f64, #[serde(rename = "3. low", deserialize_with = "from_str")] low: f64, #[serde(rename = "4. close", deserialize_with = "from_str")] close: f64, } #[derive(Debug, Deserialize)] pub(crate) struct ForexHelper { #[serde(rename = "Error Message")] error_message: Option<String>, #[serde(rename = "Information")] information: Option<String>, #[serde(rename = "Note")] note: Option<String>, #[serde(rename = "Meta Data")] meta_data: Option<HashMap<String, String>>, #[serde(flatten)] forex: Option<HashMap<String, HashMap<String, EntryHelper>>>, } impl ForexHelper { pub(crate) fn convert(self) -> Result<Forex> { let mut forex_struct = Forex::default(); detect_common_helper_error(self.information, self.error_message, self.note)?; if self.meta_data.is_none() || self.forex.is_none() { return Err(Error::EmptyResponse); } let meta_data = self.meta_data.unwrap(); let information = &meta_data["1. Information"]; let from_symbol = &meta_data["2. From Symbol"]; let to_symbol = &meta_data["3. To Symbol"]; let last_refreshed = meta_data.get("4. Last Refreshed"); let mut last_refreshed_value = last_refreshed.cloned(); if last_refreshed_value.is_none() { let last_refreshed = meta_data.get("5. Last Refreshed"); last_refreshed_value = last_refreshed.cloned(); } let last_refreshed_value = last_refreshed_value.expect("Last refreshed value contains None"); let time_zone = meta_data.get("5. Time Zone"); let mut time_zone_value = time_zone.cloned(); if time_zone_value.is_none() { let time_zone = meta_data.get("6. Time Zone"); time_zone_value = time_zone.cloned(); } if time_zone_value.is_none() { let time_zone = meta_data.get("7. 
Time Zone"); time_zone_value = time_zone.cloned(); } let time_zone_value = time_zone_value.expect("Time zone contains None value"); let output_size = meta_data.get("4. Output Size"); let mut output_size_value = output_size.cloned(); if output_size_value.is_none() { let output_size = meta_data.get("6. Output Size"); output_size_value = output_size.cloned(); } let interval = meta_data.get("5. Interval"); let interval_value = interval.cloned(); forex_struct.meta_data = MetaData { information: information.to_string(), from_symbol: from_symbol.to_string(), to_symbol: to_symbol.to_string(), last_refreshed: last_refreshed_value, interval: interval_value, output_size: output_size_value, time_zone: time_zone_value, }; let mut forex_entries: Vec<Entry> = Vec::new(); for hash in self.forex.unwrap().values() { for val in hash.keys() { let mut entry = Entry { time: val.to_string(), ..Entry::default() }; let entry_helper = hash .get(val) .expect("Cannot get a val from hash map") .clone(); entry.open = entry_helper.open; entry.high = entry_helper.high; entry.low = entry_helper.low; entry.close = entry_helper.close; forex_entries.push(entry); } } forex_struct.forex = forex_entries; Ok(forex_struct) } } pub trait VecEntry { fn find(&self, time: &str) -> Option<Entry>; fn latest(&self) -> Entry; fn latestn(&self, n: usize) -> Result<Vec<Entry>>; } impl VecEntry for Vec<Entry> { #[must_use] fn find(&self, time: &str) -> Option<Entry> { for entry in self { if entry.time == time { return Some(entry.clone()); } } None } #[must_use] fn latest(&self) -> Entry {
fn latestn(&self, n: usize) -> Result<Vec<Entry>> { let mut time_list = Vec::new(); for entry in self { time_list.push(entry.time.clone()); } time_list.sort(); time_list.reverse(); let time_list_count: usize = time_list.len(); let mut full_list = Self::new(); for i in 0..n { let time = time_list.get(i); if let Some(time) = time { let entry = self .find(time) .unwrap_or_else(|| panic!("Failed to find time value for index {}", i)); full_list.push(entry); } else { return Err(Error::DesiredNumberOfEntryNotPresent(time_list_count)); } } Ok(full_list) } } pub struct ForexBuilder<'a> { api_client: &'a ApiClient, function: ForexFunction, from_symbol: &'a str, to_symbol: &'a str, interval: Option<TimeSeriesInterval>, output_size: Option<OutputSize>, } impl<'a> ForexBuilder<'a> { #[must_use] pub fn new( api_client: &'a ApiClient, function: ForexFunction, from_symbol: &'a str, to_symbol: &'a str, ) -> Self { Self { api_client, function, from_symbol, to_symbol, interval: None, output_size: None, } } #[must_use] pub fn interval(mut self, interval: TimeSeriesInterval) -> Self { self.interval = Some(interval); self } #[must_use] pub fn output_size(mut self, output_size: OutputSize) -> Self { self.output_size = Some(output_size); self } fn create_url(&self) -> String { let function = match self.function { ForexFunction::IntraDay => "FX_INTRADAY", ForexFunction::Daily => "FX_DAILY", ForexFunction::Weekly => "FX_WEEKLY", ForexFunction::Monthly => "FX_MONTHLY", }; let mut url = format!( "query?function={}&from_symbol={}&to_symbol={}", function, self.from_symbol, self.to_symbol ); if let Some(forex_interval) = self.interval { let interval = match forex_interval { TimeSeriesInterval::OneMin => "1min", TimeSeriesInterval::FiveMin => "5min", TimeSeriesInterval::FifteenMin => "15min", TimeSeriesInterval::ThirtyMin => "30min", TimeSeriesInterval::SixtyMin => "60min", }; url.push_str(&format!("&interval={}", interval)); }; if let Some(forex_output_size) = self.output_size { let size = match 
forex_output_size { OutputSize::Full => "full", OutputSize::Compact => "compact", }; url.push_str(&format!("&outputsize={}", size)); } url } pub async fn json(&self) -> Result<Forex> { let url = self.create_url(); let forex_helper: ForexHelper = self.api_client.get_json(url).await?; forex_helper.convert() } } #[derive(Copy, Clone)] pub enum ForexFunction { IntraDay, Daily, Weekly, Monthly, }
let mut latest = Entry::default(); let mut new_time = String::new(); for entry in self { if new_time < entry.time { latest = entry.clone(); new_time = entry.time.clone(); } } latest }
function_block-function_prefix_line
[ { "content": "// convert str which has percent form to f64 val\n\nfn convert_str_percent_f64(val: &str) -> f64 {\n\n let mut s = val.to_owned();\n\n s.pop();\n\n s.trim().parse::<f64>().unwrap()\n\n}\n\n\n\n/// Builder to create new Sector\n\npub struct SectorBuilder<'a> {\n\n api_client: &'a ApiClient,\n\n}\n\n\n\nimpl<'a> SectorBuilder<'a> {\n\n /// Create new sector builder from `APIClient`\n\n #[must_use]\n\n pub fn new(api_client: &'a ApiClient) -> Self {\n\n Self { api_client }\n\n }\n\n\n\n fn create_url() -> String {\n\n String::from(\"query?function=SECTOR\")\n", "file_path": "src/sector.rs", "rank": 0, "score": 128346.6979309427 }, { "content": "type DataType = HashMap<String, HashMap<String, HashMap<String, String>>>;\n\n\n\n/// Struct for storing a data values\n\n#[derive(Default)]\n\npub struct DataCollector {\n\n time: String,\n\n values: HashMap<String, f64>,\n\n}\n\n\n\nimpl DataCollector {\n\n /// Return time\n\n #[must_use]\n\n pub fn time(&self) -> &str {\n\n &self.time\n\n }\n\n\n\n /// Return values for Data\n\n #[must_use]\n\n pub fn values(&self) -> &HashMap<String, f64> {\n\n &self.values\n", "file_path": "src/technical_indicator.rs", "rank": 2, "score": 116616.16657682866 }, { "content": "/// trait which helps for performing some common operation on Vec<Entry>\n\npub trait VecEntry {\n\n /// Find a entry with a given time as a input return none if no entry found\n\n fn find(&self, time: &str) -> Option<Entry>;\n\n /// Return a entry which is of latest time period\n\n fn latest(&self) -> Entry;\n\n /// Return a top n latest Entry\n\n /// # Errors\n\n /// If n is greater than no of entry\n\n fn latestn(&self, n: usize) -> Result<Vec<Entry>>;\n\n}\n\n\n\nimpl VecEntry for Vec<Entry> {\n\n #[must_use]\n\n fn find(&self, time: &str) -> Option<Entry> {\n\n for entry in self {\n\n if entry.time == time {\n\n return Some(entry.clone());\n\n }\n\n }\n\n None\n", "file_path": "src/stock_time.rs", "rank": 4, "score": 110335.21137620231 }, { 
"content": "#[derive(Deserialize, Clone, Default)]\n\nstruct MetaData {\n\n #[serde(rename = \"Information\")]\n\n information: String,\n\n #[serde(rename = \"Last Refreshed\")]\n\n last_refreshed: String,\n\n}\n\n\n\n/// Store Sector data\n\n#[derive(Default, Clone)]\n\npub struct Data {\n\n rank: String,\n\n utilities: f64,\n\n health_care: f64,\n\n information_technology: f64,\n\n industrials: f64,\n\n real_estate: f64,\n\n consumer_staples: f64,\n\n consumer_discretionary: f64,\n\n financials: f64,\n\n communication_services: f64,\n", "file_path": "src/sector.rs", "rank": 5, "score": 103229.4367559547 }, { "content": "#[derive(Deserialize, Clone, Default)]\n\nstruct MetaData {\n\n #[serde(rename = \"1. Information\")]\n\n information: String,\n\n #[serde(rename = \"2. Digital Currency Code\")]\n\n digital_code: String,\n\n #[serde(rename = \"3. Digital Currency Name\")]\n\n digital_name: String,\n\n #[serde(rename = \"4. Market Code\")]\n\n market_code: String,\n\n #[serde(rename = \"5. Market Name\")]\n\n market_name: String,\n\n #[serde(rename = \"6. Last Refreshed\")]\n\n last_refreshed: String,\n\n #[serde(rename = \"7. 
Time Zone\")]\n\n time_zone: String,\n\n}\n\n\n\n/// Struct which stores Crypto data\n\n#[derive(Default, Debug, Clone)]\n\npub struct Entry {\n", "file_path": "src/crypto.rs", "rank": 6, "score": 103229.4367559547 }, { "content": "// convert string to optional T\n\nfn option_from_str<T>(val: Option<String>) -> Option<T>\n\nwhere\n\n T: FromStr,\n\n T::Err: std::error::Error,\n\n{\n\n val.map(|s| T::from_str(&s).unwrap())\n\n}\n\n\n\n/// Builder to create new `TimeSeries`\n\npub struct TimeSeriesBuilder<'a> {\n\n api_client: &'a ApiClient,\n\n function: StockFunction,\n\n symbol: &'a str,\n\n interval: Option<TimeSeriesInterval>,\n\n output_size: Option<OutputSize>,\n\n adjusted: Option<bool>,\n\n}\n\n\n\nimpl<'a> TimeSeriesBuilder<'a> {\n\n /// Create new `TimeSeriesBuilder` form `APIClient`\n", "file_path": "src/stock_time.rs", "rank": 7, "score": 102893.86758534762 }, { "content": "/// trait which helps for performing some common operation on Vec<Entry>\n\npub trait VecEntry {\n\n /// Find a entry with a given time as a input return none if no entry found\n\n fn find(&self, time: &str) -> Option<Entry>;\n\n /// Return a entry which is of latest time period\n\n fn latest(&self) -> Entry;\n\n /// Return a top n latest Entry\n\n /// # Errors\n\n /// If n is greater than no of entry\n\n fn latestn(&self, n: usize) -> Result<Vec<Entry>>;\n\n}\n\n\n\nimpl VecEntry for Vec<Entry> {\n\n #[must_use]\n\n fn find(&self, time: &str) -> Option<Entry> {\n\n for entry in self {\n\n if entry.time == time {\n\n return Some(entry.clone());\n\n }\n\n }\n\n None\n", "file_path": "src/crypto.rs", "rank": 8, "score": 93455.15667854356 }, { "content": "#[derive(Clone, Deserialize)]\n\nstruct EntryHelper {\n\n #[serde(rename = \"1. open\", deserialize_with = \"from_str\")]\n\n open: f64,\n\n #[serde(rename = \"2. high\", deserialize_with = \"from_str\")]\n\n high: f64,\n\n #[serde(rename = \"3. low\", deserialize_with = \"from_str\")]\n\n low: f64,\n\n #[serde(rename = \"4. 
close\", deserialize_with = \"from_str\")]\n\n close: f64,\n\n #[serde(rename = \"5. volume\", deserialize_with = \"from_str\")]\n\n volume: u64,\n\n}\n\n\n\n/// Helper struct to store adjusted data\n", "file_path": "src/stock_time.rs", "rank": 10, "score": 82384.92464089704 }, { "content": "/// Trait which can be implemented for all common library client for getting\n\n/// output from server\n\n/// surf and reqwest are two client which are supported with feature flag. If\n\n/// you prefer alternate http client you can add support by implementing\n\n/// `HttpClient` trait for client.\n\n/// Some example of other client which can be used are isahc client\n\npub trait HttpClient {\n\n /// AlphaVantage provider output function which provides one field path\n\n /// where get GET request needs to be performed\n\n async fn get_alpha_vantage_provider_output(&self, path: String) -> Result<String>;\n\n\n\n /// RapidAPI provider function which provides two field path and api_key.\n\n /// Path needs to be set along with header x-rapidapi-host as\n\n /// alpha-vantage.p.rapidapi.com and header x-rapidapi-key same as\n\n /// api_key field\n\n async fn get_rapid_api_provider_output(&self, path: String, api_key: String) -> Result<String>;\n\n}\n\n\n\n#[cfg(feature = \"reqwest-client\")]\n\n#[async_trait]\n\nimpl HttpClient for reqwest::Client {\n\n async fn get_alpha_vantage_provider_output(&self, path: String) -> Result<String> {\n\n self.get(&path)\n\n .send()\n\n .await\n\n .map_err(|_| Error::GetRequestFailed)?\n", "file_path": "src/client.rs", "rank": 11, "score": 70758.2707160141 }, { "content": "#[must_use]\n\npub fn set_api<T>(api: &str, client: T) -> ApiClient\n\nwhere\n\n T: HttpClient + 'static + Send + Sync,\n\n{\n\n ApiClient::set_api(api, client)\n\n}\n", "file_path": "src/lib.rs", "rank": 12, "score": 65974.27509159221 }, { "content": "#[derive(Deserialize, Clone)]\n\nstruct EntryHelper {\n\n #[serde(rename = \"1b. 
open (USD)\", deserialize_with = \"from_str\")]\n\n open_usd: f64,\n\n #[serde(rename = \"2b. high (USD)\", deserialize_with = \"from_str\")]\n\n high_usd: f64,\n\n #[serde(rename = \"3b. low (USD)\", deserialize_with = \"from_str\")]\n\n low_usd: f64,\n\n #[serde(rename = \"4b. close (USD)\", deserialize_with = \"from_str\")]\n\n close_usd: f64,\n\n #[serde(rename = \"5. volume\", deserialize_with = \"from_str\")]\n\n volume: f64,\n\n #[serde(rename = \"6. market cap (USD)\", deserialize_with = \"from_str\")]\n\n market_cap: f64,\n\n #[serde(flatten)]\n\n market_data: HashMap<String, String>,\n\n}\n\n\n\n/// Struct to help out for creation of struct Crypto\n\n#[derive(Deserialize)]\n\npub(crate) struct CryptoHelper {\n", "file_path": "src/crypto.rs", "rank": 13, "score": 63258.89375553114 }, { "content": "#[derive(Deserialize, Clone)]\n\nstruct AdjustedHelper {\n\n #[serde(rename = \"1. open\", deserialize_with = \"from_str\")]\n\n open: f64,\n\n #[serde(rename = \"2. high\", deserialize_with = \"from_str\")]\n\n high: f64,\n\n #[serde(rename = \"3. low\", deserialize_with = \"from_str\")]\n\n low: f64,\n\n #[serde(rename = \"4. close\", deserialize_with = \"from_str\")]\n\n close: f64,\n\n #[serde(rename = \"5. adjusted close\")]\n\n adjusted_close: Option<String>,\n\n #[serde(rename = \"6. volume\", deserialize_with = \"from_str\")]\n\n volume: u64,\n\n #[serde(rename = \"7. dividend amount\")]\n\n dividend_amount: Option<String>,\n\n #[serde(rename = \"8. split coefficient\")]\n\n split_coefficient: Option<String>,\n\n}\n\n\n\n/// helper struct for `TimeSeries` which deserialize JSON\n", "file_path": "src/stock_time.rs", "rank": 14, "score": 59681.933347459446 }, { "content": "#[derive(Debug, Deserialize, Clone, Default)]\n\nstruct GlobalQuote {\n\n #[serde(rename = \"01. symbol\")]\n\n symbol: String,\n\n #[serde(rename = \"02. open\", deserialize_with = \"from_str\")]\n\n open: f64,\n\n #[serde(rename = \"03. 
high\", deserialize_with = \"from_str\")]\n\n high: f64,\n\n #[serde(rename = \"04. low\", deserialize_with = \"from_str\")]\n\n low: f64,\n\n #[serde(rename = \"05. price\", deserialize_with = \"from_str\")]\n\n price: f64,\n\n #[serde(rename = \"06. volume\", deserialize_with = \"from_str\")]\n\n volume: u64,\n\n #[serde(rename = \"07. latest trading day\")]\n\n last_day: String,\n\n #[serde(rename = \"08. previous close\", deserialize_with = \"from_str\")]\n\n previous_close: f64,\n\n #[serde(rename = \"09. change\", deserialize_with = \"from_str\")]\n\n change: f64,\n\n #[serde(rename = \"10. change percent\", deserialize_with = \"percent_f64\")]\n", "file_path": "src/quote.rs", "rank": 15, "score": 39086.84508442968 }, { "content": "#[derive(Debug, Deserialize, Clone, Default)]\n\nstruct RealtimeExchangeRate {\n\n #[serde(rename = \"1. From_Currency Code\")]\n\n from_code: String,\n\n #[serde(rename = \"2. From_Currency Name\")]\n\n from_name: String,\n\n #[serde(rename = \"3. To_Currency Code\")]\n\n to_code: String,\n\n #[serde(rename = \"4. To_Currency Name\")]\n\n to_name: String,\n\n #[serde(rename = \"5. Exchange Rate\", deserialize_with = \"from_str\")]\n\n rate: f64,\n\n #[serde(rename = \"6. Last Refreshed\")]\n\n last_refreshed: String,\n\n #[serde(rename = \"7. Time Zone\")]\n\n time_zone: String,\n\n #[serde(rename = \"8. Bid Price\")]\n\n bid_price: String,\n\n #[serde(rename = \"9. 
Ask Price\")]\n\n ask_price: String,\n\n}\n", "file_path": "src/exchange.rs", "rank": 16, "score": 37615.83091050768 }, { "content": " let s = String::deserialize(deserializer)?.to_lowercase();\n\n if &s == \"none\" {\n\n Ok(None)\n\n } else {\n\n match T::from_str(&s) {\n\n Ok(data) => Ok(Some(data)),\n\n Err(msg) => Err(msg),\n\n }\n\n .map_err(Error::custom)\n\n }\n\n}\n\n\n\npub(crate) fn percent_f64<'de, D>(deserializer: D) -> Result<f64, D::Error>\n\nwhere\n\n D: Deserializer<'de>,\n\n{\n\n let mut s = String::deserialize(deserializer)?;\n\n s.pop();\n\n f64::from_str(&s).map_err(Error::custom)\n\n}\n", "file_path": "src/deserialize.rs", "rank": 17, "score": 27956.301127009025 }, { "content": "use std::str::FromStr;\n\n\n\nuse serde::de::{Deserialize, Deserializer, Error};\n\n\n\npub(crate) fn from_str<'de, T, D>(deserializer: D) -> Result<T, D::Error>\n\nwhere\n\n T: FromStr,\n\n T::Err: std::fmt::Display,\n\n D: Deserializer<'de>,\n\n{\n\n let s = String::deserialize(deserializer)?;\n\n T::from_str(&s).map_err(Error::custom)\n\n}\n\n\n\npub(crate) fn from_none_str<'de, T, D>(deserializer: D) -> Result<Option<T>, D::Error>\n\nwhere\n\n T: FromStr,\n\n T::Err: std::fmt::Display,\n\n D: Deserializer<'de>,\n\n{\n", "file_path": "src/deserialize.rs", "rank": 18, "score": 27947.0468600116 }, { "content": "//! 
Module which contains all types of error for alpha vantage crates\n\nuse thiserror::Error;\n\n\n\n/// Result type for alpha vantage crate\n\npub type Result<T> = std::result::Result<T, Error>;\n\n\n\n#[derive(Error, Debug)]\n\n/// Main error/failure enum\n\npub enum Error {\n\n /// Error which is raised if information is returned by API instead of data\n\n /// from API\n\n #[error(\"information: {0}\")]\n\n AlphaVantageInformation(String),\n\n\n\n /// Error which is raised if error_message is returned by API instead of\n\n /// data from API\n\n #[error(\"error_message: {0}\")]\n\n AlphaVantageErrorMessage(String),\n\n\n\n /// Error which is raised if note is returned by API instead of data from\n", "file_path": "src/error.rs", "rank": 19, "score": 27920.04542751022 }, { "content": " /// API\n\n #[error(\"note: {0}\")]\n\n AlphaVantageNote(String),\n\n\n\n /// Error which is raised when desired number of entry is not present\n\n #[error(\"desired number of latest entry not found try using less than {0} as n\")]\n\n DesiredNumberOfEntryNotPresent(usize),\n\n\n\n /// Error which is raised if API return empty response instead of returning\n\n /// data\n\n #[error(\"server returned empty response\")]\n\n EmptyResponse,\n\n\n\n /// Error which is raise if failed to get output from server\n\n #[error(\"Failed to get output from sever\")]\n\n GetRequestFailed,\n\n\n\n /// Error which is raised if client fails to decode it into struct\n\n #[error(\"Failed to decode string into struct\")]\n\n DecodeJsonToStruct,\n", "file_path": "src/error.rs", "rank": 20, "score": 27916.333130979838 }, { "content": "}\n\n\n\npub(crate) fn detect_common_helper_error(\n\n information: Option<String>,\n\n error_message: Option<String>,\n\n note: Option<String>,\n\n) -> Result<()> {\n\n if let Some(information) = information {\n\n return Err(Error::AlphaVantageInformation(information));\n\n }\n\n if let Some(error_message) = error_message {\n\n return 
Err(Error::AlphaVantageErrorMessage(error_message));\n\n }\n\n if let Some(note) = note {\n\n return Err(Error::AlphaVantageNote(note));\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/error.rs", "rank": 21, "score": 27913.72662491806 }, { "content": "};\n\n\n\n/// Struct for storing Meta Data value\n\n#[derive(Debug, Clone, Default)]\n\npub struct MetaData {\n\n information: String,\n\n symbol: String,\n\n last_refreshed: String,\n\n interval: Option<String>,\n\n output_size: Option<String>,\n\n time_zone: String,\n\n}\n\n\n\n/// Struct for Entry value\n\n#[derive(Default, Debug, Clone)]\n\npub struct Entry {\n\n time: String,\n\n open: f64,\n\n high: f64,\n\n low: f64,\n", "file_path": "src/stock_time.rs", "rank": 44, "score": 25207.51333808487 }, { "content": " /// Return a meta data value as a form of String\n\n fn return_meta_string(&self, which_val: &str) -> &str {\n\n match which_val {\n\n \"information\" => &self.meta_data.information,\n\n \"symbol\" => &self.meta_data.symbol,\n\n \"time zone\" => &self.meta_data.time_zone,\n\n \"last refreshed\" => &self.meta_data.last_refreshed,\n\n _ => \"\",\n\n }\n\n }\n\n\n\n /// Return Option metadata value as a Result form of String\n\n fn operate_option_meta_value(&self, which_val: &str) -> Option<&str> {\n\n let value = match which_val {\n\n \"interval\" => &self.meta_data.interval,\n\n \"output size\" => &self.meta_data.output_size,\n\n _ => &None,\n\n };\n\n value.as_deref()\n\n }\n\n}\n\n\n\n/// Helper struct to store non adjusted data\n\n#[derive(Clone, Deserialize)]\n", "file_path": "src/stock_time.rs", "rank": 45, "score": 25205.592940278657 }, { "content": " detect_common_helper_error(self.information, self.error_message, self.note)?;\n\n if self.meta_data.is_none()\n\n || (self.time_series.is_none() && self.adjusted_series.is_none())\n\n {\n\n return Err(Error::EmptyResponse);\n\n }\n\n let meta_data = self.meta_data.unwrap();\n\n let information = &meta_data[\"1. 
Information\"];\n\n let symbol = &meta_data[\"2. Symbol\"];\n\n let last_refreshed = &meta_data[\"3. Last Refreshed\"];\n\n let interval = meta_data.get(\"4. Interval\");\n\n let interval = interval.cloned();\n\n let output_size = meta_data.get(\"4. Output Size\");\n\n let mut output_size_value = output_size.cloned();\n\n if output_size_value.is_none() {\n\n let output_size = meta_data.get(\"5. Output Size\");\n\n output_size_value = output_size.cloned();\n\n }\n\n let time_zone = meta_data.get(\"4. Time Zone\");\n\n let mut time_zone_value = time_zone.cloned();\n", "file_path": "src/stock_time.rs", "rank": 46, "score": 25203.962849750955 }, { "content": " if time_zone_value.is_none() {\n\n let time_zone = meta_data.get(\"5. Time Zone\");\n\n time_zone_value = time_zone.cloned();\n\n }\n\n if time_zone_value.is_none() {\n\n let time_zone = meta_data.get(\"6. Time Zone\");\n\n time_zone_value = time_zone.cloned();\n\n }\n\n let time_zone_value = time_zone_value.expect(\"time zone value is None\");\n\n time_series.meta_data = MetaData {\n\n information: information.to_string(),\n\n symbol: symbol.to_string(),\n\n last_refreshed: last_refreshed.to_string(),\n\n interval,\n\n output_size: output_size_value,\n\n time_zone: time_zone_value,\n\n };\n\n let mut entry_value: Vec<Entry> = Vec::new();\n\n if let Some(time_series) = self.time_series {\n\n for hash in time_series.values() {\n", "file_path": "src/stock_time.rs", "rank": 47, "score": 25197.179909185976 }, { "content": "#[derive(Deserialize)]\n\npub(crate) struct TimeSeriesHelper {\n\n #[serde(rename = \"Error Message\")]\n\n error_message: Option<String>,\n\n #[serde(rename = \"Information\")]\n\n information: Option<String>,\n\n #[serde(rename = \"Note\")]\n\n note: Option<String>,\n\n #[serde(rename = \"Meta Data\")]\n\n meta_data: Option<HashMap<String, String>>,\n\n #[serde(flatten)]\n\n time_series: Option<HashMap<String, HashMap<String, EntryHelper>>>,\n\n #[serde(flatten)]\n\n adjusted_series: 
Option<HashMap<String, HashMap<String, AdjustedHelper>>>,\n\n}\n\n\n\nimpl TimeSeriesHelper {\n\n /// Convert `TimeSeriesHelper` to `TimeSeries`\n\n pub(crate) fn convert(self) -> Result<TimeSeries> {\n\n let mut time_series = TimeSeries::default();\n", "file_path": "src/stock_time.rs", "rank": 48, "score": 25185.035670938963 }, { "content": " /// .output_size(alpha_vantage::api::OutputSize::Full)\n\n /// .json()\n\n /// .await\n\n /// .unwrap();\n\n /// let information = stock_time.information();\n\n /// assert_eq!(\n\n /// information,\n\n /// \"Intraday (5min) open, high, low, close prices and volume\"\n\n /// );\n\n /// }\n\n /// ```\n\n #[must_use]\n\n pub fn information(&self) -> &str {\n\n self.return_meta_string(\"information\")\n\n }\n\n\n\n /// Return symbol for which time series function is called\n\n ///\n\n /// ```\n\n /// #[tokio::main]\n", "file_path": "src/stock_time.rs", "rank": 49, "score": 25183.904895982814 }, { "content": " pub fn last_refreshed(&self) -> &str {\n\n self.return_meta_string(\"last refreshed\")\n\n }\n\n\n\n /// Return time zone of all data time\n\n #[must_use]\n\n pub fn time_zone(&self) -> &str {\n\n self.return_meta_string(\"time zone\")\n\n }\n\n\n\n /// Time series interval between two consecutive data\n\n ///\n\n /// ```\n\n /// #[tokio::main]\n\n /// async fn main() {\n\n /// let api = alpha_vantage::set_api(\"demo\", reqwest::Client::new());\n\n /// let stock_time = api\n\n /// .stock_time(alpha_vantage::stock_time::StockFunction::IntraDay, \"MSFT\")\n\n /// .interval(alpha_vantage::api::TimeSeriesInterval::FiveMin)\n\n /// .output_size(alpha_vantage::api::OutputSize::Full)\n", "file_path": "src/stock_time.rs", "rank": 50, "score": 25181.979274959493 }, { "content": " for val in hash.keys() {\n\n let mut entry = Entry {\n\n time: val.to_string(),\n\n ..Entry::default()\n\n };\n\n let entry_helper = hash\n\n .get(val)\n\n .expect(\"failed to get val from hash for time series\")\n\n .clone();\n\n entry.open = 
entry_helper.open;\n\n entry.high = entry_helper.high;\n\n entry.low = entry_helper.low;\n\n entry.close = entry_helper.close;\n\n entry.volume = entry_helper.volume;\n\n entry_value.push(entry);\n\n }\n\n }\n\n }\n\n if let Some(adjusted_series) = self.adjusted_series {\n\n for hash in adjusted_series.values() {\n", "file_path": "src/stock_time.rs", "rank": 51, "score": 25180.305104750478 }, { "content": " for val in hash.keys() {\n\n let mut entry = Entry {\n\n time: val.to_string(),\n\n ..Entry::default()\n\n };\n\n let entry_helper = hash\n\n .get(val)\n\n .expect(\"failed to get val from hash for adjusted series\")\n\n .clone();\n\n entry.open = entry_helper.open;\n\n entry.high = entry_helper.high;\n\n entry.low = entry_helper.low;\n\n entry.close = entry_helper.close;\n\n entry.volume = entry_helper.volume;\n\n entry.adjusted_close = option_from_str(entry_helper.adjusted_close);\n\n entry.split_coefficient = option_from_str(entry_helper.split_coefficient);\n\n entry.dividend_amount = option_from_str(entry_helper.dividend_amount);\n\n entry_value.push(entry);\n\n }\n\n }\n\n }\n\n time_series.entry = entry_value;\n\n Ok(time_series)\n\n }\n\n}\n\n\n", "file_path": "src/stock_time.rs", "rank": 52, "score": 25179.672578737536 }, { "content": " #[must_use]\n\n pub fn new(api_client: &'a ApiClient, function: StockFunction, symbol: &'a str) -> Self {\n\n Self {\n\n api_client,\n\n function,\n\n symbol,\n\n interval: None,\n\n output_size: None,\n\n adjusted: None,\n\n }\n\n }\n\n\n\n /// Define time series interval for intraday stock time series\n\n #[must_use]\n\n pub fn interval(mut self, interval: TimeSeriesInterval) -> Self {\n\n self.interval = Some(interval);\n\n self\n\n }\n\n\n\n /// Define output size for intraday or daily stock time series\n", "file_path": "src/stock_time.rs", "rank": 53, "score": 25178.09166074774 }, { "content": " /// .interval(alpha_vantage::api::TimeSeriesInterval::FiveMin)\n\n /// 
.output_size(alpha_vantage::api::OutputSize::Full)\n\n /// .json()\n\n /// .await\n\n /// .unwrap();\n\n /// let output_size = stock_time.output_size();\n\n /// assert_eq!(output_size.unwrap(), \"Full size\");\n\n /// }\n\n /// ```\n\n #[must_use]\n\n pub fn output_size(&self) -> Option<&str> {\n\n self.operate_option_meta_value(\"output size\")\n\n }\n\n\n\n /// Return Entry\n\n #[must_use]\n\n pub fn entry(&self) -> &Vec<Entry> {\n\n &self.entry\n\n }\n\n\n", "file_path": "src/stock_time.rs", "rank": 54, "score": 25177.66311125524 }, { "content": "//! Module for stock time series\n\n//!\n\n//! This suite of APIs provide realtime and historical global equity data in 4\n\n//! different temporal resolutions: (1) daily, (2) weekly, (3) monthly, and (4)\n\n//! intraday. Daily, weekly, and monthly time series contain 20+ years of\n\n//! historical data\n\n//!\n\n//! You can read about [Stock Time][stock_time] API and what it returns\n\n//! on alphavantage documentation\n\n//!\n\n//! [stock_time]: https://www.alphavantage.co/documentation/#time-series-data\n\n\n\nuse std::{collections::HashMap, str::FromStr};\n\n\n\nuse serde::Deserialize;\n\n\n\nuse crate::{\n\n api::{ApiClient, OutputSize, TimeSeriesInterval},\n\n deserialize::from_str,\n\n error::{detect_common_helper_error, Error, Result},\n", "file_path": "src/stock_time.rs", "rank": 55, "score": 25176.578810100367 }, { "content": " /// async fn main() {\n\n /// let api = alpha_vantage::set_api(\"demo\", reqwest::Client::new());\n\n /// let stock_time = api\n\n /// .stock_time(alpha_vantage::stock_time::StockFunction::IntraDay, \"MSFT\")\n\n /// .interval(alpha_vantage::api::TimeSeriesInterval::FiveMin)\n\n /// .output_size(alpha_vantage::api::OutputSize::Full)\n\n /// .json()\n\n /// .await\n\n /// .unwrap();\n\n /// let symbol = stock_time.symbol();\n\n /// assert_eq!(symbol, \"MSFT\");\n\n /// }\n\n /// ```\n\n #[must_use]\n\n pub fn symbol(&self) -> &str {\n\n self.return_meta_string(\"symbol\")\n\n }\n\n\n\n 
/// Return last refreshed time\n\n #[must_use]\n", "file_path": "src/stock_time.rs", "rank": 56, "score": 25176.081425413988 }, { "content": " close: f64,\n\n adjusted_close: Option<f64>,\n\n volume: u64,\n\n dividend_amount: Option<f64>,\n\n split_coefficient: Option<f64>,\n\n}\n\n\n\nimpl Entry {\n\n /// Get time\n\n #[must_use]\n\n pub fn time(&self) -> &str {\n\n &self.time\n\n }\n\n\n\n /// Return open\n\n #[must_use]\n\n pub fn open(&self) -> f64 {\n\n self.open\n\n }\n\n\n", "file_path": "src/stock_time.rs", "rank": 57, "score": 25175.644913819106 }, { "content": " }\n\n}\n\n\n\n/// Struct for storing time series data\n\n#[derive(Debug, Default)]\n\npub struct TimeSeries {\n\n meta_data: MetaData,\n\n entry: Vec<Entry>,\n\n}\n\n\n\nimpl TimeSeries {\n\n /// Return information present in meta data\n\n ///\n\n /// ```\n\n /// #[tokio::main]\n\n /// async fn main() {\n\n /// let api = alpha_vantage::set_api(\"demo\", reqwest::Client::new());\n\n /// let stock_time = api\n\n /// .stock_time(alpha_vantage::stock_time::StockFunction::IntraDay, \"MSFT\")\n\n /// .interval(alpha_vantage::api::TimeSeriesInterval::FiveMin)\n", "file_path": "src/stock_time.rs", "rank": 58, "score": 25172.339583176337 }, { "content": " #[must_use]\n\n pub fn output_size(mut self, output_size: OutputSize) -> Self {\n\n self.output_size = Some(output_size);\n\n self\n\n }\n\n\n\n /// Define if output time series is adjusted by historical split and\n\n /// dividend events\n\n #[must_use]\n\n pub fn adjusted(mut self, adjusted: bool) -> Self {\n\n self.adjusted = Some(adjusted);\n\n self\n\n }\n\n\n\n fn create_url(&self) -> String {\n\n let function = match self.function {\n\n StockFunction::IntraDay => \"TIME_SERIES_INTRADAY\",\n\n StockFunction::Daily => \"TIME_SERIES_DAILY\",\n\n StockFunction::DailyAdjusted => \"TIME_SERIES_DAILY_ADJUSTED\",\n\n StockFunction::Weekly => \"TIME_SERIES_WEEKLY\",\n", "file_path": "src/stock_time.rs", "rank": 59, "score": 25171.814187852007 }, { "content": 
" /// Return high\n\n #[must_use]\n\n pub fn high(&self) -> f64 {\n\n self.high\n\n }\n\n\n\n /// Return low\n\n #[must_use]\n\n pub fn low(&self) -> f64 {\n\n self.low\n\n }\n\n\n\n /// Return close\n\n #[must_use]\n\n pub fn close(&self) -> f64 {\n\n self.close\n\n }\n\n\n\n /// Return adjusted\n\n #[must_use]\n", "file_path": "src/stock_time.rs", "rank": 60, "score": 25169.292412423725 }, { "content": " StockFunction::WeeklyAdjusted => \"TIME_SERIES_WEEKLY_ADJUSTED\",\n\n StockFunction::Monthly => \"TIME_SERIES_MONTHLY\",\n\n StockFunction::MonthlyAdjusted => \"TIME_SERIES_MONTHLY_ADJUSTED\",\n\n };\n\n\n\n let mut url = format!(\"query?function={}&symbol={}\", function, self.symbol);\n\n\n\n if let Some(stock_time_interval) = self.interval {\n\n let interval = match stock_time_interval {\n\n TimeSeriesInterval::OneMin => \"1min\",\n\n TimeSeriesInterval::FiveMin => \"5min\",\n\n TimeSeriesInterval::FifteenMin => \"15min\",\n\n TimeSeriesInterval::ThirtyMin => \"30min\",\n\n TimeSeriesInterval::SixtyMin => \"60min\",\n\n };\n\n url.push_str(&format!(\"&interval={}\", interval));\n\n };\n\n\n\n if let Some(stock_time_output_size) = self.output_size {\n\n let size = match stock_time_output_size {\n", "file_path": "src/stock_time.rs", "rank": 61, "score": 25168.309207951614 }, { "content": " /// Raise error if data obtained cannot be properly converted to struct or\n\n /// API returns any 4 possible known errors\n\n pub async fn json(&self) -> Result<TimeSeries> {\n\n let url = self.create_url();\n\n let stock_time_helper: TimeSeriesHelper = self.api_client.get_json(url).await?;\n\n stock_time_helper.convert()\n\n }\n\n}\n\n\n\n/// Enum for declaring function for stock time series by defining which type of\n\n/// series of stock to be returned\n\n#[derive(Copy, Clone)]\n\npub enum StockFunction {\n\n /// returns intraday time series (timestamp, open, high, low, close, volume)\n\n /// of the equity specified\n\n IntraDay,\n\n /// returns daily time series (date, 
daily open, daily high, daily low,\n\n /// daily close, daily volume) of the global equity specified, covering 20+\n\n /// years of historical data\n\n Daily,\n", "file_path": "src/stock_time.rs", "rank": 62, "score": 25168.255073140015 }, { "content": " }\n\n\n\n #[must_use]\n\n fn latest(&self) -> Entry {\n\n let mut latest = Entry::default();\n\n let mut new_time = String::new();\n\n for entry in self {\n\n if new_time < entry.time {\n\n latest = entry.clone();\n\n new_time = entry.time.clone();\n\n }\n\n }\n\n latest\n\n }\n\n\n\n fn latestn(&self, n: usize) -> Result<Vec<Entry>> {\n\n let mut time_list = Vec::new();\n\n for entry in self {\n\n time_list.push(entry.time.clone());\n\n }\n", "file_path": "src/stock_time.rs", "rank": 63, "score": 25167.227316047483 }, { "content": " /// .json()\n\n /// .await\n\n /// .unwrap();\n\n /// let interval = stock_time.interval();\n\n /// assert_eq!(interval.unwrap(), \"5min\");\n\n /// }\n\n /// ```\n\n #[must_use]\n\n pub fn interval(&self) -> Option<&str> {\n\n self.operate_option_meta_value(\"interval\")\n\n }\n\n\n\n /// Output Size of intraday which can be either Full or compact\n\n ///\n\n /// ```\n\n /// #[tokio::main]\n\n /// async fn main() {\n\n /// let api = alpha_vantage::set_api(\"demo\", reqwest::Client::new());\n\n /// let stock_time = api\n\n /// .stock_time(alpha_vantage::stock_time::StockFunction::IntraDay, \"MSFT\")\n", "file_path": "src/stock_time.rs", "rank": 64, "score": 25165.593018317315 }, { "content": " /// returns daily time series (date, daily open, daily high, daily low,\n\n /// daily close, daily volume, daily adjusted close, and split/dividend\n\n /// events) of the global equity specified, covering 20+ years of historical\n\n /// data.\n\n DailyAdjusted,\n\n /// returns weekly time series (last trading day of each week, weekly open,\n\n /// weekly high, weekly low, weekly close, weekly volume) of the global\n\n /// equity specified, covering 20+ years of historical data.\n\n Weekly,\n\n 
/// returns weekly adjusted time series (last trading day of each week,\n\n /// weekly open, weekly high, weekly low, weekly close, weekly adjusted\n\n /// close, weekly volume, weekly dividend) of the global equity specified,\n\n /// covering 20+ years of historical data.\n\n WeeklyAdjusted,\n\n /// returns monthly time series (last trading day of each month, monthly\n\n /// open, monthly high, monthly low, monthly close, monthly volume) of\n\n /// the global equity specified, covering 20+ years of historical data.\n\n Monthly,\n\n /// returns monthly adjusted time series (last trading day of each month,\n\n /// monthly open, monthly high, monthly low, monthly close, monthly adjusted\n\n /// close, monthly volume, monthly dividend) of the equity specified,\n\n /// covering 20+ years of historical data.\n\n MonthlyAdjusted,\n\n}\n", "file_path": "src/stock_time.rs", "rank": 65, "score": 25164.51313972106 }, { "content": " OutputSize::Full => \"full\",\n\n OutputSize::Compact => \"compact\",\n\n };\n\n url.push_str(&format!(\"&outputsize={}\", size));\n\n }\n\n\n\n if let Some(adjusted) = self.adjusted {\n\n if adjusted {\n\n url.push_str(\"&adjusted=true\");\n\n } else {\n\n url.push_str(\"&adjusted=false\");\n\n }\n\n };\n\n\n\n url\n\n }\n\n\n\n /// Returns JSON data struct\n\n ///\n\n /// # Errors\n", "file_path": "src/stock_time.rs", "rank": 66, "score": 25164.29111683364 }, { "content": " time_list.sort();\n\n time_list.reverse();\n\n let time_list_count: usize = time_list.len();\n\n let mut full_list = Self::new();\n\n for i in 0..n {\n\n let time = time_list.get(i);\n\n if let Some(time) = time {\n\n let entry = self\n\n .find(time)\n\n .unwrap_or_else(|| panic!(\"Failed to find time value for index {}\", i));\n\n full_list.push(entry);\n\n } else {\n\n return Err(Error::DesiredNumberOfEntryNotPresent(time_list_count));\n\n }\n\n }\n\n Ok(full_list)\n\n }\n\n}\n\n\n", "file_path": "src/stock_time.rs", "rank": 67, "score": 25163.35826272773 }, { "content": " 
pub fn adjusted(&self) -> Option<f64> {\n\n self.adjusted_close\n\n }\n\n\n\n /// Return volume\n\n #[must_use]\n\n pub fn volume(&self) -> u64 {\n\n self.volume\n\n }\n\n\n\n /// Return dividend\n\n #[must_use]\n\n pub fn dividend(&self) -> Option<f64> {\n\n self.dividend_amount\n\n }\n\n\n\n /// Return split dividend\n\n #[must_use]\n\n pub fn split(&self) -> Option<f64> {\n\n self.split_coefficient\n", "file_path": "src/stock_time.rs", "rank": 68, "score": 25159.275167156622 }, { "content": "\n\n /// Return a entry\n\n #[must_use]\n\n pub fn entry(&self) -> &Vec<Entry> {\n\n &self.entry\n\n }\n\n\n\n /// Return meta string\n\n fn return_meta_string(&self, which_val: &str) -> &str {\n\n match which_val {\n\n \"information\" => &self.meta_data.information,\n\n \"digital code\" => &self.meta_data.digital_code,\n\n \"digital name\" => &self.meta_data.digital_name,\n\n \"market code\" => &self.meta_data.market_code,\n\n \"market name\" => &self.meta_data.market_name,\n\n \"time zone\" => &self.meta_data.time_zone,\n\n \"last refreshed\" => &self.meta_data.last_refreshed,\n\n _ => \"\",\n\n }\n\n }\n\n}\n\n\n\n/// Struct to help out for creation of struct Entry\n", "file_path": "src/crypto.rs", "rank": 69, "score": 43.74825090666109 }, { "content": " }\n\n\n\n /// Return last refreshed time\n\n #[must_use]\n\n pub fn last_refreshed(&self) -> &str {\n\n self.return_meta_data_val(\"last refreshed\")\n\n }\n\n\n\n /// Return vector of data\n\n #[must_use]\n\n pub fn data(&self) -> &Vec<Data> {\n\n &self.data\n\n }\n\n\n\n /// Return metadata value\n\n fn return_meta_data_val(&self, name: &str) -> &str {\n\n match name {\n\n \"information\" => &self.meta_data.information,\n\n \"last refreshed\" => &self.meta_data.last_refreshed,\n\n _ => \"\",\n", "file_path": "src/sector.rs", "rank": 70, "score": 42.970843722105165 }, { "content": " #[serde(rename = \"Information\")]\n\n information: Option<String>,\n\n #[serde(rename = \"Error Message\")]\n\n error_message: 
Option<String>,\n\n #[serde(rename = \"Note\")]\n\n note: Option<String>,\n\n #[serde(rename = \"Meta Data\")]\n\n meta_data: Option<MetaData>,\n\n #[serde(flatten)]\n\n entry: Option<HashMap<String, HashMap<String, EntryHelper>>>,\n\n}\n\n\n\nimpl CryptoHelper {\n\n /// Function which convert `CryptoHelper` to `Crypto`\n\n pub(crate) fn convert(self) -> Result<Crypto> {\n\n let mut crypto = Crypto::default();\n\n detect_common_helper_error(self.information, self.error_message, self.note)?;\n\n if self.meta_data.is_none() || self.entry.is_none() {\n\n return Err(Error::EmptyResponse);\n\n }\n", "file_path": "src/crypto.rs", "rank": 71, "score": 42.64319298264396 }, { "content": " }\n\n}\n\n\n\n/// Struct for indicator\n\n#[derive(Default, Debug)]\n\npub struct Indicator {\n\n metadata: HashMap<String, Value>,\n\n data: DataType,\n\n}\n\n\n\nimpl Indicator {\n\n /// Return meta data in hash form with key as `String` and values as\n\n /// `serde_json::value::Value`\n\n #[must_use]\n\n pub fn meta_data(&self) -> &HashMap<String, Value> {\n\n &self.metadata\n\n }\n\n\n\n /// Return data as a vector\n\n #[must_use]\n", "file_path": "src/technical_indicator.rs", "rank": 72, "score": 42.02843642466589 }, { "content": " pub fn result(&self) -> &Vec<DataValue> {\n\n &self.matches\n\n }\n\n}\n\n\n\n/// struct for helping creation of search struct\n\n#[derive(Debug, Deserialize)]\n\npub(crate) struct SearchHelper {\n\n #[serde(rename = \"Information\")]\n\n information: Option<String>,\n\n #[serde(rename = \"Note\")]\n\n note: Option<String>,\n\n #[serde(rename = \"bestMatches\")]\n\n matches: Option<Vec<DataValue>>,\n\n}\n\n\n\nimpl SearchHelper {\n\n pub(crate) fn convert(self) -> Result<Search> {\n\n let mut search = Search::default();\n\n detect_common_helper_error(self.information, None, self.note)?;\n", "file_path": "src/search.rs", "rank": 73, "score": 39.33870272499942 }, { "content": " }\n\n }\n\n vector\n\n }\n\n}\n\n\n\n/// Struct for helping indicator 
struct\n\n#[derive(Deserialize)]\n\npub(crate) struct IndicatorHelper {\n\n #[serde(rename = \"Error Message\")]\n\n error_message: Option<String>,\n\n #[serde(rename = \"Information\")]\n\n information: Option<String>,\n\n #[serde(rename = \"Note\")]\n\n note: Option<String>,\n\n #[serde(rename = \"Meta Data\")]\n\n metadata: Option<HashMap<String, Value>>,\n\n #[serde(flatten)]\n\n data: Option<DataType>,\n\n}\n", "file_path": "src/technical_indicator.rs", "rank": 74, "score": 39.16026223557001 }, { "content": " note: Option<String>,\n\n #[serde(flatten)]\n\n extras: HashMap<String, Value>,\n\n}\n\n\n\nimpl CustomHelper {\n\n pub(crate) fn convert<T>(self) -> Result<T>\n\n where\n\n T: DeserializeOwned,\n\n {\n\n detect_common_helper_error(self.information, self.error_message, self.note)?;\n\n let data = self.extras;\n\n T::deserialize(MapDeserializer::new(data.into_iter()))\n\n .map_err(|_| Error::DecodeJsonToStruct)\n\n }\n\n}\n\n\n\n/// Builder to create new Custom Struct\n\npub struct CustomBuilder<'a> {\n\n api_client: &'a ApiClient,\n", "file_path": "src/custom.rs", "rank": 75, "score": 37.9204328108444 }, { "content": "use std::collections::HashMap;\n\n\n\nuse serde::{\n\n de::{value::MapDeserializer, DeserializeOwned},\n\n Deserialize,\n\n};\n\nuse serde_json::Value;\n\n\n\nuse crate::{\n\n api::ApiClient,\n\n error::{detect_common_helper_error, Error, Result},\n\n};\n\n/// struct used for helping creation of custom url\n\n#[derive(Debug, Deserialize)]\n\npub(crate) struct CustomHelper {\n\n #[serde(rename = \"Error Message\")]\n\n error_message: Option<String>,\n\n #[serde(rename = \"Information\")]\n\n information: Option<String>,\n\n #[serde(rename = \"Note\")]\n", "file_path": "src/custom.rs", "rank": 76, "score": 37.379416157274164 }, { "content": " }\n\n }\n\n}\n\n\n\n/// struct for helping out sector\n\n#[derive(Deserialize)]\n\npub(crate) struct SectorHelper {\n\n #[serde(rename = \"Information\")]\n\n information: Option<String>,\n\n 
#[serde(rename = \"Error Message\")]\n\n error_message: Option<String>,\n\n #[serde(rename = \"Note\")]\n\n note: Option<String>,\n\n #[serde(rename = \"Meta Data\")]\n\n meta_data: Option<MetaData>,\n\n #[serde(flatten)]\n\n data: Option<HashMap<String, HashMap<String, String>>>,\n\n}\n\n\n\nimpl SectorHelper {\n", "file_path": "src/sector.rs", "rank": 77, "score": 37.087500896610265 }, { "content": "\n\nimpl IndicatorHelper {\n\n pub(crate) fn convert(self) -> Result<Indicator> {\n\n let mut indicator = Indicator::default();\n\n detect_common_helper_error(self.information, self.error_message, self.note)?;\n\n if self.metadata.is_none() || self.data.is_none() {\n\n return Err(Error::EmptyResponse);\n\n }\n\n indicator.metadata = self.metadata.unwrap();\n\n indicator.data = self.data.unwrap();\n\n Ok(indicator)\n\n }\n\n}\n\n\n\n/// Builder to help create `Indicator`\n\npub struct IndicatorBuilder<'a> {\n\n api_client: &'a ApiClient,\n\n function: &'a str,\n\n symbol: &'a str,\n\n interval: TechnicalIndicatorInterval,\n", "file_path": "src/technical_indicator.rs", "rank": 78, "score": 36.94422574955773 }, { "content": " time: String,\n\n market_open: f64,\n\n usd_open: f64,\n\n market_high: f64,\n\n usd_high: f64,\n\n market_low: f64,\n\n usd_low: f64,\n\n market_close: f64,\n\n usd_close: f64,\n\n volume: f64,\n\n market_cap: f64,\n\n}\n\n\n\nimpl Entry {\n\n /// Return time\n\n #[must_use]\n\n pub fn time(&self) -> &str {\n\n &self.time\n\n }\n\n\n", "file_path": "src/crypto.rs", "rank": 79, "score": 36.36469898369527 }, { "content": "\n\nimpl Exchange {\n\n /// Get Rate for exchange\n\n #[must_use]\n\n pub fn rate(&self) -> f64 {\n\n self.real_time.rate\n\n }\n\n\n\n /// Get time when exchange rate was last refreshed along with time zone.\n\n #[must_use]\n\n pub fn refreshed_time(&self) -> &str {\n\n &self.real_time.last_refreshed\n\n }\n\n\n\n /// Return time zone of all data time\n\n #[must_use]\n\n pub fn time_zone(&self) -> &str {\n\n 
&self.real_time.time_zone\n\n }\n\n\n", "file_path": "src/exchange.rs", "rank": 80, "score": 35.22450951877281 }, { "content": " /// let market_name = crypto.market_name();\n\n /// assert_eq!(market_name, \"Chinese Yuan\");\n\n /// }\n\n /// ```\n\n #[must_use]\n\n pub fn market_name(&self) -> &str {\n\n self.return_meta_string(\"market name\")\n\n }\n\n\n\n /// Return last refreshed time\n\n #[must_use]\n\n pub fn last_refreshed(&self) -> &str {\n\n self.return_meta_string(\"last refreshed\")\n\n }\n\n\n\n /// Return time zone of all data time\n\n #[must_use]\n\n pub fn time_zone(&self) -> &str {\n\n self.return_meta_string(\"time zone\")\n\n }\n", "file_path": "src/crypto.rs", "rank": 81, "score": 35.05231540969529 }, { "content": "\n\n/// Struct for helping creation of Quote\n\n#[derive(Debug, Deserialize)]\n\npub(crate) struct QuoteHelper {\n\n #[serde(rename = \"Error Message\")]\n\n error_message: Option<String>,\n\n #[serde(rename = \"Information\")]\n\n information: Option<String>,\n\n #[serde(rename = \"Note\")]\n\n note: Option<String>,\n\n #[serde(rename = \"Global Quote\")]\n\n global_quote: Option<GlobalQuote>,\n\n}\n\n\n\nimpl QuoteHelper {\n\n pub(crate) fn convert(self) -> Result<Quote> {\n\n let mut quote = Quote::default();\n\n detect_common_helper_error(self.information, self.error_message, self.note)?;\n\n if self.global_quote.is_none() {\n\n return Err(Error::EmptyResponse);\n", "file_path": "src/quote.rs", "rank": 82, "score": 34.53743618878828 }, { "content": " pub fn data(&self) -> Vec<DataCollector> {\n\n let mut vector = Vec::new();\n\n for hash in self.data.values() {\n\n for time in hash.keys() {\n\n let mut data_collector = DataCollector {\n\n time: time.to_string(),\n\n ..DataCollector::default()\n\n };\n\n let hash_values = hash\n\n .get(time)\n\n .expect(\"cannot get time key value from hash map\")\n\n .clone();\n\n for (key, value) in &hash_values {\n\n let value_f64 = value\n\n .trim()\n\n .parse::<f64>()\n\n .expect(\"Cannot 
convert string to f64\");\n\n data_collector.values.insert(key.to_string(), value_f64);\n\n }\n\n vector.push(data_collector);\n", "file_path": "src/technical_indicator.rs", "rank": 83, "score": 33.128134078806454 }, { "content": " crypto.meta_data = self.meta_data.unwrap();\n\n let mut vec_entry = Vec::new();\n\n for value in self.entry.unwrap().values() {\n\n for key in value.keys() {\n\n let mut entry = Entry {\n\n time: key.to_string(),\n\n ..Entry::default()\n\n };\n\n let entry_helper = value\n\n .get(key)\n\n .expect(\"failed to get key from hashmap\")\n\n .clone();\n\n entry.usd_open = entry_helper.open_usd;\n\n entry.usd_high = entry_helper.high_usd;\n\n entry.usd_low = entry_helper.low_usd;\n\n entry.usd_close = entry_helper.close_usd;\n\n entry.market_cap = entry_helper.market_cap;\n\n entry.volume = entry_helper.volume;\n\n for key in entry_helper.market_data.keys() {\n\n let value = &entry_helper.market_data[key];\n", "file_path": "src/crypto.rs", "rank": 84, "score": 32.884134784984795 }, { "content": "//! Module for return earning per share for a company\n\n//!\n\n//! This API returns the annual and quarterly earnings (EPS) for the company of\n\n//! interest. Quarterly data also includes analyst estimates and surprise\n\n//! metrics.\n\n\n\nuse serde::Deserialize;\n\n\n\nuse crate::{\n\n api::ApiClient,\n\n deserialize::{from_none_str, from_str},\n\n error::{detect_common_helper_error, Error, Result},\n\n};\n\n\n\n/// Struct to store information of annual earning\n\n#[derive(Debug, Deserialize, Clone, Default)]\n\npub struct Annual {\n\n #[serde(rename = \"fiscalDateEnding\")]\n\n fiscal_date_ending: String,\n\n #[serde(rename = \"reportedEPS\", deserialize_with = \"from_str\")]\n", "file_path": "src/earning.rs", "rank": 85, "score": 32.299160021800446 }, { "content": "//! Module for crypto real time data\n\n//!\n\n//! APIs under this section provide a wide range of data feed for digital and\n\n//! crypto currencies such as Bitcoin.\n\n//!\n\n//! 
You can read about [Cryptocurrency][crypto_currency] API and what it returns\n\n//! on alphavantage documentation\n\n//!\n\n//! [crypto_currency]: https://www.alphavantage.co/documentation/#digital-currency\n\n\n\nuse std::{collections::HashMap, str::FromStr};\n\n\n\nuse serde::Deserialize;\n\n\n\nuse crate::{\n\n api::ApiClient,\n\n deserialize::from_str,\n\n error::{detect_common_helper_error, Error, Result},\n\n};\n\n\n\n/// Store Meta Data Information\n\n#[derive(Deserialize, Clone, Default)]\n", "file_path": "src/crypto.rs", "rank": 86, "score": 32.29823909736248 }, { "content": " /// Return Annual earning list for symbol\n\n #[must_use]\n\n pub fn annual_earning(&self) -> &Vec<Annual> {\n\n &self.annual_earning\n\n }\n\n\n\n /// Return quarterly earning for symbol\n\n #[must_use]\n\n pub fn quarterly_earning(&self) -> &Vec<Quarterly> {\n\n &self.quarterly_earning\n\n }\n\n}\n\n\n\n/// Struct used for creating earning\n\n#[derive(Debug, Deserialize)]\n\npub(crate) struct EarningHelper {\n\n #[serde(rename = \"Information\")]\n\n information: Option<String>,\n\n #[serde(rename = \"Error Message\")]\n\n error_message: Option<String>,\n", "file_path": "src/earning.rs", "rank": 87, "score": 31.82560149669646 }, { "content": " /// Convert `SectorHelper` to `Sector`\n\n pub(crate) fn convert(self) -> Result<Sector> {\n\n let mut sector = Sector::default();\n\n detect_common_helper_error(self.information, self.error_message, self.note)?;\n\n if self.meta_data.is_none() || self.data.is_none() {\n\n return Err(Error::EmptyResponse);\n\n }\n\n sector.meta_data = self.meta_data.unwrap();\n\n let mut final_data = Vec::new();\n\n for (key, val) in &self.data.unwrap() {\n\n let mut data = Data::default();\n\n match key.as_str() {\n\n \"Rank A: Real-Time Performance\" => data.rank = \"real-time\".to_string(),\n\n \"Rank B: 1 Day Performance\" => data.rank = \"1-day\".to_string(),\n\n \"Rank C: 5 Day Performance\" => data.rank = \"5-day\".to_string(),\n\n \"Rank D: 1 Month 
Performance\" => data.rank = \"1-month\".to_string(),\n\n \"Rank E: 3 Month Performance\" => data.rank = \"3-month\".to_string(),\n\n \"Rank F: Year-to-Date (YTD) Performance\" => data.rank = \"year-to-date\".to_string(),\n\n \"Rank G: 1 Year Performance\" => data.rank = \"1-year\".to_string(),\n\n \"Rank H: 3 Year Performance\" => data.rank = \"3-year\".to_string(),\n", "file_path": "src/sector.rs", "rank": 88, "score": 31.19864507776787 }, { "content": "pub struct DataValue {\n\n #[serde(rename = \"1. symbol\")]\n\n symbol: String,\n\n #[serde(rename = \"2. name\")]\n\n name: String,\n\n #[serde(rename = \"3. type\")]\n\n stock_type: String,\n\n #[serde(rename = \"4. region\")]\n\n region: String,\n\n #[serde(rename = \"5. marketOpen\")]\n\n market_open: String,\n\n #[serde(rename = \"6. marketClose\")]\n\n market_close: String,\n\n #[serde(rename = \"7. timezone\")]\n\n time_zone: String,\n\n #[serde(rename = \"8. currency\")]\n\n currency: String,\n\n #[serde(rename = \"9. matchScore\", deserialize_with = \"from_str\")]\n\n match_score: f64,\n\n}\n", "file_path": "src/search.rs", "rank": 89, "score": 31.13397358094435 }, { "content": "//! Module for returning latest price and volume information\n\n//!\n\n//! A lightweight alternative to the time series APIs, this service returns the\n\n//! latest price and volume information for a security of your choice.\n\n//!\n\n//! You can read about [Quote][quote] API and what it returns\n\n//! on alphavantage documentation\n\n//!\n\n//! 
[quote]: https://www.alphavantage.co/documentation/#latestprice\n\n\n\nuse serde::Deserialize;\n\n\n\nuse crate::{\n\n api::ApiClient,\n\n deserialize::{from_str, percent_f64},\n\n error::{detect_common_helper_error, Error, Result},\n\n};\n\n\n\n/// Struct storing Global Quote Value\n\n#[derive(Debug, Deserialize, Clone, Default)]\n", "file_path": "src/quote.rs", "rank": 90, "score": 31.10322168123458 }, { "content": " time_period: Option<u64>,\n\n series_type: Option<&'a str>,\n\n extra_params: HashMap<String, String>,\n\n}\n\n\n\nimpl<'a> IndicatorBuilder<'a> {\n\n /// Create new `IndicatorBuilder` form `APIClient`\n\n #[must_use]\n\n pub fn new(\n\n api_client: &'a ApiClient,\n\n function: &'a str,\n\n symbol: &'a str,\n\n interval: TechnicalIndicatorInterval,\n\n ) -> Self {\n\n Self {\n\n api_client,\n\n function,\n\n symbol,\n\n interval,\n\n time_period: None,\n", "file_path": "src/technical_indicator.rs", "rank": 91, "score": 30.883083974442393 }, { "content": " #[serde(rename = \"Error Message\")]\n\n error_message: Option<String>,\n\n #[serde(rename = \"Information\")]\n\n information: Option<String>,\n\n #[serde(rename = \"Note\")]\n\n note: Option<String>,\n\n #[serde(rename = \"Realtime Currency Exchange Rate\")]\n\n real_time: Option<RealtimeExchangeRate>,\n\n}\n\n\n\nimpl ExchangeHelper {\n\n pub(crate) fn convert(self) -> Result<Exchange> {\n\n let mut exchange = Exchange::default();\n\n detect_common_helper_error(self.information, self.error_message, self.note)?;\n\n if self.real_time.is_none() {\n\n return Err(Error::EmptyResponse);\n\n }\n\n exchange.real_time = self.real_time.unwrap();\n\n Ok(exchange)\n\n }\n", "file_path": "src/exchange.rs", "rank": 92, "score": 30.63024272281464 }, { "content": "//! Module for searching specific symbol or companies\n\n//!\n\n//! Looking for some specific symbols or companies? Trying to build a search box\n\n//! similar to the one below?\n\n//!\n\n//! 
You can read about [Symbol][symbol_search] API and what it returns\n\n//! on alphavantage documentation\n\n//!\n\n//! [symbol_search]: https://www.alphavantage.co/documentation/#symbolsearch\n\n\n\nuse serde::Deserialize;\n\n\n\nuse crate::{\n\n api::ApiClient,\n\n deserialize::from_str,\n\n error::{detect_common_helper_error, Error, Result},\n\n};\n\n\n\n/// Struct which stores matches data for search keyword\n\n#[derive(Debug, Clone, Deserialize, Default)]\n", "file_path": "src/search.rs", "rank": 93, "score": 30.582148152456348 }, { "content": " pub fn name_to(&self) -> &str {\n\n &self.real_time.to_name\n\n }\n\n\n\n /// get bid price. Returns None if no bid price\n\n #[must_use]\n\n pub fn bid_price(&self) -> Option<f64> {\n\n self.real_time.bid_price.trim().parse::<f64>().ok()\n\n }\n\n\n\n /// get ask price. Return None if no ask price\n\n #[must_use]\n\n pub fn ask_price(&self) -> Option<f64> {\n\n self.real_time.ask_price.trim().parse::<f64>().ok()\n\n }\n\n}\n\n\n\n/// Struct used for helping creation of Exchange\n\n#[derive(Debug, Deserialize)]\n\npub(crate) struct ExchangeHelper {\n", "file_path": "src/exchange.rs", "rank": 94, "score": 30.420945568990312 }, { "content": " change_percent: f64,\n\n}\n\n\n\n/// Struct for storing Quote related information\n\n#[derive(Default)]\n\npub struct Quote {\n\n global_quote: GlobalQuote,\n\n}\n\n\n\nimpl Quote {\n\n /// return open value\n\n #[must_use]\n\n pub fn open(&self) -> f64 {\n\n self.global_quote.open\n\n }\n\n\n\n /// return high value\n\n #[must_use]\n\n pub fn high(&self) -> f64 {\n\n self.global_quote.high\n", "file_path": "src/quote.rs", "rank": 95, "score": 29.873180828428854 }, { "content": " /// Return value of surprise. return None if api return none\n\n #[must_use]\n\n pub fn surprise(&self) -> Option<f64> {\n\n self.surprise\n\n }\n\n\n\n /// Return surprise percentage for symbol quarterly earning. 
Return None if\n\n /// api return None\n\n #[must_use]\n\n pub fn surprise_percentage(&self) -> Option<f64> {\n\n self.surprise_percentage\n\n }\n\n}\n\n\n\n/// Struct to store earning for symbol\n\n#[derive(Debug, Default)]\n\npub struct Earning {\n\n symbol: String,\n\n annual_earning: Vec<Annual>,\n\n quarterly_earning: Vec<Quarterly>,\n", "file_path": "src/earning.rs", "rank": 96, "score": 29.525131909268758 }, { "content": " reported_eps: f64,\n\n}\n\n\n\nimpl Annual {\n\n /// Return annual earning fiscal date ending\n\n #[must_use]\n\n pub fn fiscal_date_ending(&self) -> &str {\n\n &self.fiscal_date_ending\n\n }\n\n\n\n /// Return reported eps for annual earning\n\n #[must_use]\n\n pub fn reported_eps(&self) -> f64 {\n\n self.reported_eps\n\n }\n\n}\n\n\n\n/// Struct to store information of quarterly earning\n\n#[derive(Debug, Deserialize, Clone, Default)]\n\npub struct Quarterly {\n", "file_path": "src/earning.rs", "rank": 97, "score": 29.35593476498269 }, { "content": "//! Module for exchange currency (both digital & physical currency exchange)\n\n//!\n\n//! This API returns the realtime exchange rate for any pair of digital\n\n//! currency (e.g., Bitcoin) or physical currency (e.g., USD).\n\n//!\n\n//! You can read about [Exchange][exchange] API and what it returns\n\n//! on alphavantage documentation\n\n//!\n\n//! 
[exchange]: https://www.alphavantage.co/documentation/#currency-exchnage\n\n\n\nuse serde::Deserialize;\n\n\n\nuse crate::{\n\n api::ApiClient,\n\n deserialize::from_str,\n\n error::{detect_common_helper_error, Error, Result},\n\n};\n\n\n\n/// Struct used for exchanging currency\n\n#[derive(Default)]\n\npub struct Exchange {\n\n real_time: RealtimeExchangeRate,\n\n}\n\n\n\n/// Struct Storing Real time Exchange Value\n\n#[derive(Debug, Deserialize, Clone, Default)]\n", "file_path": "src/exchange.rs", "rank": 98, "score": 28.57061882194902 }, { "content": " pub fn usd_high(&self) -> f64 {\n\n self.usd_high\n\n }\n\n\n\n /// Return market low value\n\n #[must_use]\n\n pub fn market_low(&self) -> f64 {\n\n self.market_low\n\n }\n\n\n\n /// Return usd low value\n\n #[must_use]\n\n pub fn usd_low(&self) -> f64 {\n\n self.usd_low\n\n }\n\n\n\n /// Return market close value\n\n #[must_use]\n\n pub fn market_close(&self) -> f64 {\n\n self.market_close\n", "file_path": "src/crypto.rs", "rank": 99, "score": 28.330798406153068 } ]
Rust
src/connectivity/wlan/lib/common/rust/src/test_utils/fake_stas.rs
DamieFC/fuchsia
f78a4a1326f4a4bb5834500918756173c01bab4f
use { crate::{ ie::{self, IeType}, mac, test_utils::fake_frames::{ fake_eap_rsne, fake_wpa1_ie, fake_wpa2_enterprise_rsne, fake_wpa2_legacy_rsne, fake_wpa2_mixed_rsne, fake_wpa2_rsne, fake_wpa2_wpa3_rsne, fake_wpa3_enterprise_192_bit_rsne, fake_wpa3_rsne, fake_wpa3_transition_rsne, }, }, anyhow::Context, fidl_fuchsia_wlan_common as fidl_common, fidl_fuchsia_wlan_internal as fidl_internal, }; #[rustfmt::skip] const DEFAULT_MOCK_IES: &'static [u8] = &[ 0x03, 0x01, 0x8c, 0x05, 0x04, 0x00, 0x01, 0x00, 0x02, 0x07, 0x10, 0x55, 0x53, 0x20, 0x24, 0x04, 0x24, 0x34, 0x04, 0x1e, 0x64, 0x0c, 0x1e, 0x95, 0x05, 0x24, 0x00, 0x20, 0x01, 0x00, 0x23, 0x02, 0x09, 0x00, 0x2d, 0x1a, 0xef, 0x09, 0x17, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3d, 0x16, 0x8c, 0x0d, 0x16, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7f, 0x08, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x40, 0xbf, 0x0c, 0x91, 0x59, 0x82, 0x0f, 0xea, 0xff, 0x00, 0x00, 0xea, 0xff, 0x00, 0x00, 0xc0, 0x05, 0x00, 0x00, 0x00, 0x00, 0x00, 0xc3, 0x03, 0x01, 0x24, 0x24, 0xdd, 0x07, 0x00, 0x0b, 0x86, 0x01, 0x04, 0x08, 0x09, 0xdd, 0x18, 0x00, 0x50, 0xf2, 0x02, 0x01, 0x01, 0x80, 0x00, 0x03, 0xa4, 0x00, 0x00, 0x27, 0xa4, 0x00, 0x00, 0x42, 0x43, 0x5e, 0x00, 0x62, 0x32, 0x2f, 0x00, ]; pub struct BssCreator { pub bssid: [u8; 6], pub bss_type: fidl_internal::BssTypes, pub beacon_period: u16, pub timestamp: u64, pub local_time: u64, pub cap: u16, pub chan: fidl_fuchsia_wlan_common::WlanChan, pub rssi_dbm: i8, pub snr_db: i8, pub protection_cfg: FakeProtectionCfg, pub ssid: Vec<u8>, pub rates: Vec<u8>, pub ies_overrides: IesOverrides, } impl BssCreator { pub fn create_bss(self) -> Result<fidl_internal::BssDescription, anyhow::Error> { let mut ies_updater = ie::IesUpdater::new(DEFAULT_MOCK_IES.to_vec()); ies_updater.set(IeType::SSID, &self.ssid[..]).context("set SSID")?; let 
rates_writer = ie::RatesWriter::try_new(&self.rates[..]).context("set rates")?; let mut rates_buf = vec![]; rates_writer.write_supported_rates(&mut rates_buf); ies_updater.set_raw(&rates_buf[..]).context("set rates")?; let mut ext_rates_buf = vec![]; rates_writer.write_ext_supported_rates(&mut ext_rates_buf); ies_updater.set_raw(&ext_rates_buf[..]).context("set extended rates")?; if let Some(rsne) = derive_rsne(self.protection_cfg) { ies_updater.set_raw(&rsne[..]).context("set RSNE")?; } if let Some(wpa1_vendor_ie) = derive_wpa1_vendor_ies(self.protection_cfg) { ies_updater.set_raw(&wpa1_vendor_ie[..]).context("set WPA1 vendor IE")?; } for ovr in self.ies_overrides.overrides { match ovr { IeOverride::Remove(ie_type) => ies_updater.remove(&ie_type), IeOverride::Set(ie_type, bytes) => { ies_updater .set(ie_type, &bytes[..]) .with_context(|| format!("set IE type: {:?}", ie_type))?; } } } Ok(fidl_internal::BssDescription { bssid: self.bssid, bss_type: self.bss_type, beacon_period: self.beacon_period, timestamp: self.timestamp, local_time: self.local_time, cap: self.cap, chan: self.chan, rssi_dbm: self.rssi_dbm, snr_db: self.snr_db, ies: ies_updater.finalize(), }) } } pub struct IesOverrides { overrides: Vec<IeOverride>, } impl IesOverrides { pub fn new() -> Self { Self { overrides: vec![] } } pub fn remove(mut self, ie_type: IeType) -> Self { self.overrides.push(IeOverride::Remove(ie_type)); self } pub fn set(mut self, ie_type: IeType, bytes: Vec<u8>) -> Self { self.overrides.push(IeOverride::Set(ie_type, bytes)); self } } enum IeOverride { Remove(IeType), Set(IeType, Vec<u8>), } #[derive(Debug, Copy, Clone, PartialEq)] pub enum FakeProtectionCfg { Open, Wep, Wpa1, Wpa1Enhanced, Wpa2Legacy, Wpa1Wpa2, Wpa2Mixed, Wpa2Enterprise, Wpa2, Wpa2Wpa3, Wpa3Transition, Wpa3, Wpa3Enterprise, Wpa2NoPrivacy, Eap, } pub fn build_fake_bss_creator__(protection_cfg: FakeProtectionCfg) -> BssCreator { BssCreator { bssid: [7, 1, 2, 77, 53, 8], bss_type: 
fidl_internal::BssTypes::Infrastructure, beacon_period: 100, timestamp: 0, local_time: 0, chan: fidl_common::WlanChan { primary: 3, secondary80: 0, cbw: fidl_common::Cbw::Cbw40 }, rssi_dbm: 0, snr_db: 0, cap: mac::CapabilityInfo(0) .with_privacy(match protection_cfg { FakeProtectionCfg::Open | FakeProtectionCfg::Wpa2NoPrivacy => false, _ => true, }) .0, protection_cfg, ssid: b"fake-ssid".to_vec(), rates: vec![0x82, 0x84, 0x8b, 0x96, 0x0c, 0x12, 0x18, 0x24, 0x30, 0x48, 0x60, 0x6c], ies_overrides: IesOverrides::new(), } } fn derive_rsne(protection_cfg: FakeProtectionCfg) -> Option<Vec<u8>> { match protection_cfg { FakeProtectionCfg::Wpa3Enterprise => Some(fake_wpa3_enterprise_192_bit_rsne()), FakeProtectionCfg::Wpa2Enterprise => Some(fake_wpa2_enterprise_rsne()), FakeProtectionCfg::Wpa3 => Some(fake_wpa3_rsne()), FakeProtectionCfg::Wpa3Transition => Some(fake_wpa3_transition_rsne()), FakeProtectionCfg::Wpa2Wpa3 => Some(fake_wpa2_wpa3_rsne()), FakeProtectionCfg::Wpa2Mixed => Some(fake_wpa2_mixed_rsne()), FakeProtectionCfg::Wpa2Legacy => Some(fake_wpa2_legacy_rsne()), FakeProtectionCfg::Wpa1Wpa2 | FakeProtectionCfg::Wpa2 | FakeProtectionCfg::Wpa2NoPrivacy => Some(fake_wpa2_rsne()), FakeProtectionCfg::Eap => Some(fake_eap_rsne()), _ => None, } } fn derive_wpa1_vendor_ies(protection_cfg: FakeProtectionCfg) -> Option<Vec<u8>> { match protection_cfg { FakeProtectionCfg::Wpa1 | FakeProtectionCfg::Wpa1Wpa2 => Some(fake_wpa1_ie(false)), FakeProtectionCfg::Wpa1Enhanced => Some(fake_wpa1_ie(true)), _ => None, } } #[macro_export] macro_rules! fake_fidl_bss { ($protection_type:ident $(, $bss_key:ident: $bss_value:expr)* $(,)?) => {{ let bss_creator = $crate::test_utils::fake_stas::BssCreator { $( $bss_key: $bss_value, )* ..$crate::test_utils::fake_stas::build_fake_bss_creator__($crate::test_utils::fake_stas::FakeProtectionCfg::$protection_type) }; let fidl_bss = bss_creator.create_bss().expect("expect creating BSS to succeed"); fidl_bss }} } #[macro_export] macro_rules! 
fake_bss { ($protection_type:ident $(, $bss_key:ident: $bss_value:expr)* $(,)?) => {{ let fidl_bss = $crate::fake_fidl_bss!($protection_type $(, $bss_key: $bss_value)*); let bss = $crate::bss::BssDescription::from_fidl(fidl_bss) .expect("expect BSS conversion to succeed"); bss }} } #[cfg(tests)] mod tests { use super::*; #[test] fn test_fake_bss_macro_ies() { let bss = fake_bss!(Wpa1Wpa2, ssid: b"fuchsia".to_vec(), rates: vec![11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24], ies_overrides: IesOverrides::new() .remove(IeType::new_vendor([0x00, 0x0b, 0x86, 0x01, 0x04, 0x08])) .set(IeType::DSSS_PARAM_SET, &[136]), ); #[rustfmt::skip] let expected_ies = vec![ 0x00, 0x07, b'f', b'u', b'c', b'h', b's', b'i', b'a', 0x01, 0x08, 11, 12, 13, 14, 15, 16, 17, 18, 0x03, 0x01, 136, 0x05, 0x04, 0x00, 0x01, 0x00, 0x02, 0x07, 0x10, 0x55, 0x53, 0x20, 0x24, 0x04, 0x24, 0x34, 0x04, 0x1e, 0x64, 0x0c, 0x1e, 0x95, 0x05, 0x24, 0x00, 0x20, 0x01, 0x00, 0x23, 0x02, 0x09, 0x00, 0x2d, 0x1a, 0xef, 0x09, 0x17, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x30, 18, 1, 0, 0x00, 0x0F, 0xAC, 4, 1, 0, 0x00, 0x0F, 0xAC, 4, 1, 0, 0x00, 0x0F, 0xAC, 2, 0x32, 0x06, 19, 20, 21, 22, 23, 24, 0x3d, 0x16, 0x8c, 0x0d, 0x16, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7f, 0x08, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x40, 0xbf, 0x0c, 0x91, 0x59, 0x82, 0x0f, 0xea, 0xff, 0x00, 0x00, 0xea, 0xff, 0x00, 0x00, 0xc0, 0x05, 0x00, 0x00, 0x00, 0x00, 0x00, 0xc3, 0x03, 0x01, 0x24, 0x24, 0xdd, 0x16, 0x00, 0x50, 0xf2, 0x01, 0x01, 0x00, 0x00, 0x50, 0xf2, 0x02, 0x01, 0x00, 0x00, 0x50, 0xf2, 0x02, 0x01, 0x00, 0x00, 0x50, 0xf2, 0x02, 0xdd, 0x18, 0x00, 0x50, 0xf2, 0x02, 0x01, 0x01, 0x80, 0x00, 0x03, 0xa4, 0x00, 0x00, 0x27, 0xa4, 0x00, 0x00, 0x42, 0x43, 0x5e, 0x00, 0x62, 0x32, 0x2f, 0x00, ]; assert_eq!(bss.ies, expected_ies); } }
use { crate::{ ie::{self, IeType}, mac, test_utils::fake_frames::{ fake_eap_rsne, fake_wpa1_ie, fake_wpa2_enterprise_rsne, fake_wpa2_legacy_rsne, fake_wpa2_mixed_rsne, fake_wpa2_rsne, fake_wpa2_wpa3_rsne, fake_wpa3_enterprise_192_bit_rsne, fake_wpa3_rsne, fake_wpa3_transition_rsne, }, }, anyhow::Context, fidl_fuchsia_wlan_common as fidl_common, fidl_fuchsia_wlan_internal as fidl_internal, }; #[rustfmt::skip] const DEFAULT_MOCK_IES: &'static [u8] = &[ 0x03, 0x01, 0x8c, 0x05, 0x04, 0x00, 0x01, 0x00, 0x02, 0x07, 0x10, 0x55, 0x53, 0x20, 0x24, 0x04, 0x24, 0x34, 0x04, 0x1e, 0x64, 0x0c, 0x1e, 0x95, 0x05, 0x24, 0x00, 0x20, 0x01, 0x00, 0x23, 0x02, 0x09, 0x00, 0x2d, 0x1a, 0xef, 0x09, 0x17, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x3d, 0x16, 0x8c, 0x0d, 0x16, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7f, 0x08, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x40, 0xbf, 0x0c, 0x91, 0x59, 0x82, 0x0f, 0xea, 0xff, 0x00, 0x00, 0xea, 0xff, 0x00, 0x00, 0xc0, 0x05, 0x00, 0x00, 0x00, 0x00, 0x00, 0xc3, 0x03, 0x01, 0x24, 0x24, 0xdd, 0x07, 0x00, 0x0b, 0x86, 0x01, 0x04, 0x08, 0x09, 0xdd, 0x18, 0x00, 0x50, 0xf2, 0x02, 0x01, 0x01, 0x80, 0x00, 0x03, 0xa4, 0x00, 0x00, 0x27, 0xa4, 0x00, 0x00, 0x42, 0x43, 0x5e, 0x00, 0x62, 0x32, 0x2f, 0x00, ]; pub struct BssCreator { pub bssid: [u8; 6], pub bss_type: fidl_internal::BssTypes, pub beacon_period: u16, pub timestamp: u64, pub local_time: u64, pub cap: u16, pub chan: fidl_fuchsia_wlan_common::WlanChan, pub rssi_dbm: i8, pub snr_db: i8, pub protection_cfg: FakeProtectionCfg, pub ssid: Vec<u8>, pub rates: Vec<u8>, pub ies_overrides: IesOverrides, } impl BssCreator { pub fn create_bss(self) -> Result<fidl_internal::BssDescription, anyhow::Error> { let mut ies_updater = ie::IesUpdater::new(DEFAULT_MOCK_IES.to_vec()); ies_updater.set(IeType::SSID, &self.ssid[..]).context("set SSID")?; let 
rates_writer = ie::RatesWriter::try_new(&self.rates[..]).context("set rates")?; let mut rates_buf = vec![]; rates_writer.write_supported_rates(&mut rates_buf); ies_updater.set_raw(&rates_buf[..]).context("set rates")?; let mut ext_rates_buf = vec![]; rates_writer.write_ext_supported_rates(&mut ext_rates_buf); ies_updater.set_raw(&ext_rates_buf[..]).context("set extended rates")?; if let Some(rsne) = derive_rsne(self.protection_cfg) { ies_updater.set_raw(&rsne[..]).context("set RSNE")?; } if let Some(wpa1_vendor_ie) = derive_wpa1_vendor_ies(self.protection_cfg) { ies_updater.set_raw(&wpa1_vendor_ie[..]).context("set WPA1 vendor IE")?; } for ovr in self.ies_overrides.overrides { match ovr { IeOverride::Remove(ie_type) => ies_updater.remove(&ie_type), IeOverride::Set(ie_type, bytes) => { ies_updater .set(ie_type, &bytes[..]) .with_context(|| format!("set IE type: {:?}", ie_type))?; } } } Ok(fidl_internal::BssDescription { bssid: self.bssid, bss_type: self.bss_type, beacon_period: self.beacon_period, timestamp: self.timestamp, local_time: self.local_time, cap: self.cap, chan: self.chan, rssi_dbm: self.rssi_dbm, snr_db: self.snr_db, ies: ies_updater.finalize(), }) } } pub struct IesOverrides { overrides: Vec<IeOverride>, } impl IesOverrides { pub fn new() -> Self { Self { overrides: vec![] } } pub fn remove(mut self, ie_type: IeType) -> Self { self.overrides.push(IeOverride::Remove(ie_type)); self } pub fn set(mut self, ie_type: IeType, bytes: Vec<u8>) -> Self { self.overrides.push(IeOverride::Set(ie_type, bytes)); self } } enum IeOverride { Remove(IeType), Set(IeType, Vec<u8>), } #[derive(Debug, Copy, Clone, PartialEq)] pub enum FakeProtectionCfg { Open, Wep, Wpa1, Wpa1Enhanced, Wpa2Legacy, Wpa1Wpa2, Wpa2Mixed, Wpa2Enterprise, Wpa2, Wpa2Wpa3, Wpa3Transition, Wpa3, Wpa3Enterprise, Wpa2NoPrivacy, Eap, } pub fn build_fake_bss_creator__(protection_cfg: FakeProtectionCfg) -> BssCreator { BssCreator { bssid: [7, 1, 2, 77, 53, 8], bss_type: 
fidl_internal::BssTypes::Infrastructure, beacon_period: 100, timestamp: 0, local_time: 0, chan: fidl_common::WlanChan { primary: 3, secondary80: 0, cbw: fidl_common::Cbw::Cbw40 }, rssi_dbm: 0, snr_db: 0, cap: mac::CapabilityInfo(0) .with_privacy(match protection_cfg { FakeProtectionCfg::Open | FakeProtectionCfg::Wpa2NoPrivacy => false, _ => true, }) .0, protection_cfg, ssid: b"fake-ssid".to_vec(), rates: vec![0x82, 0x84, 0x8b, 0x96, 0x0c, 0x12, 0x18, 0x24, 0x30, 0x48, 0x60, 0x6c], ies_overrides: IesOverrides::new(), } } fn derive_rsne(protection_cfg: FakeProtectionCfg) -> Option<Vec<u8>> { match protection_cfg { FakeProtectionCfg::Wpa3Enterprise => Some(fake_wpa3_enterprise_192_bit_rsne()), FakeProtectionCfg::Wpa2Enterprise => Some(fake_wpa2_enterprise_rsne()), FakeProtectionCfg::Wpa3 => Some(fake_wpa3_rsne()), FakeProtectionCfg::Wpa3Transition => Some(fake_wpa3_transition_rsne()), FakeProtectionCfg::Wpa2Wpa3 => Some(fake_wpa2_wpa3_rsne()), FakeProtectionCfg::Wpa2Mixed => Some(fake_wpa2_mixed_rsne()), FakeProtectionCfg::Wpa2Legacy => Some(fake_wpa2_legacy_rsne()), FakeProtectionCfg::Wpa1Wpa2 | FakeProtectionCfg::Wpa2 | FakeProtectionCfg::Wpa2NoPrivacy => Some(fake_wpa2_rsne()), FakeProtectionCfg::Eap => Some(fake_eap_rsne()), _ => None, } } fn derive_wpa1_vendor_ies(protection_cfg: FakeProtectionCfg) -> Option<Vec<u8>> { match protection_cfg { FakeProtectionCfg::Wpa1 | FakeProtectionCfg::Wpa1Wpa2 => Some(fake_wpa1_ie(false)), FakeProtectionCfg::Wpa1Enhanced => Some(fake_wpa1_ie(true)), _ => None, } } #[macro_export] macro_rules! fake_fidl_bss { ($protection_type:ident $(, $bss_key:ident: $bss_value:expr)* $(,)?) => {{ let bss_creator = $crate::test_utils::fake_stas::BssCreator { $( $bss_key: $bss_value, )* ..$crate::test_utils::fake_stas::build_fake_bss_creator__($crate::test_utils::fake_stas::FakeProtectionCfg::$protection_type) }; let fidl_bss = bss_creator.create_bss().expect("expect creating BSS to succeed"); fidl_bss }} } #[macro_export] macro_rules! 
fake_bss { ($protection_type
0x16, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x7f, 0x08, 0x04, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x40, 0xbf, 0x0c, 0x91, 0x59, 0x82, 0x0f, 0xea, 0xff, 0x00, 0x00, 0xea, 0xff, 0x00, 0x00, 0xc0, 0x05, 0x00, 0x00, 0x00, 0x00, 0x00, 0xc3, 0x03, 0x01, 0x24, 0x24, 0xdd, 0x16, 0x00, 0x50, 0xf2, 0x01, 0x01, 0x00, 0x00, 0x50, 0xf2, 0x02, 0x01, 0x00, 0x00, 0x50, 0xf2, 0x02, 0x01, 0x00, 0x00, 0x50, 0xf2, 0x02, 0xdd, 0x18, 0x00, 0x50, 0xf2, 0x02, 0x01, 0x01, 0x80, 0x00, 0x03, 0xa4, 0x00, 0x00, 0x27, 0xa4, 0x00, 0x00, 0x42, 0x43, 0x5e, 0x00, 0x62, 0x32, 0x2f, 0x00, ]; assert_eq!(bss.ies, expected_ies); } }
:ident $(, $bss_key:ident: $bss_value:expr)* $(,)?) => {{ let fidl_bss = $crate::fake_fidl_bss!($protection_type $(, $bss_key: $bss_value)*); let bss = $crate::bss::BssDescription::from_fidl(fidl_bss) .expect("expect BSS conversion to succeed"); bss }} } #[cfg(tests)] mod tests { use super::*; #[test] fn test_fake_bss_macro_ies() { let bss = fake_bss!(Wpa1Wpa2, ssid: b"fuchsia".to_vec(), rates: vec![11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24], ies_overrides: IesOverrides::new() .remove(IeType::new_vendor([0x00, 0x0b, 0x86, 0x01, 0x04, 0x08])) .set(IeType::DSSS_PARAM_SET, &[136]), ); #[rustfmt::skip] let expected_ies = vec![ 0x00, 0x07, b'f', b'u', b'c', b'h', b's', b'i', b'a', 0x01, 0x08, 11, 12, 13, 14, 15, 16, 17, 18, 0x03, 0x01, 136, 0x05, 0x04, 0x00, 0x01, 0x00, 0x02, 0x07, 0x10, 0x55, 0x53, 0x20, 0x24, 0x04, 0x24, 0x34, 0x04, 0x1e, 0x64, 0x0c, 0x1e, 0x95, 0x05, 0x24, 0x00, 0x20, 0x01, 0x00, 0x23, 0x02, 0x09, 0x00, 0x2d, 0x1a, 0xef, 0x09, 0x17, 0xff, 0xff, 0xff, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x30, 18, 1, 0, 0x00, 0x0F, 0xAC, 4, 1, 0, 0x00, 0x0F, 0xAC, 4, 1, 0, 0x00, 0x0F, 0xAC, 2, 0x32, 0x06, 19, 20, 21, 22, 23, 24, 0x3d, 0x16, 0x8c, 0x0d,
random
[]
Rust
src/port.rs
carlosmn/gphoto-rs
faf9dbb972c5d66f77d1265cd959314e6283fae1
use std::borrow::Cow; use std::ffi::{CStr, CString}; use std::marker::PhantomData; use std::mem; use ::libc::c_void; #[derive(Debug,PartialEq,Eq,Clone,Copy,Hash)] pub enum PortType { Serial, USB, Disk, PTPIP, Direct, SCSI, Other, } pub struct Port<'a> { inner: ::gphoto2::GPPortInfo, __phantom: PhantomData<&'a c_void>, } impl<'a> Port<'a> { pub fn port_type(&self) -> PortType { let mut port_type = mem::MaybeUninit::uninit(); unsafe { assert_eq!(::gphoto2::GP_OK, ::gphoto2::gp_port_info_get_type(self.inner, port_type.as_mut_ptr())); } match unsafe { port_type.assume_init() } { ::gphoto2::GP_PORT_SERIAL => PortType::Serial, ::gphoto2::GP_PORT_USB => PortType::USB, ::gphoto2::GP_PORT_DISK => PortType::Disk, ::gphoto2::GP_PORT_PTPIP => PortType::PTPIP, ::gphoto2::GP_PORT_USB_DISK_DIRECT => PortType::Direct, ::gphoto2::GP_PORT_USB_SCSI => PortType::SCSI, ::gphoto2::GP_PORT_NONE | _ => PortType::Other, } } pub fn name(&self) -> Cow<str> { let mut name = mem::MaybeUninit::uninit(); unsafe { assert_eq!(::gphoto2::GP_OK, ::gphoto2::gp_port_info_get_name(self.inner, name.as_mut_ptr())); String::from_utf8_lossy(CStr::from_ptr(name.assume_init()).to_bytes()) } } pub fn path(&self) -> Cow<str> { let mut path = mem::MaybeUninit::uninit(); unsafe { assert_eq!(::gphoto2::GP_OK, ::gphoto2::gp_port_info_get_path(self.inner, path.as_mut_ptr())); String::from_utf8_lossy(CStr::from_ptr(path.assume_init()).to_bytes()) } } #[doc(hidden)] pub fn as_ptr(&self) -> ::gphoto2::GPPortInfo { self.inner } } #[doc(hidden)] pub fn from_libgphoto2(_camera: & ::camera::Camera, ptr: ::gphoto2::GPPortInfo) -> Port { Port { inner: ptr, __phantom: PhantomData, } } #[doc(hidden)] pub fn from_libgphoto2_list(_camera: & PortList, ptr: ::gphoto2::GPPortInfo) -> Port { Port { inner: ptr, __phantom: PhantomData, } } #[repr(transparent)] pub struct PortList(*mut ::gphoto2::GPPortInfoList); impl Drop for PortList { fn drop(&mut self) { unsafe { ::gphoto2::gp_port_info_list_free(self.0); } } } impl PortList { pub 
fn new() -> ::Result<Self> { let mut list = mem::MaybeUninit::uninit(); try_unsafe!(::gphoto2::gp_port_info_list_new(list.as_mut_ptr())); let list = unsafe { list.assume_init() }; Ok(PortList(list as *mut _)) } pub fn load(&mut self) -> ::Result<()> { try_unsafe!(::gphoto2::gp_port_info_list_load(self.as_mut_ptr())); Ok(()) } pub fn lookup_name(&mut self, name: &str) -> ::Result<usize> { let cname = match CString::new(name) { Ok(s) => s, Err(_) => return Err(::error::from_libgphoto2(::gphoto2::GP_ERROR_BAD_PARAMETERS)), }; let idx = match unsafe { ::gphoto2::gp_port_info_list_lookup_name(self.as_mut_ptr(), cname.as_ptr()) } { idx if idx >= 0 => idx, err => return Err(::error::from_libgphoto2(err)), }; Ok(idx as usize) } pub fn lookup_path(&mut self, path: &str) -> ::Result<usize> { let cpath = match CString::new(path) { Ok(s) => s, Err(_) => return Err(::error::from_libgphoto2(::gphoto2::GP_ERROR_BAD_PARAMETERS)), }; let idx = match unsafe { ::gphoto2::gp_port_info_list_lookup_path(self.as_mut_ptr(), cpath.as_ptr()) } { idx if idx >= 0 => idx, err => return Err(::error::from_libgphoto2(err)), }; Ok(idx as usize) } pub fn get(&mut self, i: usize) -> ::Result<Port> { let i = i as libc::c_int; let mut cport = mem::MaybeUninit::uninit(); try_unsafe! { ::gphoto2::gp_port_info_list_get_info(self.0, i, cport.as_mut_ptr()) }; let cport = unsafe { cport.assume_init() }; Ok(from_libgphoto2_list(self, cport)) } fn as_mut_ptr(&mut self) -> *mut ::gphoto2::GPPortInfoList { self.0 } pub fn len(&mut self) -> usize { let l = unsafe { ::gphoto2::gp_port_info_list_count(self.0) }; if l < 0 { panic!(); } l as usize } }
use std::borrow::Cow; use std::ffi::{CStr, CString}; use std::marker::PhantomData; use std::mem; use ::libc::c_void; #[derive(Debug,PartialEq,Eq,Clone,Copy,Hash)] pub enum PortType { Serial, USB, Disk, PTPIP, Direct, SCSI, Other, } pub struct Port<'a> { inner: ::gphoto2::GPPortInfo, __phantom: PhantomData<&'a c_void>, } impl<'a> Port<'a> { pub fn port_type(&self) -> PortType { let mut port_type = mem::MaybeUninit::uninit(); unsafe { assert_eq!(::gphoto2::GP_OK, ::gphoto2::gp_port_info_get_type(self.inner, port_type.as_mut_ptr())); } match unsafe { port_type.assume_init() } { ::gphoto2::GP_PORT_SERIAL => PortType::Serial, ::gphoto2::GP_PORT_USB => PortType::USB, ::gphoto2::GP_PORT_DISK => PortType::Disk, ::gphoto2::GP_PORT_PTPIP => PortType::PTPIP, ::gphoto2::GP_PORT_USB_DISK_DIRECT => PortType::Direct, ::gphoto2::GP_PORT_USB_SCSI => PortType::SCSI, ::gphoto2::GP_PORT_NONE | _ => PortType::Other, } } pub fn name(&self) -> Cow<str> { let mut name = mem::MaybeUninit::uninit(); unsafe { assert_eq!(::gphoto2::GP_OK, ::gphoto2::gp_port_info_get_name(self.inner, name.as_mut_ptr())); String::from_utf8_lossy(CStr::from_ptr(name.assume_init()).to_bytes()) } } pub fn path(&self) -> Cow<str> { let mut path = mem::MaybeUninit::uninit(); unsafe { assert_eq!(::gphoto2::GP_OK, ::gphoto2::gp_port_info_get_path(self.inner, path.as_mut_ptr())); String::from_utf8_lossy(CStr::from_ptr(path.assume_init()).to_bytes()) } } #[doc(hidden)] pub fn as_ptr(&self) -> ::gphoto2::GPPortInfo { self.inner } } #[doc(hidden)] pub fn from_libgphoto2(_camera: & ::camera::Camera, ptr: ::gphoto2::GPPortInfo) -> Port { Port { inner: ptr, __phantom: PhantomData, } } #[doc(hidden)] pub fn from_libgphoto2_list(_camera: & PortList, ptr: ::gphoto2::GPPortInfo) -> Port { Port { inner: ptr, __phantom: PhantomData, } } #[repr(transparent)] pub struct PortList(*mut ::gphoto2::GPPortInfoList); impl Drop for PortList { fn drop(&mut self) { unsafe { ::gphoto2::gp_port_info_list_free(self.0); } } } impl PortList { pub 
fn new() -> ::Result<Self> { let mut list = mem::MaybeUninit::uninit(); try_unsafe!(::gphoto2::gp_port_info_list_new(list.as_mut_ptr())); let list = unsafe { list.assume_init() }; Ok(PortList(list as *mut _)) } pub fn load(&mut self) -> ::Result<()> { try_unsafe!(::gphoto2::gp_port_info_list_load(self.as_mut_ptr())); Ok(()) } pub fn lookup_name(&mut self, name: &str) -> ::Result<usize> { let cname = match CString::new(name) { Ok(s) => s, Err(_) => return Err(::error::from_libgphoto2(::gphoto2::GP_ERROR_BAD_PARAMETERS)), }; let idx =
; Ok(idx as usize) } pub fn lookup_path(&mut self, path: &str) -> ::Result<usize> { let cpath = match CString::new(path) { Ok(s) => s, Err(_) => return Err(::error::from_libgphoto2(::gphoto2::GP_ERROR_BAD_PARAMETERS)), }; let idx = match unsafe { ::gphoto2::gp_port_info_list_lookup_path(self.as_mut_ptr(), cpath.as_ptr()) } { idx if idx >= 0 => idx, err => return Err(::error::from_libgphoto2(err)), }; Ok(idx as usize) } pub fn get(&mut self, i: usize) -> ::Result<Port> { let i = i as libc::c_int; let mut cport = mem::MaybeUninit::uninit(); try_unsafe! { ::gphoto2::gp_port_info_list_get_info(self.0, i, cport.as_mut_ptr()) }; let cport = unsafe { cport.assume_init() }; Ok(from_libgphoto2_list(self, cport)) } fn as_mut_ptr(&mut self) -> *mut ::gphoto2::GPPortInfoList { self.0 } pub fn len(&mut self) -> usize { let l = unsafe { ::gphoto2::gp_port_info_list_count(self.0) }; if l < 0 { panic!(); } l as usize } }
match unsafe { ::gphoto2::gp_port_info_list_lookup_name(self.as_mut_ptr(), cname.as_ptr()) } { idx if idx >= 0 => idx, err => return Err(::error::from_libgphoto2(err)), }
if_condition
[ { "content": "/// Returns a structure with the version of the `libgphoto2` C library.\n\npub fn libgphoto2_version() -> LibraryVersion {\n\n LibraryVersion::new()\n\n}\n", "file_path": "src/version.rs", "rank": 2, "score": 62897.67397201661 }, { "content": "fn main() {\n\n let mut context = match gphoto::Context::new() {\n\n Ok(ctx) => ctx,\n\n Err(err) => panic!(\"error creating context: {}\", err),\n\n };\n\n\n\n // List the available cameras alongside where they're found\n\n let mut cameras = match gphoto::Camera::autodetect(&mut context) {\n\n Ok(ctx) => ctx,\n\n Err(err) => panic!(\"error detecting cameras: {}\", err),\n\n };\n\n println!(\"Found {} cameras\", cameras.len());\n\n for i in 0..cameras.len() {\n\n let name = cameras.name(i).expect(\"camera name\");\n\n let value = cameras.value(i).expect(\"camera port\");\n\n println!(\"{}\\t{}\", value, name);\n\n }\n\n\n\n // Load the port list. This will let us search for a specific port to select\n\n // once we're choosing which camera to open\n", "file_path": "examples/list_cameras.rs", "rank": 3, "score": 60652.70278231474 }, { "content": "#[doc(hidden)]\n\npub fn from_libgphoto2(err: c_int) -> Error {\n\n Error { err }\n\n}\n\n\n\n#[doc(hidden)]\n\nmacro_rules! 
try_unsafe {\n\n ($x:expr) => {\n\n match unsafe { $x } {\n\n ::gphoto2::GP_OK => (),\n\n err => return Err(::error::from_libgphoto2(err))\n\n }\n\n }\n\n}\n", "file_path": "src/error.rs", "rank": 4, "score": 59134.6715408141 }, { "content": "#[doc(hidden)]\n\npub fn from_libgphoto2(abilities: ::gphoto2::CameraAbilities) -> Abilities {\n\n Abilities { inner: abilities }\n\n}\n", "file_path": "src/abilities.rs", "rank": 5, "score": 54208.9918527439 }, { "content": "/// Types that manages access to a mutable resource.\n\n///\n\n/// Like the `Handle` trait, this trait is intended to be used by structs to hand out access to an\n\n/// owned resource across module boundaries without exposing the resource outside of the crate.\n\npub trait HandleMut<T>: Handle<T> {\n\n unsafe fn as_mut_ptr(&mut self) -> *mut T;\n\n}\n", "file_path": "src/handle.rs", "rank": 6, "score": 48994.99775461849 }, { "content": "fn main() {\n\n let version = gphoto::libgphoto2_version();\n\n println!(\"libgphoto2 {} {} {} {} {}\", version.version(), version.camlibs(), version.compiler(), version.ltdl(), version.exif());\n\n}\n", "file_path": "examples/version.rs", "rank": 7, "score": 38139.61818401427 }, { "content": "fn main() {\n\n let mut context = match gphoto::Context::new() {\n\n Ok(c) => c,\n\n Err(err) => panic!(\"error creating context: {}\", err)\n\n };\n\n\n\n // open camera\n\n\n\n println!(\"opening camera ...\");\n\n let mut camera = match gphoto::Camera::new() {\n\n Ok(c) => c,\n\n Err(err) => panic!(\"error creating camera: {}\", err)\n\n };\n\n match camera.init(&mut context) {\n\n Ok(c) => c,\n\n Err(err) => panic!(\"error opening camera: {}\", err)\n\n };\n\n println!(\" (done)\");\n\n\n\n // capture image\n", "file_path": "examples/capture.rs", "rank": 8, "score": 38139.61818401427 }, { "content": "/// A trait for types that can store media.\n\npub trait Media {\n\n #[doc(hidden)]\n\n unsafe fn as_mut_ptr(&mut self) -> *mut ::gphoto2::CameraFile;\n\n}\n\n\n\n\n\n/// Media 
stored as a local file.\n\npub struct FileMedia {\n\n file: *mut ::gphoto2::CameraFile,\n\n}\n\n\n\nimpl Drop for FileMedia {\n\n fn drop(&mut self) {\n\n unsafe {\n\n ::gphoto2::gp_file_unref(self.file);\n\n }\n\n }\n\n}\n\n\n\nimpl FileMedia {\n", "file_path": "src/media.rs", "rank": 9, "score": 36781.23981057577 }, { "content": "fn main() {\n\n let mut context = match gphoto::Context::new() {\n\n Ok(c) => c,\n\n Err(err) => panic!(\"error creating context: {}\", err),\n\n };\n\n\n\n // open camera\n\n\n\n println!(\"opening camera ...\");\n\n let mut camera = match gphoto::Camera::new() {\n\n Ok(c) => c,\n\n Err(err) => panic!(\"error creating camera: {}\", err),\n\n };\n\n match camera.init(&mut context) {\n\n Ok(c) => c,\n\n Err(err) => panic!(\"error opening camera: {}\", err),\n\n };\n\n println!(\" (done)\");\n\n\n\n // create a FileMedia to store the image in memory\n", "file_path": "examples/capture_preview.rs", "rank": 10, "score": 36779.035745037196 }, { "content": "fn main() {\n\n let mut context = match gphoto::Context::new() {\n\n Ok(c) => c,\n\n Err(err) => panic!(\"error creating context: {}\", err)\n\n };\n\n\n\n let mut camera = match gphoto::Camera::new() {\n\n Ok(c) => c,\n\n Err(err) => panic!(\"error creating camera: {}\", err)\n\n };\n\n match camera.init(&mut context) {\n\n Ok(c) => c,\n\n Err(err) => panic!(\"error opening camera: {}\", err)\n\n };\n\n\n\n {\n\n let port = camera.port();\n\n\n\n println!(\"[port info]\");\n\n println!(\"port type = {:?}\", port.port_type());\n", "file_path": "examples/camera_info.rs", "rank": 11, "score": 36779.035745037196 }, { "content": "fn main() {\n\n let mut context = match gphoto::Context::new() {\n\n Ok(c) => c,\n\n Err(err) => panic!(\"error creating context: {}\", err),\n\n };\n\n\n\n // open camera\n\n\n\n eprintln!(\"opening camera ...\");\n\n let mut camera = match gphoto::Camera::new() {\n\n Ok(c) => c,\n\n Err(err) => panic!(\"error creating camera: {}\", err),\n\n };\n\n match 
camera.init(&mut context) {\n\n Ok(c) => c,\n\n Err(err) => panic!(\"error opening camera: {}\", err),\n\n };\n\n eprintln!(\" (done)\");\n\n\n\n // create a FileMedia to store the image in memory\n", "file_path": "examples/capture_preview_movie.rs", "rank": 12, "score": 35559.35746001446 }, { "content": "/// Types that manages access to a resource.\n\n///\n\n/// This trait is intended to be used by structs that manage the ownership of a resource from a C\n\n/// library. Often, a pointer to the resource is needed to implement methods in other modules that\n\n/// wrap the same C library, but the pointer should not be accessible from outside of the crate.\n\n///\n\n/// The `Handle` trait allows a struct to hand out access to an owned resource across module\n\n/// boundaries. Because the `Handle` trait is not exported from the crate, the owned resources will\n\n/// not be accessible from outside the crate.\n\npub trait Handle<T> {\n\n unsafe fn as_ptr(&self) -> *const T;\n\n}\n\n\n", "file_path": "src/handle.rs", "rank": 13, "score": 34231.32231513883 }, { "content": " let mut port_list = gphoto::PortList::new().expect(\"port list\");\n\n port_list.load().expect(\"load port list\");\n\n\n\n // Now that they're listed, let's open each one explicitly and list some\n\n // details\n\n for i in 0..cameras.len() {\n\n // Create a new camera object but do not initialize it yet\n\n let mut camera = gphoto::Camera::new().expect(\"creating camera\");\n\n\n\n // Take the port path from the value of the camera list and use it to\n\n // look up the port object for it\n\n let port_path = cameras.value(i).expect(\"camera port\");\n\n let port_idx = port_list.lookup_path(&port_path).expect(\"lookup port\");\n\n let port = port_list.get(port_idx).expect(\"get port\");\n\n // Set the port in our camera object to tell the library which one we\n\n // want to use and then initialize it.\n\n camera.set_port(port).expect(\"setting port\");\n\n if let Err(err) = camera.init(&mut context) 
{\n\n panic!(\"failed to init camera: {}\", err)\n\n }\n", "file_path": "examples/list_cameras.rs", "rank": 24, "score": 25271.050338696852 }, { "content": "\n\n // And lastly let's print out a summary to show that we are opening the camera\n\n // we mean\n\n println!(\"Camera: {}\", cameras.name(i).expect(\"camera name\"));\n\n println!(\n\n \"Summary:\\n{}\",\n\n camera.summary(&mut context).expect(\"camera summary\")\n\n );\n\n }\n\n}\n", "file_path": "examples/list_cameras.rs", "rank": 25, "score": 25258.02880414654 }, { "content": "extern crate gphoto;\n\n\n", "file_path": "examples/list_cameras.rs", "rank": 26, "score": 25252.6777555066 }, { "content": " pub fn init(&mut self, context: &mut Context) -> ::Result<()> {\n\n try_unsafe!(::gphoto2::gp_camera_init(self.camera, context.as_mut_ptr()));\n\n Ok(())\n\n }\n\n\n\n /// Return a list of detected cameras\n\n ///\n\n /// The 'name' in the returned CameraList is the name of the camera and the\n\n /// 'value' is the port where they're attached.\n\n pub fn autodetect(context: &mut Context) -> ::Result<CameraList> {\n\n let mut list = CameraList::new()?;\n\n\n\n match unsafe { ::gphoto2::gp_camera_autodetect(list.as_mut_ptr(), context.as_mut_ptr()) } {\n\n n if n >= 0 => Ok(list),\n\n err => Err(::error::from_libgphoto2(err)),\n\n }\n\n }\n\n\n\n /// Captures an image.\n\n pub fn capture_image(&mut self, context: &mut Context) -> ::Result<CameraFile> {\n", "file_path": "src/camera.rs", "rank": 27, "score": 26.34882170681173 }, { "content": " }\n\n\n\n /// Get the name of the ith entry in the list as a CStr\n\n ///\n\n /// This version avoids allocating the String and the lossy conversion.\n\n pub fn name_cstr(&mut self, i: usize) -> ::Result<&CStr> {\n\n let i = i as libc::c_int;\n\n let mut cname = mem::MaybeUninit::uninit();\n\n try_unsafe! 
{ ::gphoto2::gp_list_get_name(self.0, i, cname.as_mut_ptr()) };\n\n\n\n Ok(unsafe { CStr::from_ptr(cname.assume_init()) })\n\n }\n\n\n\n /// Get the name of the ith entry in the list\n\n pub fn name(&mut self, i: usize) -> ::Result<String> {\n\n self.name_cstr(i)\n\n .map(CStr::to_string_lossy)\n\n .map(Cow::into_owned)\n\n }\n\n\n", "file_path": "src/camera.rs", "rank": 28, "score": 24.920541748463187 }, { "content": " if self.inner.port & ::gphoto2::GP_PORT_USB_DISK_DIRECT != 0 {\n\n port_types.insert(PortType::Direct);\n\n }\n\n\n\n if self.inner.port & ::gphoto2::GP_PORT_USB_SCSI != 0 {\n\n port_types.insert(PortType::SCSI);\n\n }\n\n\n\n port_types\n\n }\n\n\n\n /// Returns the supported serial port speeds.\n\n pub fn speeds(&self) -> Vec<usize> {\n\n self.inner.speed.iter().take_while(|&n| *n != 0).map(|&n| n as usize).collect()\n\n }\n\n\n\n /// Returns the camera operations supported by the device.\n\n pub fn camera_operations(&self) -> HashSet<CameraOperation> {\n\n let mut operations = HashSet::<CameraOperation>::new();\n\n\n", "file_path": "src/abilities.rs", "rank": 29, "score": 24.35453405527578 }, { "content": "pub struct CameraFile {\n\n inner: ::gphoto2::CameraFilePath,\n\n}\n\n\n\nimpl CameraFile {\n\n /// Returns the directory that the file is stored in.\n\n pub fn directory(&self) -> Cow<str> {\n\n unsafe {\n\n String::from_utf8_lossy(CStr::from_ptr(self.inner.folder.as_ptr()).to_bytes())\n\n }\n\n }\n\n\n\n /// Returns the name of the file without the directory.\n\n pub fn basename(&self) -> Cow<str> {\n\n unsafe {\n\n String::from_utf8_lossy(CStr::from_ptr(self.inner.name.as_ptr()).to_bytes())\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/camera.rs", "rank": 30, "score": 22.984049167910303 }, { "content": " /// Returns the supported port types.\n\n pub fn port_types(&self) -> HashSet<PortType> {\n\n let mut port_types = HashSet::<PortType>::new();\n\n\n\n if self.inner.port & ::gphoto2::GP_PORT_SERIAL != 0 {\n\n 
port_types.insert(PortType::Serial);\n\n }\n\n\n\n if self.inner.port & ::gphoto2::GP_PORT_USB != 0 {\n\n port_types.insert(PortType::USB);\n\n }\n\n\n\n if self.inner.port & ::gphoto2::GP_PORT_DISK != 0 {\n\n port_types.insert(PortType::Disk);\n\n }\n\n\n\n if self.inner.port & ::gphoto2::GP_PORT_PTPIP != 0 {\n\n port_types.insert(PortType::PTPIP);\n\n }\n\n\n", "file_path": "src/abilities.rs", "rank": 31, "score": 22.43884499339327 }, { "content": "\n\n /// Create a new FileMedia to store data in memory.\n\n pub fn new() -> ::Result<Self> {\n\n let mut file = mem::MaybeUninit::uninit();\n\n match unsafe { ::gphoto2::gp_file_new(file.as_mut_ptr()) } {\n\n ::gphoto2::GP_OK => {\n\n Ok(FileMedia { file: unsafe { file.assume_init() } })\n\n },\n\n err => {\n\n Err(::error::from_libgphoto2(err))\n\n }\n\n }\n\n }\n\n\n\n /// Create a FileMedia from a File.\n\n pub fn from_file(f: File) -> ::Result<Self> {\n\n let mut ptr = mem::MaybeUninit::uninit();\n\n\n\n match unsafe { ::gphoto2::gp_file_new_from_fd(ptr.as_mut_ptr(), f.into_raw_fd()) } {\n\n ::gphoto2::GP_OK => {\n", "file_path": "src/media.rs", "rank": 32, "score": 22.178603023452002 }, { "content": " util::camera_text_to_string(about)\n\n }\n\n}\n\n\n\n/// A structure representing a list of cameras connected to the system\n\n#[repr(transparent)]\n\npub struct CameraList(*mut ::gphoto2::CameraList);\n\n\n\nimpl Drop for CameraList {\n\n fn drop(&mut self) {\n\n unsafe {\n\n ::gphoto2::gp_list_unref(self.0);\n\n }\n\n }\n\n}\n\n\n\nimpl CameraList {\n\n /// Allocate a new list\n\n fn new() -> ::Result<Self> {\n\n let mut list = mem::MaybeUninit::uninit();\n", "file_path": "src/camera.rs", "rank": 33, "score": 21.839868142349243 }, { "content": "use ::handle::{Handle,HandleMut};\n\n\n\n/// A `libgphoto2` library context.\n\npub struct Context {\n\n context: *mut ::gphoto2::GPContext,\n\n}\n\n\n\nimpl Context {\n\n /// Creates a new context.\n\n pub fn new() -> ::Result<Context> {\n\n let ptr = unsafe { 
::gphoto2::gp_context_new() };\n\n\n\n if !ptr.is_null() {\n\n Ok(Context { context: ptr })\n\n }\n\n else {\n\n Err(::error::from_libgphoto2(::gphoto2::GP_ERROR_NO_MEMORY))\n\n }\n\n }\n\n}\n", "file_path": "src/context.rs", "rank": 34, "score": 20.116768081105068 }, { "content": " try_unsafe!(::gphoto2::gp_list_new(list.as_mut_ptr()));\n\n let list = unsafe { list.assume_init() };\n\n\n\n Ok(CameraList(list))\n\n }\n\n\n\n /// Return a mutable underlying pointer\n\n fn as_mut_ptr(&mut self) -> *mut ::gphoto2::CameraList {\n\n self.0\n\n }\n\n\n\n /// Get the amount of entries in the list\n\n pub fn len(&mut self) -> usize {\n\n let l = unsafe { ::gphoto2::gp_list_count(self.0) };\n\n\n\n if l < 0 {\n\n panic!();\n\n }\n\n\n\n l as usize\n", "file_path": "src/camera.rs", "rank": 35, "score": 19.89160448697857 }, { "content": " context.as_mut_ptr())\n\n };\n\n\n\n Ok(())\n\n }\n\n\n\n /// Captures a preview image and stores it in the given destination\n\n pub fn capture_preview<T: Media>(&mut self, context: &mut Context, destination: &mut T) -> ::Result<()> {\n\n\ttry_unsafe! {\n\n\t ::gphoto2::gp_camera_capture_preview(self.camera, destination.as_mut_ptr(), context.as_mut_ptr())\n\n\t};\n\n\n\n\tOk(())\n\n }\n\n\n\n /// Returns information about the port the camera is connected to.\n\n pub fn port(&self) -> Port {\n\n let mut port = mem::MaybeUninit::uninit();\n\n\n\n unsafe {\n", "file_path": "src/camera.rs", "rank": 36, "score": 19.76414883809136 }, { "content": " let mut file_path = mem::MaybeUninit::uninit();\n\n\n\n try_unsafe! {\n\n ::gphoto2::gp_camera_capture(self.camera,\n\n ::gphoto2::GP_CAPTURE_IMAGE,\n\n file_path.as_mut_ptr(),\n\n context.as_mut_ptr())\n\n };\n\n\n\n Ok(CameraFile { inner: unsafe { file_path.assume_init() } })\n\n }\n\n\n\n /// Downloads a file from the camera.\n\n pub fn download<T: Media>(&mut self, context: &mut Context, source: &CameraFile, destination: &mut T) -> ::Result<()> {\n\n try_unsafe! 
{\n\n ::gphoto2::gp_camera_file_get(self.camera,\n\n source.inner.folder.as_ptr(),\n\n source.inner.name.as_ptr(),\n\n ::gphoto2::GP_FILE_TYPE_NORMAL,\n\n destination.as_mut_ptr(),\n", "file_path": "src/camera.rs", "rank": 37, "score": 19.57820739647969 }, { "content": " ::gphoto2::gp_camera_unref(self.camera);\n\n }\n\n }\n\n}\n\n\n\nimpl Camera {\n\n /// Create a new Camera instance\n\n pub fn new() -> ::Result<Self> {\n\n let mut camera = mem::MaybeUninit::uninit();\n\n\n\n try_unsafe!(::gphoto2::gp_camera_new(camera.as_mut_ptr()));\n\n Ok(Self {\n\n camera: unsafe { camera.assume_init() },\n\n })\n\n }\n\n\n\n /// Initialize the camera.\n\n ///\n\n /// If this Camera has not been set up, the library will select the first\n\n /// one it detects.\n", "file_path": "src/camera.rs", "rank": 38, "score": 19.489175023086414 }, { "content": "\n\n match ::gphoto2::gp_file_new_from_fd(ptr.as_mut_ptr(), fd) {\n\n ::gphoto2::GP_OK => {\n\n Ok(FileMedia { file: ptr.assume_init() })\n\n },\n\n\t err => {\n\n Err(::error::from_libgphoto2(err))\n\n\t }\n\n }\n\n }\n\n\n\n /// Retrieve the data from this FileMedia\n\n ///\n\n /// Note that this can cause us to read the file contents from disk.\n\n pub fn data(&self) -> ::Result<&[u8]> {\n\n\tlet mut data = mem::MaybeUninit::uninit();\n\n\tlet mut size = mem::MaybeUninit::uninit();\n\n\tmatch unsafe { ::gphoto2::gp_file_get_data_and_size(self.file, data.as_mut_ptr(), size.as_mut_ptr()) } {\n\n\t ::gphoto2::GP_OK => {\n\n\t\tunsafe {\n", "file_path": "src/media.rs", "rank": 39, "score": 18.61875596253115 }, { "content": "use std::borrow::Cow;\n\nuse std::ffi::CStr;\n\nuse std::mem;\n\n\n\nuse ::context::Context;\n\nuse ::abilities::Abilities;\n\nuse ::media::Media;\n\nuse ::port::Port;\n\nuse ::storage::Storage;\n\n\n\nuse ::handle::prelude::*;\n\n\n\n/// A structure representing a camera connected to the system.\n\npub struct Camera {\n\n camera: *mut ::gphoto2::Camera,\n\n}\n\n\n\nimpl Drop for Camera {\n\n fn drop(&mut self) 
{\n\n unsafe {\n", "file_path": "src/camera.rs", "rank": 40, "score": 18.499578188028973 }, { "content": " /// Get the value of the ith entry in the list as a CStr\n\n ///\n\n /// This version avoids allocating the String and the lossy conversion.\n\n pub fn value_cstr(&mut self, i: usize) -> ::Result<&CStr> {\n\n let i = i as libc::c_int;\n\n let mut cvalue = mem::MaybeUninit::uninit();\n\n try_unsafe! { ::gphoto2::gp_list_get_value(self.0, i, cvalue.as_mut_ptr()) };\n\n\n\n Ok(unsafe { CStr::from_ptr(cvalue.assume_init()) })\n\n }\n\n\n\n /// Get the value of the ith entry in the list\n\n pub fn value(&mut self, i: usize) -> ::Result<String> {\n\n self.value_cstr(i)\n\n .map(CStr::to_string_lossy)\n\n .map(Cow::into_owned)\n\n }\n\n}\n\n\n\n/// A file stored on a camera's storage.\n", "file_path": "src/camera.rs", "rank": 41, "score": 18.245883776486956 }, { "content": " assert_eq!(::gphoto2::GP_OK, ::gphoto2::gp_camera_get_port_info(self.camera, port.as_mut_ptr()));\n\n }\n\n\n\n ::port::from_libgphoto2(self, unsafe { port.assume_init() })\n\n }\n\n\n\n /// Specify which port the camera should be connected to.\n\n ///\n\n /// Call this before Camera::init() in order to select a specific camera.\n\n pub fn set_port(&mut self, port: Port) -> ::Result<()> {\n\n try_unsafe! {\n\n ::gphoto2::gp_camera_set_port_info(self.camera, port.as_ptr())\n\n };\n\n\n\n Ok(())\n\n }\n\n\n\n /// Retrieves the camera's abilities.\n\n pub fn abilities(&self) -> Abilities {\n\n let mut abilities = mem::MaybeUninit::uninit();\n", "file_path": "src/camera.rs", "rank": 42, "score": 16.691956707284284 }, { "content": " /// Creates a new file that stores media.\n\n ///\n\n /// This function creates a new file on disk. 
The file will start out empty.\n\n ///\n\n /// ## Errors\n\n ///\n\n /// This function returns an error if the file can not be created:\n\n ///\n\n /// * `FileExists` if the file already exists.\n\n pub fn create(path: &Path) -> ::Result<Self> {\n\n\tlet file = match OpenOptions::new().create_new(true).read(true).write(true).open(path) {\n\n\t Ok(f) => f,\n\n\t Err(_) => {\n\n\t\t// This should be more specific but this what we have for now\n\n\t\treturn Err(::error::from_libgphoto2(::gphoto2::GP_ERROR_FILE_EXISTS));\n\n\t }\n\n\t};\n\n\n\n\tSelf::from_file(file)\n\n }\n", "file_path": "src/media.rs", "rank": 43, "score": 16.361332321418598 }, { "content": " }\n\n }\n\n\n\n /// Returns the name of the camera's model.\n\n pub fn model(&self) -> Cow<str> {\n\n unsafe {\n\n String::from_utf8_lossy(CStr::from_ptr(self.inner.model.as_ptr()).to_bytes())\n\n }\n\n }\n\n\n\n /// Returns the driver's stability status.\n\n pub fn driver_status(&self) -> DriverStatus {\n\n match self.inner.status {\n\n ::gphoto2::GP_DRIVER_STATUS_PRODUCTION => DriverStatus::Production,\n\n ::gphoto2::GP_DRIVER_STATUS_TESTING => DriverStatus::Testing,\n\n ::gphoto2::GP_DRIVER_STATUS_EXPERIMENTAL => DriverStatus::Experimental,\n\n ::gphoto2::GP_DRIVER_STATUS_DEPRECATED => DriverStatus::Deprecated,\n\n }\n\n }\n\n\n", "file_path": "src/abilities.rs", "rank": 44, "score": 16.04980640593372 }, { "content": " Ok(FileMedia { file: unsafe { ptr.assume_init() } })\n\n },\n\n\t err => {\n\n Err(::error::from_libgphoto2(err))\n\n\t }\n\n }\n\n }\n\n\n\n /// Create a FileMedia from a RawFd.\n\n ///\n\n /// Care must the taken that the descriptor is not owned by something else\n\n /// that might free it while this object lives.\n\n ///\n\n /// # Safety\n\n ///\n\n /// This is marked as unsafe for the same reason as the FromRawFd, namely\n\n /// that we take onwership of the file descriptor but we cannot guarantee\n\n /// this as part of the type system.\n\n pub unsafe fn from_raw_fd(fd: RawFd) -> 
::Result<Self> {\n\n let mut ptr = mem::MaybeUninit::uninit();\n", "file_path": "src/media.rs", "rank": 45, "score": 14.93691737975728 }, { "content": "#[repr(C)]\n\npub struct Storage {\n\n inner: ::gphoto2::CameraStorageInformation,\n\n}\n\n\n\nimpl Storage {\n\n /// Base directory of the storage.\n\n pub fn base_dir(&self) -> Option<Cow<str>> {\n\n if self.inner.fields & ::gphoto2::GP_STORAGEINFO_BASE != 0 {\n\n Some(unsafe {\n\n String::from_utf8_lossy(CStr::from_ptr(self.inner.basedir.as_ptr()).to_bytes())\n\n })\n\n }\n\n else {\n\n None\n\n }\n\n }\n\n\n\n /// The storage's label.\n\n pub fn label(&self) -> Option<Cow<str>> {\n", "file_path": "src/storage.rs", "rank": 46, "score": 14.881165632331516 }, { "content": " #[doc(hidden)]\n\n unsafe fn as_mut_ptr(&mut self) -> *mut ::gphoto2::CameraFile {\n\n self.file\n\n }\n\n}\n\n\n\nimpl TryFrom<File> for FileMedia {\n\n type Error = crate::Error;\n\n\n\n fn try_from(f: File) -> ::Result<Self> {\n\n\tFileMedia::from_file(f)\n\n }\n\n}\n", "file_path": "src/media.rs", "rank": 47, "score": 14.852562619581683 }, { "content": "\n\n unsafe {\n\n assert_eq!(::gphoto2::GP_OK, ::gphoto2::gp_camera_get_abilities(self.camera, abilities.as_mut_ptr()));\n\n }\n\n\n\n ::abilities::from_libgphoto2(unsafe { abilities.assume_init() })\n\n }\n\n\n\n /// Retrieves information about the camera's storage.\n\n ///\n\n /// Returns a `Vec` containing one `Storage` for each filesystem on the device.\n\n pub fn storage(&mut self, context: &mut Context) -> ::Result<Vec<Storage>> {\n\n let mut ptr = mem::MaybeUninit::uninit();\n\n\tlet mut len = mem::MaybeUninit::uninit();\n\n\n\n try_unsafe! 
{\n\n ::gphoto2::gp_camera_get_storageinfo(self.camera,\n\n ptr.as_mut_ptr(),\n\n len.as_mut_ptr(),\n\n context.as_mut_ptr())\n", "file_path": "src/camera.rs", "rank": 48, "score": 14.811216988792282 }, { "content": "\n\nimpl Drop for Context {\n\n fn drop(&mut self) {\n\n unsafe {\n\n ::gphoto2::gp_context_unref(self.context);\n\n }\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\nimpl Handle<::gphoto2::GPContext> for Context {\n\n unsafe fn as_ptr(&self) -> *const ::gphoto2::GPContext {\n\n self.context\n\n }\n\n}\n\n\n\n#[doc(hidden)]\n\nimpl HandleMut<::gphoto2::GPContext> for Context {\n\n unsafe fn as_mut_ptr(&mut self) -> *mut ::gphoto2::GPContext {\n\n self.context\n\n }\n\n}\n", "file_path": "src/context.rs", "rank": 49, "score": 14.740847931965465 }, { "content": "mod util {\n\n use std::ffi::CStr;\n\n\n\n pub fn camera_text_to_string(camera_text: *mut::gphoto2::CameraText) -> ::Result<String> {\n\n let length = unsafe {\n\n CStr::from_ptr((*camera_text).text.as_ptr()).to_bytes().len()\n\n };\n\n\n\n let vec = unsafe {\n\n Vec::<u8>::from_raw_parts((*camera_text).text.as_mut_ptr() as *mut u8, length, (*camera_text).text.len())\n\n };\n\n\n\n String::from_utf8(vec).map_err(|_| {\n\n ::error::from_libgphoto2(::gphoto2::GP_ERROR_CORRUPTED_DATA)\n\n })\n\n }\n\n}\n", "file_path": "src/camera.rs", "rank": 50, "score": 14.573019816883082 }, { "content": " let summary = Box::into_raw(Box::new(::gphoto2::CameraText{ text: [0; 32 *1024] }));\n\n\n\n try_unsafe!(::gphoto2::gp_camera_get_summary(self.camera, summary, context.as_mut_ptr()));\n\n\n\n util::camera_text_to_string(summary)\n\n }\n\n\n\n /// Returns the camera's manual.\n\n ///\n\n /// The manual contains information about using the camera.\n\n ///\n\n /// ## Errors\n\n ///\n\n /// This function returns an error if the manual could not be retrieved:\n\n ///\n\n /// * `NotSupported` if there is no manual available for the camera.\n\n /// * `CorruptedData` if the summary is invalid UTF-8.\n\n pub fn manual(&mut self, 
context: &mut Context) -> ::Result<String> {\n\n let manual = Box::into_raw(Box::new(::gphoto2::CameraText{ text: [0; 32 *1024] }));\n\n\n", "file_path": "src/camera.rs", "rank": 51, "score": 13.91007507317556 }, { "content": " if self.inner.fields & ::gphoto2::GP_STORAGEINFO_LABEL != 0 {\n\n Some(unsafe {\n\n String::from_utf8_lossy(CStr::from_ptr(self.inner.label.as_ptr()).to_bytes())\n\n })\n\n }\n\n else {\n\n None\n\n }\n\n }\n\n\n\n /// A description of the storage.\n\n pub fn description(&self) -> Option<Cow<str>> {\n\n if self.inner.fields & ::gphoto2::GP_STORAGEINFO_DESCRIPTION != 0 {\n\n Some(unsafe {\n\n String::from_utf8_lossy(CStr::from_ptr(self.inner.description.as_ptr()).to_bytes())\n\n })\n\n }\n\n else {\n\n None\n\n }\n", "file_path": "src/storage.rs", "rank": 52, "score": 13.735971615258853 }, { "content": " ::gphoto2::GP_ERROR_CANCEL => ErrorKind::Cancel,\n\n ::gphoto2::GP_ERROR_CAMERA_ERROR => ErrorKind::CameraError,\n\n ::gphoto2::GP_ERROR_OS_FAILURE => ErrorKind::OSFailure,\n\n ::gphoto2::GP_ERROR_NO_SPACE => ErrorKind::NoSpace,\n\n\n\n ::gphoto2::GP_ERROR | _ => ErrorKind::Other\n\n }\n\n }\n\n\n\n /// Returns an error message.\n\n pub fn message(&self) -> &'static str {\n\n unsafe {\n\n str::from_utf8_unchecked(CStr::from_ptr(::gphoto2::gp_result_as_string(self.err)).to_bytes())\n\n }\n\n }\n\n}\n\n\n\nimpl fmt::Display for Error {\n\n fn fmt(&self, fmt: &mut fmt::Formatter) -> StdResult<(),fmt::Error> {\n\n fmt.write_str(self.message())\n", "file_path": "src/error.rs", "rank": 53, "score": 13.435188404021119 }, { "content": " };\n\n\n\n let storage = unsafe { ptr.assume_init() } as *mut Storage;\n\n let length = unsafe { len.assume_init() } as usize;\n\n\n\n Ok(unsafe { Vec::from_raw_parts(storage, length, length) })\n\n }\n\n\n\n /// Returns the camera's summary.\n\n ///\n\n /// The summary typically contains non-configurable information about the camera, such as\n\n /// manufacturer and number of pictures taken.\n\n ///\n\n /// ## 
Errors\n\n ///\n\n /// This function returns an error if the summary could not be retrieved:\n\n ///\n\n /// * `NotSupported` if there is no summary available for the camera.\n\n /// * `CorruptedData` if the summary is invalid UTF-8.\n\n pub fn summary(&mut self, context: &mut Context) -> ::Result<String> {\n", "file_path": "src/camera.rs", "rank": 54, "score": 13.360719111300035 }, { "content": " try_unsafe!(::gphoto2::gp_camera_get_manual(self.camera, manual, context.as_mut_ptr()));\n\n\n\n util::camera_text_to_string(manual)\n\n }\n\n\n\n /// Returns information about the camera driver.\n\n ///\n\n /// This text typically contains information about the driver's author, acknowledgements, etc.\n\n ///\n\n /// ## Errors\n\n ///\n\n /// This function returns an error if the about text could not be retrieved:\n\n ///\n\n /// * `NotSupported` if there is no about text available for the camera's driver.\n\n /// * `CorruptedData` if the summary is invalid UTF-8.\n\n pub fn about_driver(&mut self, context: &mut Context) -> ::Result<String> {\n\n let about = Box::into_raw(Box::new(::gphoto2::CameraText{ text: [0; 32 *1024] }));\n\n\n\n try_unsafe!(::gphoto2::gp_camera_get_about(self.camera, about, context.as_mut_ptr()));\n\n\n", "file_path": "src/camera.rs", "rank": 55, "score": 12.939480360459973 }, { "content": " }\n\n\n\n /// Returns the `libgphoto2` version number.\n\n ///\n\n /// The version number is a string representing the version number, e.g., `\"2.4.16\"`.\n\n pub fn version(&self) -> &str {\n\n self.version\n\n }\n\n\n\n /// Describes which camlibs were compiled with `libgphoto2`.\n\n pub fn camlibs(&self) -> &str {\n\n self.camlibs\n\n }\n\n\n\n /// Returns the name of the C compiler that was used to compile `libgphoto2`.\n\n pub fn compiler(&self) -> &str {\n\n self.compiler\n\n }\n\n\n\n /// Returns a string describing whether `libgphoto2` was compiled for portable loading of\n", "file_path": "src/version.rs", "rank": 56, "score": 12.740242728453577 }, { 
"content": "extern crate gphoto2_sys as gphoto2;\n\nextern crate libc;\n\n\n\npub use error::{Result,Error,ErrorKind};\n\npub use abilities::{Abilities,DeviceType,DriverStatus,CameraOperation,FileOperation,FolderOperation};\n\npub use camera::{Camera,CameraFile,CameraList};\n\npub use context::{Context};\n\npub use media::{Media,FileMedia};\n\npub use port::{PortType,Port,PortList};\n\npub use storage::{Storage,StorageType,FilesystemType,AccessType};\n\npub use version::{LibraryVersion,libgphoto2_version};\n\n\n\n#[macro_use]\n\nmod error;\n\nmod abilities;\n\nmod camera;\n\nmod context;\n\nmod media;\n\nmod port;\n\nmod storage;\n\nmod version;\n\n\n\n// internal\n\nmod handle;\n", "file_path": "src/lib.rs", "rank": 57, "score": 12.517086125844433 }, { "content": "/// speeds = []\n\n/// camera operations = {CaptureImage, TriggerCapture, Config, CapturePreview}\n\n/// file operations = {Delete, Preview}\n\n/// folder operations = {MakeDirectory, RemoveDirectory, PutFile}\n\n/// USB vendor = 1200\n\n/// USB product = 1079\n\n/// USB class = 0\n\n/// USB subclass = 0\n\n/// USB protocol = 0\n\n/// ```\n\npub struct Abilities {\n\n inner: ::gphoto2::CameraAbilities\n\n}\n\n\n\nimpl Abilities {\n\n /// Returns the type of the device.\n\n pub fn device_type(&self) -> DeviceType {\n\n match self.inner.device_type {\n\n ::gphoto2::GP_DEVICE_STILL_CAMERA => DeviceType::Camera,\n\n ::gphoto2::GP_DEVICE_AUDIO_PLAYER => DeviceType::Audio,\n", "file_path": "src/abilities.rs", "rank": 58, "score": 12.279133939536056 }, { "content": "use std::borrow::Cow;\n\nuse std::collections::HashSet;\n\nuse std::ffi::CStr;\n\n\n\nuse ::port::{PortType};\n\n\n\n/// Describes the abilities of a device.\n\n///\n\n/// ## Example\n\n///\n\n/// An `Abilities` object can be used to retrieve information about a camera's driver:\n\n///\n\n/// ```no_run\n\n/// let mut context = gphoto::Context::new().unwrap();\n\n/// let mut camera = gphoto::Camera::new().unwrap();\n\n/// camera.init(&mut 
context).unwrap();\n\n/// let abilities = camera.abilities();\n\n///\n\n/// println!(\" device type = {:?}\", abilities.device_type());\n\n/// println!(\" model = {:?}\", abilities.model());\n", "file_path": "src/abilities.rs", "rank": 59, "score": 12.25393821439065 }, { "content": "///\n\n/// This may output something like the following:\n\n///\n\n/// ```text\n\n/// libgphoto2 2.5.7 all camlibs clang ltdl no EXIF\n\n/// ```\n\n#[derive(Debug)]\n\npub struct LibraryVersion {\n\n version: &'static str,\n\n camlibs: &'static str,\n\n compiler: &'static str,\n\n ltdl: &'static str,\n\n exif: &'static str,\n\n}\n\n\n\nimpl LibraryVersion {\n\n fn new() -> LibraryVersion {\n\n let ptr = unsafe {\n\n ::gphoto2::gp_library_version(::gphoto2::GPVersionVerbosity::GP_VERSION_SHORT)\n\n };\n", "file_path": "src/version.rs", "rank": 60, "score": 11.941834909452421 }, { "content": "\n\n let mut len: usize = 0;\n\n\n\n while !unsafe { *ptr.add(len) }.is_null() {\n\n len += 1;\n\n }\n\n\n\n assert!(len >= 5);\n\n\n\n let table = unsafe {\n\n slice::from_raw_parts(ptr, len)\n\n };\n\n\n\n LibraryVersion {\n\n version: unsafe { str::from_utf8_unchecked(CStr::from_ptr(table[0]).to_bytes()) },\n\n camlibs: unsafe { str::from_utf8_unchecked(CStr::from_ptr(table[1]).to_bytes()) },\n\n compiler: unsafe { str::from_utf8_unchecked(CStr::from_ptr(table[2]).to_bytes()) },\n\n ltdl: unsafe { str::from_utf8_unchecked(CStr::from_ptr(table[3]).to_bytes()) },\n\n exif: unsafe { str::from_utf8_unchecked(CStr::from_ptr(table[4]).to_bytes()) },\n\n }\n", "file_path": "src/version.rs", "rank": 61, "score": 11.639111885914907 }, { "content": "use std::ffi::CStr;\n\nuse std::fmt;\n\nuse std::error::Error as StdError;\n\nuse std::result::Result as StdResult;\n\nuse std::str;\n\n\n\nuse ::libc::{c_int};\n\n\n\n/// A specialized `Result` type for working with gphoto2.\n\npub type Result<T> = StdResult<T,Error>;\n\n\n\n/// Types of errors reported by 
gphoto2.\n\n#[derive(Debug,PartialEq,Eq,Clone,Copy)]\n\npub enum ErrorKind {\n\n /// A parameter was incorrect.\n\n InvalidInput,\n\n\n\n /// Functionality not supported.\n\n NotSupported,\n\n\n", "file_path": "src/error.rs", "rank": 62, "score": 11.267557534863442 }, { "content": " //\n\n // you might often want to create a FileMedia out of stdout's fd instead but\n\n // that forces us to use unsafe here so let's take the hit on allocation and\n\n // copy as the limiting factor is generally going to be how fast we can get\n\n // the data from the camera\n\n let mut image = match gphoto::FileMedia::new() {\n\n Ok(o) => o,\n\n Err(err) => panic!(\"error creating FileMedia: {}\", err),\n\n };\n\n\n\n // capture image\n\n eprintln!(\"sending preview images to stdout\");\n\n loop {\n\n image.clean().expect(\"cleaning image\");\n\n match camera.capture_preview(&mut context, &mut image) {\n\n Ok(_) => {}\n\n Err(err) => {\n\n eprintln!(\"error capturing image: {}\", err);\n\n break;\n\n }\n", "file_path": "examples/capture_preview_movie.rs", "rank": 63, "score": 11.213448484156046 }, { "content": " let mut image = match gphoto::FileMedia::new() {\n\n Ok(o) => o,\n\n Err(err) => panic!(\"error creating FileMedia: {}\", err),\n\n };\n\n\n\n // capture image\n\n println!(\"capturing preview image ...\");\n\n match camera.capture_preview(&mut context, &mut image) {\n\n Ok(_) => {}\n\n Err(err) => panic!(\"error capturing image: {}\", err),\n\n };\n\n println!(\" (done) \");\n\n\n\n // store the preview in a file (and assume it's a JPEG)\n\n let mut f = match File::create(\"preview.jpg\") {\n\n Ok(f) => f,\n\n Err(err) => panic!(\"error creating preview file: {}\", err),\n\n };\n\n\n\n // we unwrap the data access as it's in-memory so that won't fail\n\n match f.write_all(image.data().unwrap()) {\n\n Ok(_) => {}\n\n Err(err) => panic!(\"error writing to disk: {}\", err),\n\n };\n\n\n\n println!(\" (done)\");\n\n}\n", "file_path": "examples/capture_preview.rs", "rank": 64, 
"score": 11.079963216584558 }, { "content": " println!(\"port name = {:?}\", port.name());\n\n println!(\"port path = {:?}\", port.path());\n\n }\n\n\n\n let abilities = camera.abilities();\n\n\n\n println!(\"\\n[abilities]\");\n\n println!(\" device type = {:?}\", abilities.device_type());\n\n println!(\" model = {:?}\", abilities.model());\n\n println!(\" driver status = {:?}\", abilities.driver_status());\n\n println!(\" port types = {:?}\", abilities.port_types());\n\n println!(\" speeds = {:?}\", abilities.speeds());\n\n println!(\"camera operations = {:?}\", abilities.camera_operations());\n\n println!(\" file operations = {:?}\", abilities.file_operations());\n\n println!(\"folder operations = {:?}\", abilities.folder_operations());\n\n println!(\" USB vendor = {:?}\", abilities.usb_vendor());\n\n println!(\" USB product = {:?}\", abilities.usb_product());\n\n println!(\" USB class = {:?}\", abilities.usb_class());\n\n println!(\" USB subclass = {:?}\", abilities.usb_subclass());\n\n println!(\" USB protocol = {:?}\", abilities.usb_protocol());\n", "file_path": "examples/camera_info.rs", "rank": 65, "score": 11.040406525309361 }, { "content": "\t\t // The data already lives in memory so the size cast is fine\n\n\t\t Ok(slice::from_raw_parts(data.assume_init() as *const u8, size.assume_init() as usize))\n\n\t\t}\n\n },\n\n\t err => {\n\n Err(::error::from_libgphoto2(err))\n\n\t }\n\n\t}\n\n }\n\n\n\n // Free the components of the FileMedia\n\n //\n\n // This is mostly relevant for in-memory kinds where the memory gets freed\n\n pub fn clean(&mut self) -> ::Result<()> {\n\n try_unsafe! 
{ ::gphoto2::gp_file_clean(self.file) };\n\n Ok(())\n\n }\n\n}\n\n\n\nimpl Media for FileMedia {\n", "file_path": "src/media.rs", "rank": 66, "score": 10.962662616706123 }, { "content": "\n\n println!(\"capturing image ...\");\n\n let capture = match camera.capture_image(&mut context) {\n\n Ok(c) => c,\n\n Err(err) => panic!(\"error capturing image: {}\", err)\n\n };\n\n println!(\" (done) {:?}\", capture.basename());\n\n\n\n // download file\n\n\n\n let mut file = match gphoto::FileMedia::create(Path::new(capture.basename().as_ref())) {\n\n Ok(f) => f,\n\n Err(err) => panic!(\"error saving file: {}\", err)\n\n };\n\n\n\n println!(\"downloading ...\");\n\n if let Err(err) = camera.download(&mut context, &capture, &mut file) {\n\n panic!(\"error downloading file: {}\", err);\n\n }\n\n println!(\" (done)\");\n\n}\n", "file_path": "examples/capture.rs", "rank": 67, "score": 10.717706519292841 }, { "content": "# gphoto\n\n\n\nThe `gphoto` crate provides a safe wrapper around the native `libgphoto2` library.\n\n\n\n## Dependencies\n\nIn order to use the `gphoto` crate, you must have a Unix system with the `libgphoto2` library\n\ninstalled where it can be found by `pkg-config`.\n\n\n\nOn Debian-based Linux distributions, install the `libgphoto2-dev` package:\n\n\n\n```\n\nsudo apt-get install libgphoto2-dev\n\n```\n\n\n\nOn OS X, install `libgphoto2` with Homebrew:\n\n\n\n```\n\nbrew install libgphoto2\n\n```\n\n\n\n## Usage\n\nAdd `gphoto` as a dependency in `Cargo.toml`:\n\n\n\n```toml\n\n[dependencies]\n\ngphoto = \"0.1.2\"\n\n```\n\n\n\nImport the `gphoto` crate. The starting point for nearly all `gphoto` functionality is to create a\n\ncontext object. 
You can then autodetect a camera using the `Camera::autodetect()` function:\n\n\n\n```rust\n\nextern crate gphoto;\n\n\n\nuse std::path::Path;\n\n\n\nfn main() {\n\n let mut context = gphoto::Context::new().unwrap();\n\n\n\n let mut camera = gphoto::Camera::autodetect(&mut context).unwrap();\n\n let capture = camera.capture_image(&mut context).unwrap();\n\n let mut file = gphoto::FileMedia::create(Path::new(&*capture.basename())).unwrap();\n\n\n\n camera.download(&mut context, &capture, &mut file).unwrap();\n\n}\n\n```\n\n\n\n### OS X Usage\n\nOS X opens cameras automatically when connected, which prevents other applications from opening the\n\ncamera device. When attempting to open a camera that is already opened by the operating system, you\n\nwill get an error message like the following:\n\n\n\n```\n\nCould not claim the USB device\n\n```\n\n\n\nTo fix this, you have to kill the `PTPCamera` process after connecting a camera to your system:\n\n\n\n```\n\nkillall PTPCamera\n\n```\n\n\n\nEach camera is opened with a separate instance of the `PTPCamera` application. 
If you have several\n\ncameras connected, you may want to kill individual `PTPCamera` processes instead of using `killall`.\n\n\n\n## License\n\nCopyright © 2015 David Cuddeback\n\n\n\nDistributed under the [MIT License](LICENSE).\n\n\n\n*Note:* By using this crate, your executable will link to the `libgphoto2` C library, which is\n\nlicensed under the [LGPL version 2.1](https://github.com/gphoto/libgphoto2/blob/master/COPYING).\n", "file_path": "README.md", "rank": 68, "score": 10.679693717393262 }, { "content": " }\n\n\n\n /// USB product ID.\n\n pub fn usb_product(&self) -> u16 {\n\n self.inner.usb_product as u16\n\n }\n\n\n\n /// USB device class.\n\n pub fn usb_class(&self) -> u8 {\n\n self.inner.usb_class as u8\n\n }\n\n\n\n /// USB device subclass.\n\n pub fn usb_subclass(&self) -> u8 {\n\n self.inner.usb_subclass as u8\n\n }\n\n\n\n /// USB device protocol.\n\n pub fn usb_protocol(&self) -> u8 {\n\n self.inner.usb_protocol as u8\n", "file_path": "src/abilities.rs", "rank": 69, "score": 10.164997408053615 }, { "content": "use std::borrow::Cow;\n\nuse std::ffi::CStr;\n\n\n\n/// Structure containing information about a camera's storage.\n\n///\n\n/// ## Example\n\n///\n\n/// A `Storage` object can be used to retrieve information about a camera's storage:\n\n///\n\n/// ```no_run\n\n/// let mut context = gphoto::Context::new().unwrap();\n\n/// let mut camera = gphoto::Camera::new().unwrap();\n\n/// camera.init(&mut context).unwrap();\n\n///\n\n/// for storage in camera.storage(&mut context).unwrap() {\n\n/// println!(\" base dir = {:?}\", storage.base_dir());\n\n/// println!(\" label = {:?}\", storage.label());\n\n/// println!(\" description = {:?}\", storage.description());\n\n/// println!(\" storage type = {:?}\", storage.storage_type());\n\n/// println!(\"filesystem type = {:?}\", storage.filesystem_type());\n", "file_path": "src/storage.rs", "rank": 70, "score": 9.080559355727711 }, { "content": " if self.inner.operations & 
::gphoto2::GP_OPERATION_TRIGGER_CAPTURE != 0 {\n\n operations.insert(CameraOperation::TriggerCapture);\n\n }\n\n\n\n operations\n\n }\n\n\n\n /// Returns the file operations supported by the device.\n\n pub fn file_operations(&self) -> HashSet<FileOperation> {\n\n let mut operations = HashSet::<FileOperation>::new();\n\n\n\n if self.inner.file_operations & ::gphoto2::GP_FILE_OPERATION_DELETE != 0 {\n\n operations.insert(FileOperation::Delete);\n\n }\n\n\n\n if self.inner.file_operations & ::gphoto2::GP_FILE_OPERATION_PREVIEW != 0 {\n\n operations.insert(FileOperation::Preview);\n\n }\n\n\n\n if self.inner.file_operations & ::gphoto2::GP_FILE_OPERATION_RAW != 0 {\n", "file_path": "src/abilities.rs", "rank": 71, "score": 8.791219855626366 }, { "content": " operations.insert(FileOperation::Raw);\n\n }\n\n\n\n if self.inner.file_operations & ::gphoto2::GP_FILE_OPERATION_AUDIO != 0 {\n\n operations.insert(FileOperation::Audio);\n\n }\n\n\n\n if self.inner.file_operations & ::gphoto2::GP_FILE_OPERATION_EXIF != 0 {\n\n operations.insert(FileOperation::EXIF);\n\n }\n\n\n\n operations\n\n }\n\n\n\n /// Returns the folder operations supported by the device.\n\n pub fn folder_operations(&self) -> HashSet<FolderOperation> {\n\n let mut operations = HashSet::<FolderOperation>::new();\n\n\n\n if self.inner.folder_operations & ::gphoto2::GP_FOLDER_OPERATION_DELETE_ALL != 0 {\n\n operations.insert(FolderOperation::DeleteAll);\n", "file_path": "src/abilities.rs", "rank": 72, "score": 8.689675478554928 }, { "content": " /// camlibs.\n\n pub fn ltdl(&self) -> &str {\n\n self.ltdl\n\n }\n\n\n\n /// Returns a string describing whether `libgphoto2` was compiled with support for handling\n\n /// EXIF metadata.\n\n pub fn exif(&self) -> &str {\n\n self.exif\n\n }\n\n}\n\n\n\n/// Returns a structure with the version of the `libgphoto2` C library.\n", "file_path": "src/version.rs", "rank": 73, "score": 8.642149188174686 }, { "content": " };\n\n match 
io::stdout().write_all(image.data().unwrap()) {\n\n Ok(_) => {}\n\n Err(err) => {\n\n eprintln!(\"error writing to disk: {}\", err);\n\n break;\n\n }\n\n };\n\n }\n\n println!(\" (done) \");\n\n}\n", "file_path": "examples/capture_preview_movie.rs", "rank": 74, "score": 7.746738259035166 }, { "content": " }\n\n\n\n if self.inner.folder_operations & ::gphoto2::GP_FOLDER_OPERATION_PUT_FILE != 0 {\n\n operations.insert(FolderOperation::PutFile);\n\n }\n\n\n\n if self.inner.folder_operations & ::gphoto2::GP_FOLDER_OPERATION_MAKE_DIR != 0 {\n\n operations.insert(FolderOperation::MakeDirectory);\n\n }\n\n\n\n if self.inner.folder_operations & ::gphoto2::GP_FOLDER_OPERATION_REMOVE_DIR != 0 {\n\n operations.insert(FolderOperation::RemoveDirectory);\n\n }\n\n\n\n operations\n\n }\n\n\n\n /// USB vendor ID.\n\n pub fn usb_vendor(&self) -> u16 {\n\n self.inner.usb_vendor as u16\n", "file_path": "src/abilities.rs", "rank": 75, "score": 7.483574140828042 }, { "content": "/// An error type for working with gphoto2.\n\n#[derive(Debug)]\n\npub struct Error {\n\n err: c_int,\n\n}\n\n\n\nimpl Error {\n\n /// Returns the kind of error.\n\n pub fn kind(&self) -> ErrorKind {\n\n match self.err {\n\n ::gphoto2::GP_ERROR_BAD_PARAMETERS => ErrorKind::InvalidInput,\n\n ::gphoto2::GP_ERROR_NOT_SUPPORTED => ErrorKind::NotSupported,\n\n ::gphoto2::GP_ERROR_CORRUPTED_DATA => ErrorKind::CorruptedData,\n\n ::gphoto2::GP_ERROR_FILE_EXISTS => ErrorKind::FileExists,\n\n ::gphoto2::GP_ERROR_MODEL_NOT_FOUND => ErrorKind::ModelNotFound,\n\n ::gphoto2::GP_ERROR_DIRECTORY_NOT_FOUND => ErrorKind::DirectoryNotFound,\n\n ::gphoto2::GP_ERROR_FILE_NOT_FOUND => ErrorKind::FileNotFound,\n\n ::gphoto2::GP_ERROR_DIRECTORY_EXISTS => ErrorKind::DirectoryExists,\n\n ::gphoto2::GP_ERROR_CAMERA_BUSY => ErrorKind::CameraBusy,\n\n ::gphoto2::GP_ERROR_PATH_NOT_ABSOLUTE => ErrorKind::PathNotAbsolute,\n", "file_path": "src/error.rs", "rank": 76, "score": 6.962201006576281 }, { "content": "use std::fs::{File, 
OpenOptions};\n\nuse std::{mem, slice};\n\nuse std::path::Path;\n\nuse std::convert::{TryFrom};\n\n\n\nuse std::os::unix::prelude::*;\n\n\n\n\n\n/// A trait for types that can store media.\n", "file_path": "src/media.rs", "rank": 77, "score": 6.718592795461136 }, { "content": "extern crate gphoto;\n\n\n\nuse std::path::Path;\n\n\n", "file_path": "examples/capture.rs", "rank": 78, "score": 6.585602257800094 }, { "content": "/// println!(\" driver status = {:?}\", abilities.driver_status());\n\n/// println!(\" port types = {:?}\", abilities.port_types());\n\n/// println!(\" speeds = {:?}\", abilities.speeds());\n\n/// println!(\"camera operations = {:?}\", abilities.camera_operations());\n\n/// println!(\" file operations = {:?}\", abilities.file_operations());\n\n/// println!(\"folder operations = {:?}\", abilities.folder_operations());\n\n/// println!(\" USB vendor = {:?}\", abilities.usb_vendor());\n\n/// println!(\" USB product = {:?}\", abilities.usb_product());\n\n/// println!(\" USB class = {:?}\", abilities.usb_class());\n\n/// println!(\" USB subclass = {:?}\", abilities.usb_subclass());\n\n/// println!(\" USB protocol = {:?}\", abilities.usb_protocol());\n\n/// ```\n\n///\n\n/// The above example may print something like the following:\n\n///\n\n/// ```text\n\n/// device type = Camera\n\n/// model = \"Nikon DSC D750\"\n\n/// driver status = Production\n\n/// port types = {USB}\n", "file_path": "src/abilities.rs", "rank": 79, "score": 6.325728556838444 }, { "content": " println!(\"\\n[summary]\\n{}\", s);\n\n }\n\n\n\n if let Ok(s) = camera.manual(&mut context) {\n\n println!(\"\\n[manual]\\n{}\", s);\n\n }\n\n\n\n if let Ok(s) = camera.about_driver(&mut context) {\n\n println!(\"\\n[driver]\\n{}\", s);\n\n }\n\n}\n", "file_path": "examples/camera_info.rs", "rank": 80, "score": 6.19828990104612 }, { "content": "pub mod prelude {\n\n pub use super::{Handle,HandleMut};\n\n}\n\n\n\n/// Types that manages access to a resource.\n\n///\n\n/// This trait is 
intended to be used by structs that manage the ownership of a resource from a C\n\n/// library. Often, a pointer to the resource is needed to implement methods in other modules that\n\n/// wrap the same C library, but the pointer should not be accessible from outside of the crate.\n\n///\n\n/// The `Handle` trait allows a struct to hand out access to an owned resource across module\n\n/// boundaries. Because the `Handle` trait is not exported from the crate, the owned resources will\n\n/// not be accessible from outside the crate.\n", "file_path": "src/handle.rs", "rank": 81, "score": 6.1780586338403936 }, { "content": "\n\n match camera.storage(&mut context) {\n\n Ok(storage) => {\n\n for s in storage {\n\n println!(\"\\n[storage]\");\n\n println!(\" base dir = {:?}\", s.base_dir());\n\n println!(\" label = {:?}\", s.label());\n\n println!(\" description = {:?}\", s.description());\n\n println!(\" storage type = {:?}\", s.storage_type());\n\n println!(\"filesystem type = {:?}\", s.filesystem_type());\n\n println!(\" access type = {:?}\", s.access_type());\n\n println!(\" capacity kb = {:?}\", s.capacity_kbytes());\n\n println!(\" free kb = {:?}\", s.free_kbytes());\n\n println!(\" free images = {:?}\", s.free_images());\n\n }\n\n },\n\n Err(err) => println!(\"\\ncould not retrieve storage information: {}\", err)\n\n }\n\n\n\n if let Ok(s) = camera.summary(&mut context) {\n", "file_path": "examples/camera_info.rs", "rank": 82, "score": 6.13669952664277 }, { "content": " if self.inner.fields & ::gphoto2::GP_STORAGEINFO_FILESYSTEMTYPE != 0 {\n\n Some(match self.inner.fstype {\n\n ::gphoto2::GP_STORAGEINFO_FST_GENERICFLAT => FilesystemType::Flat,\n\n ::gphoto2::GP_STORAGEINFO_FST_GENERICHIERARCHICAL => FilesystemType::Hierarchical,\n\n ::gphoto2::GP_STORAGEINFO_FST_DCF => FilesystemType::DCF,\n\n ::gphoto2::GP_STORAGEINFO_FST_UNDEFINED => FilesystemType::Unknown,\n\n })\n\n }\n\n else {\n\n None\n\n }\n\n }\n\n\n\n /// The storage's access permissions.\n\n pub fn 
access_type(&self) -> Option<AccessType> {\n\n if self.inner.fields & ::gphoto2::GP_STORAGEINFO_ACCESS != 0 {\n\n Some(match self.inner.access {\n\n ::gphoto2::GP_STORAGEINFO_AC_READWRITE => AccessType::ReadWrite,\n\n ::gphoto2::GP_STORAGEINFO_AC_READONLY => AccessType::ReadOnly,\n\n ::gphoto2::GP_STORAGEINFO_AC_READONLY_WITH_DELETE => AccessType::ReadDelete,\n", "file_path": "src/storage.rs", "rank": 83, "score": 5.993474933843508 }, { "content": " }\n\n}\n\n\n\nimpl StdError for Error {\n\n fn description(&self) -> &str {\n\n self.message()\n\n }\n\n}\n\n\n\n\n\n#[doc(hidden)]\n", "file_path": "src/error.rs", "rank": 84, "score": 5.943581549672665 }, { "content": " })\n\n }\n\n else {\n\n None\n\n }\n\n }\n\n\n\n /// The storage's total capacity in kilobytes.\n\n pub fn capacity_kbytes(&self) -> Option<u64> {\n\n if self.inner.fields & ::gphoto2::GP_STORAGEINFO_MAXCAPACITY != 0 {\n\n Some(self.inner.capacitykbytes)\n\n }\n\n else {\n\n None\n\n }\n\n }\n\n\n\n /// The storage's free space in kilobytes.\n\n pub fn free_kbytes(&self) -> Option<u64> {\n\n if self.inner.fields & ::gphoto2::GP_STORAGEINFO_FREESPACEKBYTES != 0 {\n", "file_path": "src/storage.rs", "rank": 85, "score": 5.628160987548104 }, { "content": "use std::ffi::CStr;\n\nuse std::slice;\n\nuse std::str;\n\n\n\n/// A structure that describes the version of the `libgphoto2` library.\n\n///\n\n/// The structure describes not only the version number, but also the state of several compile-time\n\n/// configuration options of the `libgphoto2` library.\n\n///\n\n/// ## Example\n\n///\n\n/// ```no_run\n\n/// let version = gphoto::libgphoto2_version();\n\n/// println!(\"libgphoto2 {} {} {} {} {}\",\n\n/// version.version(),\n\n/// version.camlibs(),\n\n/// version.compiler(),\n\n/// version.ltdl(),\n\n/// version.exif());\n\n/// ```\n", "file_path": "src/version.rs", "rank": 86, "score": 5.572065476629737 }, { "content": " }\n\n\n\n /// The storage's hardware type.\n\n pub fn storage_type(&self) -> 
Option<StorageType> {\n\n if self.inner.fields & ::gphoto2::GP_STORAGEINFO_STORAGETYPE != 0 {\n\n Some(match self.inner.storage_type {\n\n ::gphoto2::GP_STORAGEINFO_ST_FIXED_ROM => StorageType::FixedRom,\n\n ::gphoto2::GP_STORAGEINFO_ST_REMOVABLE_ROM => StorageType::RemovableRom,\n\n ::gphoto2::GP_STORAGEINFO_ST_FIXED_RAM => StorageType::FixedRam,\n\n ::gphoto2::GP_STORAGEINFO_ST_REMOVABLE_RAM => StorageType::RemoveableRam,\n\n ::gphoto2::GP_STORAGEINFO_ST_UNKNOWN => StorageType::Unknown,\n\n })\n\n }\n\n else {\n\n None\n\n }\n\n }\n\n\n\n /// The hiearchy type of the storage's filesystem.\n\n pub fn filesystem_type(&self) -> Option<FilesystemType> {\n", "file_path": "src/storage.rs", "rank": 87, "score": 5.479154027039651 }, { "content": " Some(self.inner.freekbytes)\n\n }\n\n else {\n\n None\n\n }\n\n }\n\n\n\n /// An estimate of the number of images that could fit in the storage's remaining space.\n\n ///\n\n /// This value is estimated by the camera.\n\n pub fn free_images(&self) -> Option<u64> {\n\n if self.inner.fields & ::gphoto2::GP_STORAGEINFO_FREESPACEIMAGES != 0 {\n\n Some(self.inner.freeimages)\n\n }\n\n else {\n\n None\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/storage.rs", "rank": 88, "score": 5.32772203008268 }, { "content": "extern crate gphoto;\n\n\n\nuse std::fs::File;\n\nuse std::io::prelude::*;\n\n\n", "file_path": "examples/capture_preview.rs", "rank": 89, "score": 3.39911002638223 }, { "content": " if self.inner.operations & ::gphoto2::GP_OPERATION_CONFIG != 0 {\n\n operations.insert(CameraOperation::Config);\n\n }\n\n\n\n if self.inner.operations & ::gphoto2::GP_OPERATION_CAPTURE_IMAGE != 0 {\n\n operations.insert(CameraOperation::CaptureImage);\n\n }\n\n\n\n if self.inner.operations & ::gphoto2::GP_OPERATION_CAPTURE_VIDEO != 0 {\n\n operations.insert(CameraOperation::CaptureVideo);\n\n }\n\n\n\n if self.inner.operations & ::gphoto2::GP_OPERATION_CAPTURE_AUDIO != 0 {\n\n operations.insert(CameraOperation::CaptureAudio);\n\n }\n\n\n\n if 
self.inner.operations & ::gphoto2::GP_OPERATION_CAPTURE_PREVIEW != 0 {\n\n operations.insert(CameraOperation::CapturePreview);\n\n }\n\n\n", "file_path": "src/abilities.rs", "rank": 90, "score": 3.2502473221922497 }, { "content": "\n\n/// Types of filesystem hierarchies.\n\n#[derive(Debug)]\n\npub enum FilesystemType {\n\n /// All files stored in one directory.\n\n Flat,\n\n\n\n /// Files are stored in a generic tree-like hierarchy.\n\n Hierarchical,\n\n\n\n /// Files are stored in a DCF-compatible hierarchy.\n\n ///\n\n /// Design rule for Camera File system (DCF) is a standard that defines a directory structure\n\n /// (among other things) for use on digital cameras. A filesystem that follows the DCF standard\n\n /// will store its media in a `DCIM` directory.\n\n DCF,\n\n\n\n /// Filesystem hierarchy is unknown.\n\n Unknown,\n\n}\n", "file_path": "src/storage.rs", "rank": 91, "score": 3.183835740578377 }, { "content": "\n\n /// Path is not absolute.\n\n PathNotAbsolute,\n\n\n\n /// Operation was canceled.\n\n Cancel,\n\n\n\n /// An error was reported by the camera.\n\n CameraError,\n\n\n\n /// An error was reported by the operating system.\n\n OSFailure,\n\n\n\n /// Not enough space when uploading a file.\n\n NoSpace,\n\n\n\n /// An unspecified error occured.\n\n Other,\n\n}\n\n\n", "file_path": "src/error.rs", "rank": 92, "score": 3.078034900070863 }, { "content": "\n\n /// Raw file data can be retrieved.\n\n Raw,\n\n\n\n /// Audio data can be retrieved.\n\n Audio,\n\n\n\n /// EXIF data can be retrieved.\n\n EXIF,\n\n}\n\n\n\n/// Operations that can be performed on folders on a device's storage.\n\n#[derive(Debug,PartialEq,Eq,Clone,Copy,Hash)]\n\npub enum FolderOperation {\n\n /// Deleting all files on the device is supported.\n\n DeleteAll,\n\n\n\n /// Uploading files to the device is supported.\n\n PutFile,\n\n\n\n /// Making new directories on the device is supported.\n\n MakeDirectory,\n\n\n\n /// Removing directories from the device is supported.\n\n 
RemoveDirectory,\n\n}\n\n\n\n\n\n#[doc(hidden)]\n", "file_path": "src/abilities.rs", "rank": 93, "score": 3.052926566552233 }, { "content": "extern crate gphoto;\n\n\n\nuse std::io::{self, prelude::*};\n\n\n", "file_path": "examples/capture_preview_movie.rs", "rank": 94, "score": 2.9704706043092504 }, { "content": " }\n\n}\n\n\n\n/// Types of devices.\n\n#[derive(Debug,PartialEq,Eq,Clone,Copy,Hash)]\n\npub enum DeviceType {\n\n /// Still camera.\n\n Camera,\n\n\n\n /// Audio player.\n\n Audio,\n\n}\n\n\n\n/// Stability of camera driver.\n\n#[derive(Debug,PartialEq,Eq,Clone,Copy,Hash)]\n\npub enum DriverStatus {\n\n /// Driver is production ready.\n\n Production,\n\n\n\n /// Driver is beta quality.\n", "file_path": "src/abilities.rs", "rank": 95, "score": 2.303544920072082 }, { "content": "\n\n/// Types of access permissions.\n\n#[derive(Debug)]\n\npub enum AccessType {\n\n /// Read and write operations are allowed.\n\n ReadWrite,\n\n\n\n /// Read and delete operations are allowed.\n\n ReadDelete,\n\n\n\n /// Only read operations are allowed.\n\n ReadOnly,\n\n}\n", "file_path": "src/storage.rs", "rank": 96, "score": 2.003001941952216 }, { "content": "/// Types of storage hardware.\n\n#[derive(Debug)]\n\npub enum StorageType {\n\n /// A fixed ROM storage.\n\n FixedRom,\n\n\n\n /// A removable ROM storage.\n\n RemovableRom,\n\n\n\n /// A fixed RAM storage.\n\n FixedRam,\n\n\n\n /// A removable RAM storage.\n\n ///\n\n /// This includes any kind of removable cards (SD card, CompactFlash, etc).\n\n RemoveableRam,\n\n\n\n /// Unknown storage type.\n\n Unknown,\n\n}\n", "file_path": "src/storage.rs", "rank": 97, "score": 1.6746009381170586 }, { "content": " Testing,\n\n\n\n /// Driver is alpha quality and might not even work.\n\n Experimental,\n\n\n\n /// Driver is no longer recommended and will be removed.\n\n Deprecated,\n\n}\n\n\n\n/// Operations that can be performed on a device.\n\n#[derive(Debug,PartialEq,Eq,Clone,Copy,Hash)]\n\npub enum CameraOperation {\n\n /// 
Camera can be configured.\n\n Config,\n\n\n\n /// Camera can capture images.\n\n CaptureImage,\n\n\n\n /// Camera can capture video.\n\n CaptureVideo,\n", "file_path": "src/abilities.rs", "rank": 98, "score": 1.5929832414744807 }, { "content": "\n\n /// Camera can capture audio.\n\n CaptureAudio,\n\n\n\n /// Camera can capture image previews.\n\n CapturePreview,\n\n\n\n /// Camera can trigger capture and wait for events.\n\n TriggerCapture,\n\n}\n\n\n\n\n\n/// Operations that can be performed on files on a device's storage.\n\n#[derive(Debug,PartialEq,Eq,Clone,Copy,Hash)]\n\npub enum FileOperation {\n\n /// Files can be deleted.\n\n Delete,\n\n\n\n /// Viewfinder content can be previewed.\n\n Preview,\n", "file_path": "src/abilities.rs", "rank": 99, "score": 1.5625212062735159 } ]
Rust
src/cai/uciv.rs
provotum/rust-crypto
3f284c3e6f3e3412a44d3cea11cd6487862c0d9f
use std::vec::Vec; use num::pow::Pow; use num::Zero; use std::ops::{Mul, Div, Sub, Add, Neg}; use ::arithmetic::mod_int::From; use ::arithmetic::mod_int::RandModInt; use ::arithmetic::mod_int::ModInt; use ::el_gamal::encryption::PublicKey; use ::el_gamal::ciphertext::CipherText; use ::el_gamal::serializer::Serializer; #[derive(Clone, Serialize, Deserialize)] pub struct PreImageSet { pub pre_images: Vec<ModInt> } #[derive(Clone, Serialize, Deserialize)] pub struct ImageSet { pub images: Vec<ModInt> } impl ImageSet { pub fn new(generator: ModInt, pre_image_set: PreImageSet) -> Self { let mut vec = vec![]; for pre_image in pre_image_set.pre_images.iter() { vec.push(generator.clone().pow(pre_image.clone())); } ImageSet { images: vec } } } #[derive(Eq, PartialEq, Debug, Clone, Hash, Serialize, Deserialize)] pub struct CaiProof { s1_options: Vec<ModInt>, s2_options: Vec<ModInt>, h1_options: Vec<ModInt>, h2_options: Vec<ModInt>, h: ModInt } impl CaiProof { pub fn new(public_key: PublicKey, cipher_text: CipherText, pre_image_set: PreImageSet, image_set: ImageSet, chosen_vote_idx: usize, voting_options: Vec<ModInt>) -> Self { assert_eq!(pre_image_set.pre_images.len(), image_set.images.len(), "The amount of pre-images and images must be equal"); assert_eq!(pre_image_set.pre_images.len(), voting_options.len(), "The amount of pre-images must be equal to the amount of voting options"); assert!(chosen_vote_idx < pre_image_set.pre_images.len(), "The chosen vote index must refer to a voting option for which a pre-image exists"); let mut s1_options: Vec<ModInt> = vec![ModInt::zero(); pre_image_set.pre_images.len()]; let mut s2_options: Vec<ModInt> = vec![ModInt::zero(); pre_image_set.pre_images.len()]; let mut h1_options: Vec<ModInt> = vec![ModInt::zero(); pre_image_set.pre_images.len()]; let mut h2_options: Vec<ModInt> = vec![ModInt::zero(); pre_image_set.pre_images.len()]; let mut a_options: Vec<ModInt> = vec![ModInt::zero(); pre_image_set.pre_images.len()]; let mut b_options: 
Vec<ModInt> = vec![ModInt::zero(); pre_image_set.pre_images.len()]; let j = chosen_vote_idx; let c1 = cipher_text.big_g; let c2 = cipher_text.big_h; let mut string_to_hash = String::new(); string_to_hash += &c1.to_string(); string_to_hash += &c2.to_string(); for i in 0..pre_image_set.pre_images.len() { if i != j { let s1_i = ModInt::gen_modint(public_key.q.clone()); let h1_i = ModInt::gen_modint(public_key.q.clone()); s1_options[i] = s1_i.clone(); h1_options[i] = h1_i.clone(); let c1_i = public_key.g.clone().pow(s1_i.clone()).mul(c1.clone().pow(h1_i.clone().neg())); let c2_i = public_key.h.clone().pow(s1_i.clone()).mul((c2.clone().div(public_key.g.clone().pow(voting_options[i].clone()))).pow(h1_i.clone().neg())); let a_i = ModInt::gen_modint(public_key.q.clone()); a_options[i] = a_i.clone(); let r_i = public_key.g.clone().pow(a_i); string_to_hash += &c1_i.to_string(); string_to_hash += &c2_i.to_string(); string_to_hash += &r_i.to_string(); } else { let s2_j = ModInt::gen_modint(public_key.q.clone()); let h2_j = ModInt::gen_modint(public_key.q.clone()); s2_options[j] = s2_j.clone(); h2_options[j] = h2_j.clone(); let b_j = ModInt::gen_modint(public_key.q.clone()); b_options[j] = b_j.clone(); let c1_j = public_key.g.clone().pow(b_j.clone()); let c2_j = public_key.h.clone().pow(b_j.clone()); let r_j = public_key.g.clone().pow(s2_j).mul(image_set.images[j].clone().pow(h2_j.clone().neg())); string_to_hash += &c1_j.to_string(); string_to_hash += &c2_j.to_string(); string_to_hash += &r_j.to_string(); } } let h_hash = Serializer::string_to_sha512(string_to_hash); let h = ModInt::from_hex_string(h_hash, public_key.q.value.clone()); for i in 0..pre_image_set.pre_images.len() { if i != j { let h2_i = h.clone().sub(h1_options[i].clone()); h2_options[i] = h2_i.clone(); let s2_i = a_options[i].clone().add(pre_image_set.pre_images[i].clone().mul(h2_i.clone())); s2_options[i] = s2_i; } else { let h1_j = h.clone().sub(h2_options[j].clone()); h1_options[j] = h1_j.clone(); let s1_j = 
b_options[j].clone().add(cipher_text.random.clone().mul(h1_j.clone())); s1_options[j] = s1_j.clone(); } } CaiProof { s1_options, s2_options, h1_options, h2_options, h } } pub fn verify(&self, public_key: PublicKey, cipher_text: CipherText, image_set: ImageSet, voting_options: Vec<ModInt>) -> bool { let c1 = cipher_text.big_g; let c2 = cipher_text.big_h; let mut string_to_hash = String::new(); string_to_hash += &c1.to_string(); string_to_hash += &c2.to_string(); for i in 0..self.s1_options.len() { let c1_i = public_key.g.clone().pow(self.s1_options[i].clone()).mul(c1.clone().pow(self.h1_options[i].clone().neg())); let c2_i = public_key.h.clone().pow(self.s1_options[i].clone()).mul((c2.clone().div(public_key.g.clone().pow(voting_options[i].clone()))).pow(self.h1_options[i].clone().neg())); let r_i = public_key.g.clone().pow(self.s2_options[i].clone()).mul(image_set.images[i].clone().pow(self.h2_options[i].clone().neg())); string_to_hash += &c1_i.to_string(); string_to_hash += &c2_i.to_string(); string_to_hash += &r_i.to_string(); } let h_hash = Serializer::string_to_sha512(string_to_hash); let h = ModInt::from_hex_string(h_hash, public_key.q.value.clone()); self.h == h } } #[cfg(test)] mod uciv_proof_test { use ::el_gamal::encryption::PublicKey; use ::el_gamal::encryption::{encrypt}; use ::el_gamal::ciphertext::CipherText; use ::arithmetic::mod_int::ModInt; use arithmetic::mod_int::From; use ::num::bigint::BigInt; use ::num::Zero; use ::num::One; use std::vec::Vec; use std::clone::Clone; use ::cai::uciv::{CaiProof, ImageSet, PreImageSet}; #[test] pub fn test_valid_proof() { let pub_key: PublicKey = PublicKey { p: ModInt::from_value_modulus(BigInt::from(5), BigInt::zero()), q: ModInt::from_value_modulus(BigInt::from(2), BigInt::zero()), h: ModInt::from_value_modulus(BigInt::from(32), BigInt::from(5)), g: ModInt::from_value_modulus(BigInt::from(2), BigInt::from(5)) }; let mut voting_options = Vec::new(); voting_options.push(ModInt::zero()); 
voting_options.push(ModInt::one()); let message: ModInt = ModInt { value: BigInt::one(), modulus: BigInt::from(5) }; let cipher_text = encrypt(&pub_key, message.clone()); let chosen_vote_idx = 1; let pre_image_set = PreImageSet { pre_images: vec![ ModInt::from_value_modulus(BigInt::from(1), BigInt::from(5)), ModInt::from_value_modulus(BigInt::from(0), BigInt::from(5)) ] }; let image_set = ImageSet::new(pub_key.g.clone(), pre_image_set.clone()); let proof = CaiProof::new( pub_key.clone(), cipher_text.clone(), pre_image_set.clone(), image_set.clone(), chosen_vote_idx, voting_options.clone() ); let is_proven = proof.verify( pub_key.clone(), cipher_text.clone(), image_set.clone(), voting_options.clone() ); assert!(is_proven); } #[test] pub fn test_invalid_proof() { let pub_key: PublicKey = PublicKey { p: ModInt::from_value_modulus(BigInt::from(5), BigInt::zero()), q: ModInt::from_value_modulus(BigInt::from(2), BigInt::zero()), h: ModInt::from_value_modulus(BigInt::from(32), BigInt::from(5)), g: ModInt::from_value_modulus(BigInt::from(2), BigInt::from(5)) }; let mut voting_options = Vec::new(); voting_options.push(ModInt::zero()); voting_options.push(ModInt::one()); let message: ModInt = ModInt { value: BigInt::one(), modulus: BigInt::from(5) }; let cipher_text = encrypt(&pub_key, message.clone()); let chosen_vote_idx = 1; let pre_image_set = PreImageSet { pre_images: vec![ ModInt::from_value_modulus(BigInt::from(1), BigInt::from(5)), ModInt::from_value_modulus(BigInt::from(0), BigInt::from(5)) ] }; let image_set = ImageSet::new(pub_key.g.clone(), pre_image_set.clone()); let proof = CaiProof::new( pub_key.clone(), cipher_text.clone(), pre_image_set.clone(), image_set.clone(), chosen_vote_idx, voting_options.clone() ); let fake_cipher_text = CipherText { big_g: ModInt::from_value_modulus(BigInt::from(1), BigInt::from(0)), big_h: ModInt::from_value_modulus(BigInt::from(2), BigInt::from(0)), random: ModInt::from_value_modulus(BigInt::from(3), BigInt::from(0)) }; let 
is_proven = proof.verify( pub_key.clone(), fake_cipher_text.clone(), image_set.clone(), voting_options.clone() ); assert!(!is_proven); } }
use std::vec::Vec; use num::pow::Pow; use num::Zero; use std::ops::{Mul, Div, Sub, Add, Neg}; use ::arithmetic::mod_int::From; use ::arithmetic::mod_int::RandModInt; use ::arithmetic::mod_int::ModInt; use ::el_gamal::encryption::PublicKey; use ::el_gamal::ciphertext::CipherText; use ::el_gamal::serializer::Serializer; #[derive(Clone, Serialize, Deserialize)] pub struct PreImageSet { pub pre_images: Vec<ModInt> } #[derive(Clone, Serialize, Deserialize)] pub struct ImageSet { pub images: Vec<ModInt> } impl ImageSet { pub fn new(generator: ModInt, pre_image_set: PreImageSet) -> Self { let mut vec = vec![]; for pre_image in pre_image_set.pre_images.iter() { vec.push(generator.clone().pow(pre_image.clone())); } ImageSet { images: vec } } } #[derive(Eq, PartialEq, Debug, Clone, Hash, Serialize, Deserialize)] pub struct CaiProof { s1_options: Vec<ModInt>, s2_options: Vec<ModInt>, h1_options: Vec<ModInt>, h2_options: Vec<ModInt>, h: ModInt } impl CaiProof { pub fn new(public_key: PublicKey, cipher_text: CipherText, pre_image_set: PreImageSet, image_set: ImageSet, chosen_vote_idx: usize, voting_options: Vec<ModInt>) -> Self { assert_eq!(pre_image_set.pre_images.len(), image_set.images.len(), "The amount of pre-images and images must be equal"); assert_eq!(pre_image_set.pre_images.len(), voting_options.len(), "The amount of pre-images must be equal to the amount of voting options"); assert!(chosen_vote_idx < pre_image_set.pre_images.len(), "The chosen vote index must refer to a voting option for which a pre-image exists"); let mut s1_options: Vec<ModInt> = vec![ModInt::zero(); pre_image_set.pre_images.len()]; let mut s2_options: Vec<ModInt> = vec![ModInt::zero(); pre_image_set.pre_images.len()]; let mut h1_options: Vec<ModInt> = vec![ModInt::zero(); pre_image_set.pre_images.len()]; let mut h2_options: Vec<ModInt> = vec![ModInt::zero(); pre_image_set.pre_images.len()]; let mut a_options: Vec<ModInt> = vec![ModInt::zero(); pre_image_set.pre_images.len()]; let mut b_options: 
Vec<ModInt> = vec![ModInt::zero(); pre_image_set.pre_images.len()]; let j = chosen_vote_idx; let c1 = cipher_text.big_g; let c2 = cipher_text.big_h; let mut string_to_hash = String::new(); string_to_hash += &c1.to_string(); string_to_hash += &c2.to_string(); for i in 0..pre_image_set.pre_images.len() { if i != j { let s1_i = ModInt::gen_modint(public_key.q.clone()); let h1_i = ModInt::gen_modint(public_key.q.clone()); s1_options[i] = s1_i.clone(); h1_options[i] = h1_i.clone(); let c1_i = public_key.g.clone().pow(s1_i.clone()).mul(c1.clone().pow(h1_i.clone().neg())); let c2_i = public_key.h.clone().pow(s1_i.clone()).mul((c2.clone().div(public_key.g.clone().pow(voting_options[i].clone()))).pow(h1_i.clone().neg())); let a_i = ModInt::gen_modint(public_key.q.clone()); a_options[i] = a_i.clone(); let r_i = public_key.g.clone().pow(a_i); string_to_hash += &c1_i.to_string(); string_to_hash += &c2_i.to_string(); string_to_hash += &r_i.to_string(); } else { let s2_j = ModInt::gen_modint(public_key.q.clone()); let h2_j = ModInt::gen_modint(public_key.q.clone()); s2_options[j] = s2_j.clone(); h2_options[j] = h2_j.clone(); let b_j = ModInt::gen_modint(public_key.q.clone()); b_options[j] = b_j.clone(); let c1_j = public_key.g.clone().pow(b_j.clone()); let c2_j = public_key.h.clone().pow(b_j.clone()); let r_j = public_key.g.clone().pow(s2_j).mul(image_set.images[j].clone().pow(h2_j.clone().neg())); string_to_hash += &c1_j.to_string(); string_to_hash += &c2_j.to_string(); string_to_hash += &r_j.to_string(); } } let h_hash = Serializer::string_to_sha512(string_to_hash); let h = ModInt::from_hex_string(h_hash, public_key.q.value.clone()); for i in 0..pre_image_set.pre_images.len() { if i != j { let h2_i = h.clone().sub(h1_options[i].clone()); h2_options[i] = h2_i.clone(); let s2_i = a_options[i].clone().add(pre_image_set.pre_images[i].clone().mul(h2_i.clone())); s2_options[i] = s2_i; } else { let h1_j = h.clone().sub(h2_options[j].clone()); h1_options[j] = h1_j.clone(); let s1_j = 
b_options[j].clone().add(cipher_text.random.clone().mul(h1_j.clone())); s1_options[j] = s1_j.clone(); } } CaiProof { s1_options, s2_options, h1_options, h2_options, h } } pub fn verify(&self, public_key: PublicKey, cipher_text: CipherText, image_set: ImageSet, voting_options: Vec<ModInt>) -> bool { let c1 = cipher_text.big_g; let c2 = cipher_text.big_h; let mut string_to_hash = String::new(); string_to_hash += &c1.to_string(); string_to_hash += &c2.to_string(); for i in 0..self.s1_options.len() { let c1_i = public_key.g.clone().pow(self.s1_options[i].clone()).mul(c1.clone().pow(self.h1_options[i].clone().neg())); let c2_i = public_key.h.clone().pow(self.s1_options[i].clone()).mul((c2.clone().div(public_key.g.clone().pow(voting_options[i].clone()))).pow(self.h1_options[i].clone().neg())); let r_i = public_key.g.clone().pow(self.s2_options[i].clone()).mul(image_set.images[i].clone().pow(self.h2_options[i].clone().neg())); string_to_hash += &c1_i.to_string(); string_to_hash += &c2_i.to_string(); string_to_hash += &r_i.to_string(); } let h_hash = Serializer::string_to_sha512(string_to_hash); let h = ModInt::from_hex_string(h_hash, public_key.q.value.clone()); self.h == h } } #[cfg(test)] mod uciv_proof_test { use ::el_gamal::encryption::PublicKey; use ::el_gamal::encryption::{encrypt}; use ::el_gamal::ciphertext::CipherText; use ::arithmetic::mod_int::ModInt; use arithmetic::mod_int::From; use ::num::bigint::BigInt; use ::num::Zero; use ::num::One; use std::vec::Vec; use std::clone::Clone; use ::cai::uciv::{CaiProof, ImageSet, PreImageSet}; #[test] pub fn test_valid_proof() { let pub_key: PublicKey = PublicKey { p: ModInt::from_value_modulus(BigInt::from(5), BigInt::zero()), q: ModInt::from_value_modulus(BigInt::from(2), BigInt::zero()), h: ModInt::from_value_modulus(BigInt::from(32), BigInt::from(5)), g: ModInt::from_value_modulus(BigInt::from(2), BigInt::from(5)) }; let mut voting_options = Vec::new(); voting_options.push(ModInt::zero()); 
voting_options.push(ModInt::one()); let message: ModInt = ModInt { value: BigInt::one(), modulus: BigInt::from(5) }; let cipher_text = encrypt(&pub_key, message.clone()); let chosen_vote_idx = 1; let pre_image_set = PreImageSet { pre_images: vec![ ModInt::from_value_modulus(BigInt::from(1), BigInt::from(5)), ModInt::from_value_modulus(BigInt::from(0), BigInt::from(5)) ] }; let image_set = ImageSet::new(pub_key.g.clone(), pre_image_set.clone()); let proof = CaiProof::new( pub_key.clone(), cipher_text.clone(), pre_image_set.clone(), image_set.clone(), chosen_vote_idx, voting_options.clone() ); let is_proven = proof.verify( pub_key.clone(), cipher_text.clone(), image_set.clone(), voting_options.clone() ); assert!(is_proven); } #[test] pub fn test_invalid_proof() { let pub_key: PublicKey = PublicKey { p: ModInt::from_value_modulus(BigInt::from(5), BigInt::zero()), q: ModInt::from_value_modulus(BigInt::from(2), BigInt::zero()), h: ModInt::from_value_modulus(BigInt::from(32), BigInt::from(5)), g: ModInt::from_value_modulus(BigInt::from(2), BigInt::from(5)) }; let m
}
ut voting_options = Vec::new(); voting_options.push(ModInt::zero()); voting_options.push(ModInt::one()); let message: ModInt = ModInt { value: BigInt::one(), modulus: BigInt::from(5) }; let cipher_text = encrypt(&pub_key, message.clone()); let chosen_vote_idx = 1; let pre_image_set = PreImageSet { pre_images: vec![ ModInt::from_value_modulus(BigInt::from(1), BigInt::from(5)), ModInt::from_value_modulus(BigInt::from(0), BigInt::from(5)) ] }; let image_set = ImageSet::new(pub_key.g.clone(), pre_image_set.clone()); let proof = CaiProof::new( pub_key.clone(), cipher_text.clone(), pre_image_set.clone(), image_set.clone(), chosen_vote_idx, voting_options.clone() ); let fake_cipher_text = CipherText { big_g: ModInt::from_value_modulus(BigInt::from(1), BigInt::from(0)), big_h: ModInt::from_value_modulus(BigInt::from(2), BigInt::from(0)), random: ModInt::from_value_modulus(BigInt::from(3), BigInt::from(0)) }; let is_proven = proof.verify( pub_key.clone(), fake_cipher_text.clone(), image_set.clone(), voting_options.clone() ); assert!(!is_proven); }
function_block-function_prefixed
[ { "content": "pub fn encrypt(public_key: &PublicKey, message: ModInt) -> CipherText {\n\n let random: ModInt = ModInt::gen_modint(public_key.q.clone());\n\n\n\n let g = public_key.g.clone();\n\n let h = public_key.h.clone();\n\n\n\n let big_g = g.clone().pow(random.clone());\n\n let big_h1= h.clone().pow(random.clone());\n\n let big_h2 = g.clone().pow(message.clone());\n\n\n\n let big_h = big_h1 * big_h2;\n\n\n\n CipherText {\n\n big_g,\n\n big_h,\n\n random,\n\n }\n\n}\n\n\n", "file_path": "src/el_gamal/encryption.rs", "rank": 0, "score": 142097.54209327992 }, { "content": "pub fn decrypt(private_key: PrivateKey, cipher_text: CipherText) -> ModInt {\n\n\n\n let h: &ModInt = &cipher_text.big_h;\n\n let g: &ModInt = &cipher_text.big_g;\n\n let x: &ModInt = &private_key.x;\n\n\n\n let g_to_m: ModInt = h.clone() / (g.clone().pow(x.clone()));\n\n\n\n let mut i: ModInt = ModInt::zero();\n\n // find cleartext value so that it matches target\n\n loop {\n\n let target: ModInt = ModInt::from_value_modulus(private_key.g.value.clone(), g_to_m.modulus.clone()).pow(i.clone());\n\n\n\n if target.eq(&g_to_m) {\n\n return i;\n\n }\n\n\n\n i = i + ModInt::one();\n\n }\n\n}\n", "file_path": "src/el_gamal/encryption.rs", "rank": 1, "score": 112471.20306524349 }, { "content": "///\n\n/// # Modular Inverse\n\n///\n\n/// Calculates the modular inverse `a^-1 mod m`\n\n///\n\n/// ## Credits\n\n/// Inspired by [simon-andrews/rust-modinverse](https://github.com/simon-andrews/rust-modinverse)\n\n///\n\npub fn mod_inverse(a: BigInt, m: BigInt) -> Option<BigInt> {\n\n let (g, x, _) = extended_gcd(a.clone(), m.clone());\n\n if g != BigInt::one() {\n\n return None;\n\n } else {\n\n // actually use the modulus instead of the remainder\n\n // operator \"%\" which behaves differently for negative values\n\n // -> https://stackoverflow.com/questions/31210357/is-there-a-modulus-not-remainder-function-operation\n\n let modulus: BigInt = (x % m.clone()) + m;\n\n\n\n return Some(modulus);\n\n 
}\n\n}\n\n\n", "file_path": "src/arithmetic/mod_inverse.rs", "rank": 2, "score": 107899.55355094699 }, { "content": "/// # Random ModInt\n\n///\n\n/// Generate random numbers\n\npub trait RandModInt {\n\n /// Generate random ModInts with the given upper_bound.\n\n /// Note, that the returned ModInt has a modulus set equal to the given upper_bound.\n\n fn gen_modint(upper_bound: ModInt) -> ModInt;\n\n}\n\n\n\nimpl RandModInt for ModInt {\n\n fn gen_modint(upper_bound: ModInt) -> ModInt {\n\n assert!(upper_bound.value > BigInt::zero(), \"the upper_bound must be greater than zero\");\n\n\n\n let mut rng = rand::thread_rng();\n\n let rnd_val = rng.gen_bigint_range(&BigInt::zero(), &upper_bound.value);\n\n\n\n ModInt {\n\n value: rnd_val,\n\n modulus: upper_bound.value,\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 3, "score": 68433.42755899011 }, { "content": "pub trait From {\n\n /// Create a ModInt with the given value and modulus.\n\n fn from_value_modulus(value: BigInt, modulus: BigInt) -> ModInt;\n\n\n\n /// Create a ModInt with the given value and a zero modulus.\n\n fn from_value(value: BigInt) -> ModInt;\n\n\n\n fn from_hex_string(hex_string: String, modulus: BigInt) -> ModInt;\n\n}\n\n\n\nimpl From for ModInt {\n\n fn from_value_modulus(value: BigInt, modulus: BigInt) -> ModInt {\n\n let non_normalized = ModInt {\n\n value,\n\n modulus,\n\n };\n\n\n\n non_normalized.normalize()\n\n }\n\n\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 4, "score": 66463.66559785482 }, { "content": "/// # Homomorphic Operation\n\n///\n\n/// Operate in a homomorphic way on the given cipher text.\n\npub trait Operate {\n\n fn operate(self, cipher_text: CipherText) -> CipherText;\n\n}\n\n\n\nimpl Operate for CipherText {\n\n\n\n fn operate(self, cipher_text: CipherText) -> CipherText {\n\n CipherText {\n\n big_g: self.big_g * cipher_text.big_g,\n\n big_h: self.big_h * cipher_text.big_h,\n\n random: self.random + cipher_text.random\n\n }\n\n 
}\n\n}\n\n\n\n#[cfg(test)]\n\nmod additive_tests {\n\n\n\n use ::arithmetic::mod_int::From;\n\n use ::arithmetic::mod_int::ModInt;\n", "file_path": "src/el_gamal/additive.rs", "rank": 5, "score": 65121.605337089 }, { "content": "use ::arithmetic::mod_int::ModInt;\n\n\n\n/// # ElGamal CipherText.\n\n#[derive(Eq, PartialEq, Clone, Debug, Hash, Serialize, Deserialize)]\n\npub struct CipherText {\n\n pub big_g: ModInt,\n\n pub big_h: ModInt,\n\n pub random: ModInt\n\n}", "file_path": "src/el_gamal/ciphertext.rs", "rank": 6, "score": 55079.72450083779 }, { "content": "\n\n#[cfg(test)]\n\nmod serializer_test {\n\n\n\n use ::el_gamal::serializer::Serializer;\n\n\n\n #[test]\n\n fn test_string_to_sha512_hex() {\n\n let result = Serializer::string_to_sha512(\"1234\".to_string());\n\n\n\n assert_eq!(\n\n \"d404559f602eab6fd602ac7680dacbfaadd13630335e951f097af3900e9de176b6db28512f2e000b9d04fba5133e8b1c6e8df59db3a8ab9d60be4b97cc9e81db\".to_string(),\n\n result\n\n );\n\n }\n\n}", "file_path": "src/el_gamal/serializer.rs", "rank": 7, "score": 55028.37746639422 }, { "content": "use sha2::{Sha512, Digest};\n\n\n\npub struct Serializer {}\n\n\n\nimpl Serializer {\n\n pub fn string_to_sha512(string: String) -> String {\n\n // create a Sha512 object\n\n let mut hasher = Sha512::default();\n\n\n\n // write input message\n\n hasher.input(&string.as_bytes());\n\n\n\n let mut hex_string = String::new();\n\n for byte in hasher.result().iter() {\n\n hex_string += &format!(\"{:02x}\", byte)\n\n }\n\n\n\n hex_string\n\n }\n\n}\n", "file_path": "src/el_gamal/serializer.rs", "rank": 8, "score": 55028.17585290873 }, { "content": "pub mod additive;\n\npub mod ciphertext;\n\npub mod encryption;\n\npub mod membership_proof;\n\npub mod serializer;", "file_path": "src/el_gamal/mod.rs", "rank": 9, "score": 52628.498025065106 }, { "content": "use ::arithmetic::mod_int::ModInt;\n\nuse ::arithmetic::mod_int::RandModInt;\n\nuse ::el_gamal::ciphertext::CipherText;\n\nuse 
::el_gamal::encryption::{PublicKey};\n\nuse arithmetic::mod_int::From;\n\nuse num::bigint::BigInt;\n\nuse num::{Zero};\n\nuse num::traits::pow::Pow;\n\nuse std::ops::Div;\n\nuse std::ops::Mul;\n\nuse std::ops::Neg;\n\nuse std::ops::Sub;\n\nuse std::vec::Vec;\n\nuse std::ops::Add;\n\nuse ::el_gamal::serializer::Serializer;\n\n\n\n#[derive(Eq, PartialEq, Serialize, Deserialize, Hash, Clone, Debug)]\n\npub struct MembershipProof {\n\n s_responses: Vec<ModInt>,\n\n c_responses: Vec<ModInt>,\n", "file_path": "src/el_gamal/membership_proof.rs", "rank": 10, "score": 52431.854476699664 }, { "content": " }\n\n}\n\n\n\n#[cfg(test)]\n\nmod membership_proof_test {\n\n\n\n use ::el_gamal::encryption::PublicKey;\n\n use ::el_gamal::encryption::{encrypt};\n\n use ::arithmetic::mod_int::ModInt;\n\n use arithmetic::mod_int::From;\n\n use ::num::bigint::BigInt;\n\n use ::num::Zero;\n\n use ::num::One;\n\n use ::el_gamal::membership_proof::MembershipProof;\n\n use std::vec::Vec;\n\n use std::clone::Clone;\n\n\n\n #[test]\n\n pub fn test_one_or_proof() {\n\n let message: ModInt = ModInt {\n", "file_path": "src/el_gamal/membership_proof.rs", "rank": 11, "score": 52428.43241414972 }, { "content": " let proof = MembershipProof::new(\n\n pub_key.clone(),\n\n message,\n\n cipher_text.clone(),\n\n domains.clone()\n\n );\n\n\n\n let is_proven = proof.verify(pub_key.clone(), cipher_text.clone(), domains.clone());\n\n\n\n assert!(is_proven);\n\n }\n\n\n\n #[test]\n\n pub fn test_zero_or_proof() {\n\n let message: ModInt = ModInt {\n\n value: BigInt::zero(),\n\n modulus: BigInt::from(5) // must be equal to the value p of the public key\n\n };\n\n\n\n //h := (g^x) mod p\n", "file_path": "src/el_gamal/membership_proof.rs", "rank": 12, "score": 52428.4314012711 }, { "content": " y = g.clone().pow(s.clone()).mul(cipher_text.big_g.clone().pow(neg_c.clone()));\n\n z = h.clone().pow(s.clone()).mul(cipher_text.big_h.clone().div(g_pow).pow(neg_c.clone()));\n\n }\n\n\n\n y_response.push(y.clone());\n\n 
z_response.push(z.clone());\n\n\n\n string_to_hash += &y.to_string();\n\n string_to_hash += &z.to_string();\n\n }\n\n\n\n let c_hash = Serializer::string_to_sha512(string_to_hash);\n\n let mut c_0 = ModInt::from_hex_string(c_hash, public_key.q.value.clone());\n\n\n\n for fake_c in c_response.clone() {\n\n c_0 = c_0.sub(fake_c);\n\n }\n\n\n\n s_response[message_idx] = c_0.clone().mul(cipher_text.random.clone()).add(t.clone());\n\n c_response[message_idx] = c_0;\n", "file_path": "src/el_gamal/membership_proof.rs", "rank": 13, "score": 52426.88074834348 }, { "content": " y_responses: Vec<ModInt>,\n\n z_responses: Vec<ModInt>,\n\n\n\n p: ModInt,\n\n q: ModInt,\n\n}\n\n\n\nimpl MembershipProof {\n\n pub fn new(public_key: PublicKey, plain_text: ModInt, cipher_text: CipherText, domains: Vec<ModInt>) -> MembershipProof {\n\n let mut y_response: Vec<ModInt> = vec![];\n\n let mut z_response: Vec<ModInt> = vec![];\n\n let mut s_response: Vec<ModInt> = vec![];\n\n let mut c_response: Vec<ModInt> = vec![];\n\n\n\n let g = ModInt {\n\n value: public_key.g.value.clone(),\n\n modulus: public_key.p.value.clone(),\n\n };\n\n\n\n let h = ModInt {\n", "file_path": "src/el_gamal/membership_proof.rs", "rank": 14, "score": 52425.052705502916 }, { "content": " // we need to add fake values\n\n s_response.push(ModInt::zero());\n\n c_response.push(ModInt::zero());\n\n\n\n y = g.clone().pow(t.clone());\n\n z = h.clone().pow(t.clone());\n\n\n\n message_idx = i;\n\n } else {\n\n // add fake commitments as well as the corresponding response\n\n // for a value which is not the plaintext message\n\n let s = ModInt::gen_modint(public_key.q.clone());\n\n let c = ModInt::gen_modint(public_key.q.clone());\n\n\n\n s_response.push(s.clone());\n\n c_response.push(c.clone());\n\n\n\n let neg_c = c.neg();\n\n let g_pow = g.clone().pow(domain_val.clone());\n\n\n", "file_path": "src/el_gamal/membership_proof.rs", "rank": 15, "score": 52424.10511942948 }, { "content": " //2 := 2^5 mod 5\n\n let pub_key: 
PublicKey = PublicKey {\n\n p: ModInt::from_value_modulus(BigInt::from(5), BigInt::zero()),\n\n q: ModInt::from_value_modulus(BigInt::from(2), BigInt::zero()),\n\n h: ModInt::from_value_modulus(BigInt::from(32), BigInt::from(5)),\n\n g: ModInt::from_value_modulus(BigInt::from(2), BigInt::from(5))\n\n };\n\n\n\n let cipher_text = encrypt(&pub_key, message.clone());\n\n\n\n let mut domains = Vec::new();\n\n domains.push(ModInt::zero());\n\n domains.push(ModInt::one());\n\n\n\n\n\n let proof = MembershipProof::new(\n\n pub_key.clone(),\n\n message, // <- other message than encrypted\n\n cipher_text.clone(),\n\n domains.clone()\n\n );\n\n\n\n let is_proven = proof.verify(pub_key.clone(), cipher_text.clone(), domains.clone());\n\n\n\n assert!(is_proven);\n\n }\n\n}\n", "file_path": "src/el_gamal/membership_proof.rs", "rank": 16, "score": 52423.92151319152 }, { "content": " value: public_key.h.value.clone(),\n\n modulus: public_key.p.value.clone(),\n\n };\n\n\n\n let t = ModInt::gen_modint(public_key.q.clone());\n\n\n\n let mut string_to_hash = String::new();\n\n string_to_hash += &g.to_string();\n\n string_to_hash += &h.to_string();\n\n string_to_hash += &cipher_text.big_g.to_string();\n\n string_to_hash += &cipher_text.big_h.to_string();\n\n\n\n let mut message_idx = 0;\n\n for i in 0..domains.len() {\n\n let mut y: ModInt;\n\n let mut z: ModInt;\n\n\n\n let domain_val: ModInt = (*domains.get(i).unwrap()).clone();\n\n\n\n if domain_val.eq(&plain_text) {\n", "file_path": "src/el_gamal/membership_proof.rs", "rank": 17, "score": 52423.59190210731 }, { "content": " let domain_val = domain.get(i).unwrap();\n\n let g_pow = g.clone().pow(domain_val.clone());\n\n\n\n let s: ModInt = (*self.s_responses.get(i).unwrap()).clone();\n\n let c: ModInt = (*self.c_responses.get(i).unwrap()).clone();\n\n let neg_c = c.clone().neg();\n\n\n\n c_choices = c_choices.add(c.clone());\n\n\n\n let y = g.clone().pow(s.clone()).mul(cipher_text.big_g.clone().pow(neg_c.clone()));\n\n let z = 
h.clone().pow(s.clone()).mul(cipher_text.big_h.clone().div(g_pow).pow(neg_c.clone()));\n\n\n\n string_to_hash += &y.to_string();\n\n string_to_hash += &z.to_string();\n\n }\n\n\n\n let c_hash: String = Serializer::string_to_sha512(string_to_hash);\n\n let new_c = ModInt::from_hex_string(c_hash, self.q.value.clone());\n\n\n\n return c_choices.eq(&new_c);\n", "file_path": "src/el_gamal/membership_proof.rs", "rank": 18, "score": 52423.36780069166 }, { "content": "\n\n MembershipProof {\n\n s_responses: s_response,\n\n c_responses: c_response,\n\n y_responses: y_response,\n\n z_responses: z_response,\n\n p: public_key.p,\n\n q: public_key.q,\n\n }\n\n }\n\n\n\n pub fn verify(&self, public_key: PublicKey, cipher_text: CipherText, domain: Vec<ModInt>) -> bool {\n\n if domain.len() < self.c_responses.len() || domain.len() < self.s_responses.len() {\n\n // The domain of the message is bigger than specified.\n\n // Therefore, the proof that the message is within the given domain is invalid.\n\n panic!(\"Domain has not the same length as the values of the proof.\")\n\n }\n\n\n\n let g = ModInt {\n\n value: public_key.g.value.clone(),\n", "file_path": "src/el_gamal/membership_proof.rs", "rank": 19, "score": 52422.41775926614 }, { "content": " value: BigInt::one(),\n\n modulus: BigInt::from(5) // must be equal to the value p of the public key\n\n };\n\n\n\n //h := (g^x) mod p\n\n //2 := 2^5 mod 5\n\n let pub_key: PublicKey = PublicKey {\n\n p: ModInt::from_value_modulus(BigInt::from(5), BigInt::zero()),\n\n q: ModInt::from_value_modulus(BigInt::from(2), BigInt::zero()),\n\n h: ModInt::from_value_modulus(BigInt::from(32), BigInt::from(5)),\n\n g: ModInt::from_value_modulus(BigInt::from(2), BigInt::from(5))\n\n };\n\n\n\n let cipher_text = encrypt(&pub_key, message.clone());\n\n\n\n let mut domains = Vec::new();\n\n domains.push(ModInt::zero());\n\n domains.push(ModInt::one());\n\n\n\n\n", "file_path": "src/el_gamal/membership_proof.rs", "rank": 20, "score": 52422.02663903832 }, 
{ "content": " modulus: public_key.p.value.clone(),\n\n };\n\n\n\n let h = ModInt {\n\n value: public_key.h.value.clone(),\n\n modulus: public_key.p.value.clone(),\n\n };\n\n\n\n let mut c_choices = ModInt {\n\n value: BigInt::zero(),\n\n modulus: public_key.q.value.clone()\n\n };\n\n\n\n let mut string_to_hash = String::new();\n\n string_to_hash += &g.to_string();\n\n string_to_hash += &h.to_string();\n\n string_to_hash += &cipher_text.big_g.to_string();\n\n string_to_hash += &cipher_text.big_h.to_string();\n\n\n\n for i in 0..self.c_responses.len() {\n", "file_path": "src/el_gamal/membership_proof.rs", "rank": 21, "score": 52421.035133362035 }, { "content": "fn extended_gcd(a: BigInt, b: BigInt) -> (BigInt, BigInt, BigInt) {\n\n assert!(a < b);\n\n if a == BigInt::zero() {\n\n return (b, BigInt::zero(), BigInt::one());\n\n } else {\n\n let (g, x, y) = extended_gcd(b.clone() % a.clone(), a.clone());\n\n return (g, y - (b / a) * x.clone(), x.clone());\n\n }\n\n}\n", "file_path": "src/arithmetic/mod_inverse.rs", "rank": 22, "score": 43540.65400495885 }, { "content": "initSidebarItems({\"struct\":[[\"CipherText\",\"ElGamal CipherText.\"]]});", "file_path": "docs/crypto_rs/el_gamal/ciphertext/sidebar-items.js", "rank": 23, "score": 33245.75719708625 }, { "content": "initSidebarItems({\"struct\":[[\"Serializer\",\"\"]]});", "file_path": "docs/crypto_rs/el_gamal/serializer/sidebar-items.js", "rank": 24, "score": 33218.06499057486 }, { "content": "initSidebarItems({\"struct\":[[\"MembershipProof\",\"\"]]});", "file_path": "docs/crypto_rs/el_gamal/membership_proof/sidebar-items.js", "rank": 25, "score": 32145.621882187435 }, { "content": "var searchIndex = {};\n", "file_path": "docs/search-index.js", "rank": 26, "score": 31835.136987293878 }, { "content": "/// Modular arithmetic within a cyclic field\n\npub mod mod_int;\n\n\n\n/// Perform the inverse operation in modular arithmetic\n\npub mod mod_inverse;", "file_path": "src/arithmetic/mod.rs", "rank": 27, "score": 
27081.62679883522 }, { "content": "pub mod uciv;", "file_path": "src/cai/mod.rs", "rank": 28, "score": 27081.478629152218 }, { "content": "use ::arithmetic::mod_int::From;\n\nuse ::arithmetic::mod_int::ModInt;\n\nuse ::arithmetic::mod_int::RandModInt;\n\nuse ::el_gamal::ciphertext::CipherText;\n\nuse num::traits::Pow;\n\nuse num::Zero;\n\nuse num::One;\n\nuse std::fs::File;\n\nuse std::io::{Read, Write};\n\nuse serde_json;\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct PublicKey {\n\n pub p: ModInt,\n\n pub q: ModInt,\n\n pub h: ModInt,\n\n pub g: ModInt,\n\n}\n\n\n\nimpl PublicKey {\n", "file_path": "src/el_gamal/encryption.rs", "rank": 29, "score": 26665.958307104665 }, { "content": " use ::el_gamal::additive::Operate;\n\n use ::el_gamal::ciphertext::CipherText;\n\n use ::num::BigInt;\n\n\n\n #[test]\n\n fn test_additive_elgamal() {\n\n let c1: CipherText = CipherText {\n\n big_g: ModInt::from_value(BigInt::from(2)),\n\n big_h: ModInt::from_value(BigInt::from(4)),\n\n random: ModInt::from_value(BigInt::from(1)),\n\n };\n\n\n\n let c2: CipherText = CipherText {\n\n big_g: ModInt::from_value(BigInt::from(2)),\n\n big_h: ModInt::from_value(BigInt::from(1)),\n\n random: ModInt::from_value(BigInt::from(1)),\n\n };\n\n\n\n\n\n let res: CipherText = c1.operate(c2);\n\n\n\n assert_eq!(BigInt::from(4), res.big_g.value);\n\n assert_eq!(BigInt::from(4), res.big_h.value);\n\n assert_eq!(BigInt::from(2), res.random.value);\n\n }\n\n}", "file_path": "src/el_gamal/additive.rs", "rank": 30, "score": 26663.167787700724 }, { "content": "\n\n\n\n#[cfg(test)]\n\nmod encryption_test {\n\n\n\n use ::el_gamal::encryption::PrivateKey;\n\n use ::el_gamal::encryption::PublicKey;\n\n use ::el_gamal::encryption::{encrypt, decrypt};\n\n use ::arithmetic::mod_int::ModInt;\n\n use arithmetic::mod_int::From;\n\n use ::num::bigint::BigInt;\n\n use ::num::Zero;\n\n use ::num::One;\n\n\n\n #[test]\n\n fn encrypt_decrypt() {\n\n let message: ModInt = ModInt::one();\n\n\n\n let 
priv_key: PrivateKey = PrivateKey {\n\n p: ModInt::from_value_modulus(BigInt::from(5), BigInt::zero()),\n", "file_path": "src/el_gamal/encryption.rs", "rank": 31, "score": 26661.31542050766 }, { "content": " public_key\n\n }\n\n\n\n /// Write this public key to a file with the given path.\n\n ///\n\n /// - `public_key_file_name`: The file name of the public key.\n\n /// Must reside in the same directory as the binary is launched.\n\n ///\n\n pub fn to_file(&self, public_key_file_name: &str) {\n\n // Read the input file to string.\n\n let mut file = File::create(\"./\".to_owned() + public_key_file_name).unwrap();\n\n\n\n let public_key_str = serde_json::to_string_pretty(&self).unwrap();\n\n file.write(public_key_str.as_bytes()).unwrap();\n\n }\n\n}\n\n\n\n#[derive(Debug, Clone, Serialize, Deserialize)]\n\npub struct PrivateKey {\n\n pub p: ModInt,\n", "file_path": "src/el_gamal/encryption.rs", "rank": 32, "score": 26660.35188247018 }, { "content": " pub q: ModInt,\n\n pub g: ModInt,\n\n pub x: ModInt,\n\n}\n\n\n\nimpl PrivateKey {\n\n /// Create a PrivateKey based its string representation at a specific path.\n\n ///\n\n /// - `private_key_file_name`: The file name of the private key.\n\n /// Must reside in the same directory as the binary is launched.\n\n ///\n\n pub fn new(private_key_file_name: &str) -> Self {\n\n // Read the input file to string.\n\n let mut file = File::open(\"./\".to_owned() + private_key_file_name).unwrap();\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents).unwrap();\n\n\n\n let private_key: PrivateKey = match serde_json::from_str(&contents) {\n\n Ok(private_key_data) => {\n\n private_key_data\n", "file_path": "src/el_gamal/encryption.rs", "rank": 33, "score": 26658.827841501712 }, { "content": " q: ModInt::from_value_modulus(BigInt::from(2), BigInt::zero()),\n\n g: ModInt::from_value_modulus(BigInt::from(2), BigInt::zero()),\n\n x: ModInt::from_value_modulus(BigInt::from(5), BigInt::zero())\n\n };\n\n\n\n //h := 
(g^x) mod p\n\n //2 := 2^5 mod 5\n\n let pub_key: PublicKey = PublicKey {\n\n p: ModInt::from_value_modulus(BigInt::from(5), BigInt::zero()),\n\n q: ModInt::from_value_modulus(BigInt::from(2), BigInt::zero()),\n\n h: ModInt::from_value_modulus(BigInt::from(32), BigInt::from(5)),\n\n g: ModInt::from_value_modulus(BigInt::from(2), BigInt::from(5))\n\n };\n\n\n\n let c = encrypt(&pub_key, message);\n\n\n\n let result_message = decrypt(priv_key, c);\n\n\n\n assert_eq!(ModInt::one().value, result_message.value);\n\n }\n", "file_path": "src/el_gamal/encryption.rs", "rank": 34, "score": 26657.60929246894 }, { "content": "\n\n #[test]\n\n fn write_read_public_key() {\n\n let pub_key: PublicKey = PublicKey {\n\n p: ModInt::from_value_modulus(BigInt::from(5), BigInt::zero()),\n\n q: ModInt::from_value_modulus(BigInt::from(2), BigInt::zero()),\n\n h: ModInt::from_value_modulus(BigInt::from(32), BigInt::from(5)),\n\n g: ModInt::from_value_modulus(BigInt::from(2), BigInt::from(5))\n\n };\n\n\n\n pub_key.to_file(\"public_key.json\");\n\n\n\n let read_pub_key = PublicKey::new(\"public_key.json\");\n\n\n\n assert_eq!(read_pub_key.p, pub_key.p);\n\n assert_eq!(read_pub_key.q, pub_key.q);\n\n assert_eq!(read_pub_key.h, pub_key.h);\n\n assert_eq!(read_pub_key.g, pub_key.g);\n\n }\n\n\n", "file_path": "src/el_gamal/encryption.rs", "rank": 35, "score": 26656.991058889518 }, { "content": "use ::el_gamal::ciphertext::CipherText;\n\n\n\n/// # Homomorphic Operation\n\n///\n\n/// Operate in a homomorphic way on the given cipher text.\n", "file_path": "src/el_gamal/additive.rs", "rank": 36, "score": 26656.89305574171 }, { "content": " /// Create a PublicKey based its string representation at a specific path.\n\n ///\n\n /// - `public_key_file_name`: The file name of the public key.\n\n /// Must reside in the same directory as the binary is launched.\n\n ///\n\n pub fn new(public_key_file_name: &str) -> Self {\n\n // Read the input file to string.\n\n let mut file = 
File::open(\"./\".to_owned() + public_key_file_name).unwrap();\n\n let mut contents = String::new();\n\n file.read_to_string(&mut contents).unwrap();\n\n\n\n let public_key: PublicKey = match serde_json::from_str(&contents) {\n\n Ok(public_key_data) => {\n\n public_key_data\n\n },\n\n Err(e) => {\n\n panic!(\"Failed to transform file {:?} into PublicKey: {:?}\", file, e);\n\n }\n\n };\n\n\n", "file_path": "src/el_gamal/encryption.rs", "rank": 37, "score": 26655.285231813323 }, { "content": " #[test]\n\n fn write_read_private_key() {\n\n let priv_key: PrivateKey = PrivateKey {\n\n p: ModInt::from_value_modulus(BigInt::from(5), BigInt::zero()),\n\n q: ModInt::from_value_modulus(BigInt::from(2), BigInt::zero()),\n\n g: ModInt::from_value_modulus(BigInt::from(2), BigInt::zero()),\n\n x: ModInt::from_value_modulus(BigInt::from(5), BigInt::zero())\n\n };\n\n\n\n priv_key.to_file(\"private_key.json\");\n\n\n\n let read_priv_key = PrivateKey::new(\"private_key.json\");\n\n\n\n assert_eq!(read_priv_key.p, priv_key.p);\n\n assert_eq!(read_priv_key.q, priv_key.q);\n\n assert_eq!(read_priv_key.g, priv_key.g);\n\n assert_eq!(read_priv_key.x, priv_key.x);\n\n }\n\n}", "file_path": "src/el_gamal/encryption.rs", "rank": 38, "score": 26653.78720192157 }, { "content": " },\n\n Err(e) => {\n\n panic!(\"Failed to transform file {:?} into PrivatKey: {:?}\", file, e);\n\n }\n\n };\n\n\n\n private_key\n\n }\n\n\n\n /// Write this private key to a file with the given path.\n\n ///\n\n /// - `private_key_file_name`: The file name of the private key.\n\n /// Must reside in the same directory as the binary is launched.\n\n ///\n\n pub fn to_file(&self, private_key_file_name: &str) {\n\n // Read the input file to string.\n\n let mut file = File::create(\"./\".to_owned() + private_key_file_name).unwrap();\n\n\n\n let private_key_str = serde_json::to_string_pretty(&self).unwrap();\n\n file.write(private_key_str.as_bytes()).unwrap();\n\n }\n\n}\n\n\n", "file_path": "src/el_gamal/encryption.rs", 
"rank": 39, "score": 26653.458410636264 }, { "content": "(function() {var implementors = {};\n\nimplementors[\"crypto_rs\"] = [{text:\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/clone/trait.Clone.html\\\" title=\\\"trait core::clone::Clone\\\">Clone</a> for <a class=\\\"struct\\\" href=\\\"crypto_rs/arithmetic/mod_int/struct.ModInt.html\\\" title=\\\"struct crypto_rs::arithmetic::mod_int::ModInt\\\">ModInt</a>\",synthetic:false,types:[\"crypto_rs::arithmetic::mod_int::ModInt\"]},{text:\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/clone/trait.Clone.html\\\" title=\\\"trait core::clone::Clone\\\">Clone</a> for <a class=\\\"struct\\\" href=\\\"crypto_rs/cai/uciv/struct.PreImageSet.html\\\" title=\\\"struct crypto_rs::cai::uciv::PreImageSet\\\">PreImageSet</a>\",synthetic:false,types:[\"crypto_rs::cai::uciv::PreImageSet\"]},{text:\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/clone/trait.Clone.html\\\" title=\\\"trait core::clone::Clone\\\">Clone</a> for <a class=\\\"struct\\\" href=\\\"crypto_rs/cai/uciv/struct.ImageSet.html\\\" title=\\\"struct crypto_rs::cai::uciv::ImageSet\\\">ImageSet</a>\",synthetic:false,types:[\"crypto_rs::cai::uciv::ImageSet\"]},{text:\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/clone/trait.Clone.html\\\" title=\\\"trait core::clone::Clone\\\">Clone</a> for <a class=\\\"struct\\\" href=\\\"crypto_rs/el_gamal/ciphertext/struct.CipherText.html\\\" title=\\\"struct crypto_rs::el_gamal::ciphertext::CipherText\\\">CipherText</a>\",synthetic:false,types:[\"crypto_rs::el_gamal::ciphertext::CipherText\"]},{text:\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/clone/trait.Clone.html\\\" title=\\\"trait core::clone::Clone\\\">Clone</a> for <a class=\\\"struct\\\" href=\\\"crypto_rs/el_gamal/encryption/struct.PublicKey.html\\\" title=\\\"struct 
crypto_rs::el_gamal::encryption::PublicKey\\\">PublicKey</a>\",synthetic:false,types:[\"crypto_rs::el_gamal::encryption::PublicKey\"]},{text:\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/clone/trait.Clone.html\\\" title=\\\"trait core::clone::Clone\\\">Clone</a> for <a class=\\\"struct\\\" href=\\\"crypto_rs/el_gamal/encryption/struct.PrivateKey.html\\\" title=\\\"struct crypto_rs::el_gamal::encryption::PrivateKey\\\">PrivateKey</a>\",synthetic:false,types:[\"crypto_rs::el_gamal::encryption::PrivateKey\"]},{text:\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/clone/trait.Clone.html\\\" title=\\\"trait core::clone::Clone\\\">Clone</a> for <a class=\\\"struct\\\" href=\\\"crypto_rs/el_gamal/membership_proof/struct.MembershipProof.html\\\" title=\\\"struct crypto_rs::el_gamal::membership_proof::MembershipProof\\\">MembershipProof</a>\",synthetic:false,types:[\"crypto_rs::el_gamal::membership_proof::MembershipProof\"]},];\n\n\n\n if (window.register_implementors) {\n\n window.register_implementors(implementors);\n\n } else {\n\n window.pending_implementors = implementors;\n\n }\n\n \n\n})()\n", "file_path": "docs/implementors/core/clone/trait.Clone.js", "rank": 40, "score": 26425.232131707664 }, { "content": " fn neg(mut self) -> ModInt {\n\n self = self.normalize();\n\n\n\n let zero = BigInt::zero();\n\n\n\n if self.modulus.eq(&zero) {\n\n self.value = self.value.neg()\n\n } else {\n\n self.value = self.modulus.clone().sub(self.value.clone());\n\n }\n\n\n\n self.normalize()\n\n }\n\n}\n\n\n\nimpl Add<ModInt> for ModInt {\n\n type Output = ModInt;\n\n\n\n #[inline]\n\n fn add(mut self, rhs: ModInt) -> ModInt {\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 41, "score": 25983.492545073266 }, { "content": "\n\nimpl<'de> serde::Deserialize<'de> for ModInt {\n\n fn deserialize<D>(deserializer: D) -> stdResult<Self, D::Error>\n\n where D: serde::Deserializer<'de>\n\n {\n\n let (value, modulus) = 
serde::Deserialize::deserialize(deserializer)?;\n\n Ok(ModInt {value, modulus})\n\n }\n\n}\n\n\n\n\n\n\n\nimpl Clone for ModInt {\n\n fn clone(&self) -> Self {\n\n ModInt {\n\n value: self.value.clone(),\n\n modulus: self.modulus.clone(),\n\n }\n\n }\n\n\n\n fn clone_from(&mut self, source: &Self) {\n\n self.value = source.value.clone();\n\n self.modulus = source.modulus.clone();\n\n }\n\n}\n\n\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 42, "score": 25982.799633399864 }, { "content": "// TODO\n\n//use std::cmp::Ordering::{self, Less, Greater, Equal};\n\n\n\n\n\n/// An integer with modular operations.\n\n#[derive(Hash)]\n\npub struct ModInt {\n\n /// The value.\n\n pub value: BigInt,\n\n /// The modulus.\n\n pub modulus: BigInt,\n\n}\n\n\n\nimpl serde::Serialize for ModInt {\n\n fn serialize<S>(&self, serializer: S) -> stdResult<S::Ok, S::Error> where\n\n S: serde::Serializer {\n\n\n\n (&self.value, &self.modulus).serialize(serializer)\n\n }\n\n}\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 43, "score": 25981.513308097365 }, { "content": " // TODO: apply modulus after add to avoid overflows\n\n self.value = self.value.add(rhs.value);\n\n\n\n self.normalize()\n\n }\n\n}\n\n\n\nimpl Sub<ModInt> for ModInt {\n\n type Output = ModInt;\n\n\n\n #[inline]\n\n fn sub(mut self, rhs: ModInt) -> ModInt {\n\n let zero = BigInt::zero();\n\n\n\n if self.modulus.eq(&zero) {\n\n self.value = self.value.sub(rhs.value)\n\n } else {\n\n self.value = self.value.add(rhs.neg().value)\n\n }\n\n\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 44, "score": 25980.9107734896 }, { "content": "}\n\n\n\nimpl PartialOrd<ModInt> for ModInt {\n\n fn partial_cmp(&self, other: &ModInt) -> Option<Ordering> {\n\n Some(self.cmp(other))\n\n }\n\n}\n\n\n\nimpl Ord for ModInt {\n\n fn cmp(&self, other: &Self) -> Ordering {\n\n let _val: BigInt = self.value.clone();\n\n let _mod: BigInt = self.modulus.clone();\n\n\n\n let normalized_val = _val.rem(_mod);\n\n\n\n 
normalized_val.cmp(&other.value)\n\n }\n\n}\n\n\n\nimpl Zero for ModInt {\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 45, "score": 25978.11235246214 }, { "content": "use ::arithmetic::mod_inverse;\n\nuse num::bigint::BigInt;\n\nuse num::bigint::RandBigInt;\n\nuse num::One;\n\nuse num;\n\nuse num::pow::Pow;\n\nuse num::ToPrimitive;\n\nuse num::Zero;\n\nuse num::Num;\n\nuse rand;\n\nuse std::clone::Clone;\n\nuse std::cmp::Ordering;\n\nuse std::cmp::PartialEq;\n\nuse std::cmp::PartialOrd;\n\nuse std::ops::{Add, Div, Mul, Neg, Rem, Sub};\n\nuse std::fmt::{Formatter, Result, Display, Debug};\n\nuse serde;\n\nuse std::result::Result as stdResult;\n\n\n\n\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 46, "score": 25977.946096456413 }, { "content": " self.normalize()\n\n }\n\n}\n\n\n\nimpl Mul<ModInt> for ModInt {\n\n type Output = ModInt;\n\n\n\n #[inline]\n\n fn mul(mut self, rhs: ModInt) -> ModInt {\n\n self.value = self.value.mul(rhs.value);\n\n\n\n self.normalize()\n\n }\n\n}\n\n\n\nimpl Div<ModInt> for ModInt {\n\n type Output = ModInt;\n\n\n\n #[inline]\n\n fn div(mut self, rhs: ModInt) -> ModInt {\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 47, "score": 25977.69287238039 }, { "content": " /// Returns a ModInt having the value set to one and\n\n /// its modulus set to zero.\n\n fn one() -> Self {\n\n ModInt {\n\n value: BigInt::one(),\n\n modulus: BigInt::zero(),\n\n }\n\n }\n\n\n\n fn is_one(&self) -> bool where Self: PartialEq {\n\n // TODO: normalize\n\n self.value.eq(&BigInt::one())\n\n }\n\n}\n\n\n\n// Negation of ModIntegers\n\nimpl Neg for ModInt {\n\n type Output = ModInt;\n\n\n\n #[inline]\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 48, "score": 25977.41584989525 }, { "content": " let inv: BigInt = self.value.modpow(&rhs.value, &self.modulus);\n\n\n\n self.value = inv;\n\n }\n\n\n\n self.normalize()\n\n }\n\n}\n\n\n\nimpl Display for ModInt {\n\n fn fmt(&self, f: &mut Formatter) -> Result {\n\n write!(f, \"(val: {}, 
mod: {})\", self.value, self.modulus)\n\n }\n\n}\n\n\n\nimpl Debug for ModInt {\n\n fn fmt(&self, f: &mut Formatter) -> Result {\n\n write!(f, \"(val: {}, mod: {})\", self.value, self.modulus)\n\n }\n\n}\n\n\n\n\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 49, "score": 25977.355152301894 }, { "content": "\n\n#[cfg(test)]\n\nmod mod_int_tests {\n\n use ::arithmetic::mod_int::From;\n\n use ::arithmetic::mod_int::ModInt;\n\n use ::arithmetic::mod_int::RandModInt;\n\n use ::num::bigint::BigInt;\n\n use ::num::One;\n\n use ::num::traits::Pow;\n\n use ::num::Zero;\n\n use ::std::ops::Neg;\n\n\n\n #[test]\n\n fn test_equal() {\n\n let one: ModInt = ModInt::one();\n\n let one2: ModInt = ModInt::one();\n\n\n\n assert_eq!(true, one == one2);\n\n }\n\n\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 50, "score": 25976.807492223248 }, { "content": "impl PartialEq<ModInt> for ModInt {\n\n fn eq(&self, other: &ModInt) -> bool {\n\n // we have to normalize, i.e. reduce the values\n\n // whenever we have the same modulus.\n\n // 21 mod 5 === 1 mod 5\n\n if self.modulus > BigInt::zero() {\n\n let _val: BigInt = self.value.clone();\n\n let _mod: BigInt = self.modulus.clone();\n\n\n\n let normalized_val = _val.rem(_mod);\n\n\n\n return normalized_val.eq(&other.value);\n\n } else {\n\n return self.value.eq(&other.value);\n\n }\n\n }\n\n\n\n fn ne(&self, other: &ModInt) -> bool {\n\n !self.eq(other)\n\n }\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 51, "score": 25976.60231546864 }, { "content": " /// # Zero ModInt\n\n ///\n\n /// Returns a ModInt having both the value and\n\n /// its modulus set to zero.\n\n fn zero() -> Self {\n\n ModInt {\n\n value: BigInt::zero(),\n\n modulus: BigInt::zero(),\n\n }\n\n }\n\n\n\n fn is_zero(&self) -> bool {\n\n self.value.eq(&BigInt::zero())\n\n }\n\n}\n\n\n\n\n\nimpl One for ModInt {\n\n /// # One ModInt\n\n ///\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 52, "score": 25975.82782883441 }, { "content": " let rnd: 
ModInt = ModInt::gen_modint(ModInt::one());\n\n\n\n assert!(rnd.value < BigInt::one());\n\n assert_eq!(BigInt::one(), rnd.modulus);\n\n }\n\n\n\n #[test]\n\n #[should_panic(expected = \"the upper_bound must be greater than zero\")]\n\n fn test_random_failing() {\n\n ModInt::gen_modint(ModInt::zero());\n\n }\n\n}", "file_path": "src/arithmetic/mod_int.rs", "rank": 53, "score": 25975.813554816516 }, { "content": " fn pow(mut self, rhs: ModInt) -> ModInt {\n\n let zero = BigInt::zero();\n\n\n\n if self.modulus.eq(&zero) {\n\n let usize_val: usize;\n\n let result = rhs.value.to_usize();\n\n\n\n match result {\n\n Some(x) => usize_val = x,\n\n None => panic!(\"Failed to convert BigInt to usize\")\n\n }\n\n\n\n self.value = num::pow(self.value, usize_val)\n\n } else {\n\n // Check whether order of the base divides the order of the exponent.\n\n // Otherwise, the result is not well-defined.\n\n // TODO: not sure whether this is appropriate...\n\n// if ! rhs.modulus.clone().rem(self.modulus.clone()).eq(&zero) {\n\n// panic!(\"Order of base is not compatible to the order of the exponent. 
Base modulus: {:?}, exponent modulus: {:?}\", self.modulus.clone(), rhs.modulus.clone())\n\n// }\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 54, "score": 25975.64963475731 }, { "content": " fn test_negation_zero_modulus() {\n\n let one: ModInt = ModInt::one();\n\n let neg_one: ModInt = one.neg();\n\n\n\n assert_eq!(BigInt::one().neg(), neg_one.value);\n\n assert_eq!(BigInt::zero(), neg_one.modulus);\n\n }\n\n\n\n #[test]\n\n fn test_negation_non_zero_modulus() {\n\n let zero: ModInt = ModInt {\n\n value: BigInt::zero(),\n\n modulus: BigInt::from(11),\n\n };\n\n\n\n // 0 mod 11 = 0\n\n // (0 mod 11)^-1 = 11 mod 11 = 0\n\n let neg_zero: ModInt = zero.neg();\n\n assert_eq!(BigInt::from(0), neg_zero.value);\n\n assert_eq!(BigInt::from(11), neg_zero.modulus);\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 55, "score": 25975.470028012838 }, { "content": " let zero = BigInt::zero();\n\n\n\n if rhs.value.eq(&BigInt::zero()) {\n\n panic!(\"Division by zero is not defined\");\n\n }\n\n\n\n if self.modulus.eq(&zero) {\n\n self.value = self.value.div(rhs.value)\n\n } else {\n\n let inv: Option<BigInt> = mod_inverse::mod_inverse(rhs.value.clone(), self.modulus.clone());\n\n\n\n let inverse: BigInt;\n\n match inv {\n\n None => panic!(\"failed to compute inverse\"),\n\n Some(x) => inverse = x\n\n }\n\n\n\n self.value = self.value.mul(inverse);\n\n self.value = self.value.rem(self.modulus.clone());\n\n }\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 56, "score": 25975.463857826253 }, { "content": " #[test]\n\n fn test_non_equal() {\n\n let one3: ModInt = ModInt::one();\n\n let zero: ModInt = ModInt::zero();\n\n\n\n assert_eq!(false, one3 == zero);\n\n }\n\n\n\n #[test]\n\n fn test_equal_normalized() {\n\n let one: ModInt = ModInt::from_value_modulus(\n\n BigInt::from(21),\n\n BigInt::from(4),\n\n );\n\n\n\n let other_one: ModInt = ModInt::from_value_modulus(\n\n BigInt::from(1),\n\n BigInt::from(4),\n\n );\n\n\n", "file_path": "src/arithmetic/mod_int.rs", 
"rank": 57, "score": 25975.24319674336 }, { "content": " let neg_two = two.neg();\n\n assert_eq!(BigInt::from(9), neg_two.value);\n\n assert_eq!(BigInt::from(11), neg_two.modulus);\n\n }\n\n\n\n #[test]\n\n fn test_add() {\n\n let one: ModInt = ModInt::one();\n\n let one2: ModInt = ModInt::one();\n\n\n\n let two = one + one2;\n\n assert_eq!(BigInt::from(2), two.value);\n\n assert_eq!(BigInt::zero(), two.modulus);\n\n\n\n let zero: ModInt = ModInt::zero();\n\n let zero2: ModInt = ModInt::zero();\n\n\n\n let zero_result = zero + zero2;\n\n assert_eq!(BigInt::zero(), zero_result.value);\n\n assert_eq!(BigInt::zero(), zero_result.modulus);\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 58, "score": 25975.07697590421 }, { "content": "\n\n self.normalize()\n\n }\n\n}\n\n\n\nimpl Rem<ModInt> for ModInt {\n\n type Output = ModInt;\n\n\n\n #[inline]\n\n fn rem(mut self, rhs: ModInt) -> ModInt {\n\n self.value = self.value.rem(rhs.modulus);\n\n\n\n self\n\n }\n\n}\n\n\n\nimpl Pow<ModInt> for ModInt {\n\n type Output = ModInt;\n\n\n\n #[inline]\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 59, "score": 25974.751479318704 }, { "content": " fn test_invalid_div_modulus() {\n\n let one: ModInt = ModInt::from_value_modulus(\n\n BigInt::one(),\n\n BigInt::from(5),\n\n );\n\n let zero: ModInt = ModInt::from_value_modulus(\n\n BigInt::zero(),\n\n BigInt::from(5),\n\n );\n\n\n\n one / zero;\n\n }\n\n\n\n #[test]\n\n fn test_rem() {\n\n let one: ModInt = ModInt::from_value_modulus(\n\n BigInt::from(21),\n\n BigInt::from(4),\n\n );\n\n\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 60, "score": 25974.628440930534 }, { "content": "\n\n // test overflow of mod round\n\n let nine: ModInt = ModInt {\n\n value: BigInt::from(9),\n\n modulus: BigInt::from(11),\n\n };\n\n let three: ModInt = ModInt {\n\n value: BigInt::from(3),\n\n modulus: BigInt::from(11),\n\n };\n\n\n\n let twelve_one = nine + three;\n\n assert_eq!(BigInt::from(1), twelve_one.value);\n\n 
assert_eq!(BigInt::from(11), twelve_one.modulus);\n\n }\n\n\n\n #[test]\n\n fn test_sub() {\n\n let two: ModInt = ModInt::from_value_modulus(BigInt::from(2), BigInt::zero());\n\n let one: ModInt = ModInt::one();\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 61, "score": 25973.9864005593 }, { "content": " let four: ModInt = ModInt::from_value_modulus(\n\n BigInt::from(4),\n\n BigInt::from(4),\n\n );\n\n\n\n let result = one % four;\n\n assert_eq!(BigInt::from(1), result.value);\n\n assert_eq!(BigInt::from(4), result.modulus);\n\n }\n\n\n\n #[test]\n\n fn test_negative_rem() {\n\n let neg_one: ModInt = ModInt::from_value_modulus(\n\n BigInt::from(-21),\n\n BigInt::from(4),\n\n );\n\n\n\n let four: ModInt = ModInt::from_value_modulus(\n\n BigInt::from(4),\n\n BigInt::from(4),\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 62, "score": 25973.919024425482 }, { "content": " assert_eq!(BigInt::zero(), one_mul.modulus);\n\n\n\n let two: ModInt = ModInt::from_value_modulus(\n\n BigInt::from(2),\n\n BigInt::from(4),\n\n );\n\n let three: ModInt = ModInt::from_value_modulus(\n\n BigInt::from(3),\n\n BigInt::from(4),\n\n );\n\n\n\n // 2 * 3 mod 4 = 2\n\n let two = two * three;\n\n assert_eq!(BigInt::from(2), two.value);\n\n assert_eq!(BigInt::from(4), two.modulus);\n\n }\n\n\n\n #[test]\n\n fn test_div() {\n\n let one: ModInt = ModInt::from_value_modulus(\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 63, "score": 25973.85674395713 }, { "content": " );\n\n\n\n let result = neg_one % four;\n\n assert_eq!(BigInt::from(-1), result.value);\n\n assert_eq!(BigInt::from(4), result.modulus);\n\n }\n\n\n\n #[test]\n\n fn test_pow_zero_modulus() {\n\n let two: ModInt = ModInt::from_value_modulus(\n\n BigInt::from(2),\n\n BigInt::from(0),\n\n );\n\n\n\n let four: ModInt = ModInt::from_value_modulus(\n\n BigInt::from(4),\n\n BigInt::from(0),\n\n );\n\n\n\n let result = two.pow(four);\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 64, "score": 25973.82136974792 
}, { "content": "\n\n\n\n let one: ModInt = ModInt {\n\n value: BigInt::from(23),\n\n modulus: BigInt::from(11),\n\n };\n\n\n\n // 23 mod 11 = 1\n\n // (23 mod 11)^-1 = 10\n\n let neg_one: ModInt = one.neg();\n\n assert_eq!(BigInt::from(10), neg_one.value);\n\n assert_eq!(BigInt::from(11), neg_one.modulus);\n\n\n\n let two: ModInt = ModInt {\n\n value: BigInt::from(2),\n\n modulus: BigInt::from(11),\n\n };\n\n\n\n // 2 mod 11 = 2\n\n // (2 mod 11)^-1 = 9\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 65, "score": 25973.475684115387 }, { "content": " BigInt::from(23),\n\n BigInt::from(11),\n\n );\n\n\n\n let two: ModInt = ModInt::from_value_modulus(\n\n BigInt::from(2),\n\n BigInt::from(0),\n\n );\n\n\n\n let div = one / two;\n\n assert_eq!(BigInt::from(6), div.value);\n\n assert_eq!(BigInt::from(11), div.modulus);\n\n\n\n\n\n let one2: ModInt = ModInt::from_value_modulus(\n\n BigInt::from(23),\n\n BigInt::from(11),\n\n );\n\n let two2: ModInt = ModInt::from_value_modulus(\n\n BigInt::from(2),\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 66, "score": 25973.15865159937 }, { "content": "\n\n #[test]\n\n fn test_pow_with_modulus() {\n\n let two: ModInt = ModInt::from_value_modulus(\n\n BigInt::from(2),\n\n BigInt::from(5),\n\n );\n\n\n\n let four: ModInt = ModInt::from_value_modulus(\n\n BigInt::from(4),\n\n BigInt::from(5),\n\n );\n\n\n\n let result = two.pow(four);\n\n assert_eq!(BigInt::from(1), result.value);\n\n assert_eq!(BigInt::from(5), result.modulus);\n\n }\n\n\n\n #[test]\n\n fn test_random() {\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 67, "score": 25973.088905188288 }, { "content": " BigInt::from(0),\n\n );\n\n\n\n let zero: ModInt = one2 - ModInt::one();\n\n let zero_res: ModInt = zero / two2;\n\n assert_eq!(BigInt::from(0), zero_res.value);\n\n assert_eq!(BigInt::from(11), zero_res.modulus);\n\n }\n\n\n\n #[test]\n\n #[should_panic(expected = \"Division by zero is not defined\")]\n\n fn test_invalid_div() {\n\n let one: ModInt 
= ModInt::one();\n\n let zero: ModInt = ModInt::zero();\n\n\n\n one / zero;\n\n }\n\n\n\n #[test]\n\n #[should_panic(expected = \"Division by zero is not defined\")]\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 68, "score": 25972.99604568662 }, { "content": " assert_eq!(BigInt::from(16), result.value);\n\n assert_eq!(BigInt::from(0), result.modulus);\n\n }\n\n\n\n #[test]\n\n fn test_pow() {\n\n let two: ModInt = ModInt::from_value_modulus(\n\n BigInt::from(2),\n\n BigInt::from(20),\n\n );\n\n\n\n let four: ModInt = ModInt::from_value_modulus(\n\n BigInt::from(4),\n\n BigInt::from(20),\n\n );\n\n\n\n let result = two.pow(four);\n\n assert_eq!(BigInt::from(16), result.value);\n\n assert_eq!(BigInt::from(20), result.modulus);\n\n }\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 69, "score": 25972.316585846947 }, { "content": " fn test_zero() {\n\n let zero: ModInt = ModInt::zero();\n\n\n\n assert_eq!(BigInt::zero(), zero.value);\n\n assert_eq!(BigInt::zero(), zero.modulus);\n\n\n\n assert_eq!(true, zero.is_zero())\n\n }\n\n\n\n #[test]\n\n fn test_one() {\n\n let one: ModInt = ModInt::one();\n\n\n\n assert_eq!(BigInt::one(), one.value);\n\n assert_eq!(BigInt::zero(), one.modulus);\n\n\n\n assert_eq!(true, one.is_one())\n\n }\n\n\n\n #[test]\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 70, "score": 25972.30576813783 }, { "content": "\n\n let one2 = two - one;\n\n assert_eq!(BigInt::from(1), one2.value);\n\n assert_eq!(BigInt::zero(), one2.modulus);\n\n\n\n let one3: ModInt = ModInt::one();\n\n let one4: ModInt = ModInt::one();\n\n\n\n let zero: ModInt = one3 - one4;\n\n assert_eq!(BigInt::zero(), zero.value);\n\n assert_eq!(BigInt::zero(), zero.modulus);\n\n }\n\n\n\n #[test]\n\n fn test_mul() {\n\n let one: ModInt = ModInt::one();\n\n let one2: ModInt = ModInt::one();\n\n\n\n let one_mul: ModInt = one * one2;\n\n assert_eq!(BigInt::one(), one_mul.value);\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 71, "score": 25971.764140400413 }, 
{ "content": " assert_eq!(true, one == other_one);\n\n }\n\n\n\n #[test]\n\n fn test_unequal() {\n\n let one: ModInt = ModInt::one();\n\n let zero: ModInt = ModInt::zero();\n\n\n\n assert_eq!(true, one != zero);\n\n }\n\n\n\n #[test]\n\n fn test_non_unequal() {\n\n let one: ModInt = ModInt::one();\n\n let one2: ModInt = ModInt::one();\n\n\n\n assert_eq!(false, one != one2);\n\n }\n\n\n\n #[test]\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 72, "score": 25971.311281566956 }, { "content": " fn from_value(value: BigInt) -> ModInt {\n\n let non_normalized = ModInt {\n\n value,\n\n modulus: BigInt::zero(),\n\n };\n\n\n\n non_normalized.normalize()\n\n }\n\n\n\n fn from_hex_string(hex_string: String, modulus: BigInt) -> ModInt {\n\n let value = BigInt::from_str_radix(&hex_string.as_str(), 16);\n\n\n\n let non_normalized = ModInt {\n\n value: value.unwrap(),\n\n modulus,\n\n };\n\n\n\n non_normalized.normalize()\n\n }\n\n}\n\n\n\n\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 73, "score": 25970.685111373485 }, { "content": "use ::num::bigint::BigInt;\n\nuse ::num::Zero;\n\nuse ::num::One;\n\n\n\n///\n\n/// # Modular Inverse\n\n///\n\n/// Calculates the modular inverse `a^-1 mod m`\n\n///\n\n/// ## Credits\n\n/// Inspired by [simon-andrews/rust-modinverse](https://github.com/simon-andrews/rust-modinverse)\n\n///\n", "file_path": "src/arithmetic/mod_inverse.rs", "rank": 74, "score": 25970.35610572303 }, { "content": "/// Normalize ModInt values.\n\ntrait Normalize {\n\n /// Normalize a ModInt, i.e. 
reduce it by\n\n /// applying `value mod modulus`.\n\n /// Note, that the value is updated but the modulus remains.\n\n fn normalize(self) -> ModInt;\n\n}\n\n\n\n\n\nimpl Normalize for ModInt {\n\n fn normalize(mut self) -> ModInt {\n\n if self.modulus > BigInt::zero() {\n\n self.value = self.value.rem(self.modulus.clone());\n\n }\n\n\n\n self\n\n }\n\n}\n\n\n\nimpl Eq for ModInt {}\n\n\n", "file_path": "src/arithmetic/mod_int.rs", "rank": 75, "score": 24947.461952922244 }, { "content": "(function() {var implementors = {};\n\nimplementors[\"crypto_rs\"] = [];\n\n\n\n if (window.register_implementors) {\n\n window.register_implementors(implementors);\n\n } else {\n\n window.pending_implementors = implementors;\n\n }\n\n \n\n})()\n", "file_path": "docs/implementors/crypto_rs/arithmetic/mod_int/trait.RandModInt.js", "rank": 76, "score": 22012.808598534517 }, { "content": "var searchIndex = {};\n\nsearchIndex[\"crypto_rs\"] = {\"doc\":\"This library provides common cryptographic functionality for working within the exponential ElGamal cryptosystem.\",\"items\":[[0,\"arithmetic\",\"crypto_rs\",\"Adds support for modular arithmetic within a cyclic field of integers.\",null,null],[0,\"mod_int\",\"crypto_rs::arithmetic\",\"Modular arithmetic within a cyclic field\",null,null],[3,\"ModInt\",\"crypto_rs::arithmetic::mod_int\",\"An integer with modular operations.\",null,null],[12,\"value\",\"\",\"The value.\",0,null],[12,\"modulus\",\"\",\"The modulus.\",0,null],[8,\"From\",\"\",\"\",null,null],[10,\"from_value_modulus\",\"\",\"Create a ModInt with the given value and modulus.\",1,{\"inputs\":[{\"name\":\"bigint\"},{\"name\":\"bigint\"}],\"output\":{\"name\":\"modint\"}}],[10,\"from_value\",\"\",\"Create a ModInt with the given value and a zero 
modulus.\",1,{\"inputs\":[{\"name\":\"bigint\"}],\"output\":{\"name\":\"modint\"}}],[10,\"from_hex_string\",\"\",\"\",1,{\"inputs\":[{\"name\":\"string\"},{\"name\":\"bigint\"}],\"output\":{\"name\":\"modint\"}}],[8,\"RandModInt\",\"\",\"Random ModInt\",null,null],[10,\"gen_modint\",\"\",\"Generate random ModInts with the given upper_bound. Note, that the returned ModInt has a modulus set equal to the given upper_bound.\",2,{\"inputs\":[{\"name\":\"modint\"}],\"output\":{\"name\":\"modint\"}}],[11,\"clone\",\"\",\"\",0,{\"inputs\":[{\"name\":\"self\"}],\"output\":{\"name\":\"self\"}}],[11,\"clone_from\",\"\",\"\",0,{\"inputs\":[{\"name\":\"self\"},{\"name\":\"self\"}],\"output\":null}],[11,\"from_value_modulus\",\"\",\"\",0,{\"inputs\":[{\"name\":\"bigint\"},{\"name\":\"bigint\"}],\"output\":{\"name\":\"modint\"}}],[11,\"from_value\",\"\",\"\",0,{\"inputs\":[{\"name\":\"bigint\"}],\"output\":{\"name\":\"modint\"}}],[11,\"from_hex_string\",\"\",\"\",0,{\"inputs\":[{\"name\":\"string\"},{\"name\":\"bigint\"}],\"output\":{\"name\":\"modint\"}}],[11,\"eq\",\"\",\"\",0,{\"inputs\":[{\"name\":\"self\"},{\"name\":\"modint\"}],\"output\":{\"name\":\"bool\"}}],[11,\"ne\",\"\",\"\",0,{\"inputs\":[{\"name\":\"self\"},{\"name\":\"modint\"}],\"output\":{\"name\":\"bool\"}}],[11,\"partial_cmp\",\"\",\"\",0,{\"inputs\":[{\"name\":\"self\"},{\"name\":\"modint\"}],\"output\":{\"generics\":[\"ordering\"],\"name\":\"option\"}}],[11,\"cmp\",\"\",\"\",0,{\"inputs\":[{\"name\":\"self\"},{\"name\":\"self\"}],\"output\":{\"name\":\"ordering\"}}],[11,\"zero\",\"\",\"Zero ModInt\",0,{\"inputs\":[],\"output\":{\"name\":\"self\"}}],[11,\"is_zero\",\"\",\"\",0,{\"inputs\":[{\"name\":\"self\"}],\"output\":{\"name\":\"bool\"}}],[11,\"one\",\"\",\"One 
ModInt\",0,{\"inputs\":[],\"output\":{\"name\":\"self\"}}],[11,\"is_one\",\"\",\"\",0,{\"inputs\":[{\"name\":\"self\"}],\"output\":{\"name\":\"bool\"}}],[11,\"neg\",\"\",\"\",0,{\"inputs\":[{\"name\":\"self\"}],\"output\":{\"name\":\"modint\"}}],[11,\"add\",\"\",\"\",0,{\"inputs\":[{\"name\":\"self\"},{\"name\":\"modint\"}],\"output\":{\"name\":\"modint\"}}],[11,\"sub\",\"\",\"\",0,{\"inputs\":[{\"name\":\"self\"},{\"name\":\"modint\"}],\"output\":{\"name\":\"modint\"}}],[11,\"mul\",\"\",\"\",0,{\"inputs\":[{\"name\":\"self\"},{\"name\":\"modint\"}],\"output\":{\"name\":\"modint\"}}],[11,\"div\",\"\",\"\",0,{\"inputs\":[{\"name\":\"self\"},{\"name\":\"modint\"}],\"output\":{\"name\":\"modint\"}}],[11,\"rem\",\"\",\"\",0,{\"inputs\":[{\"name\":\"self\"},{\"name\":\"modint\"}],\"output\":{\"name\":\"modint\"}}],[11,\"pow\",\"\",\"\",0,{\"inputs\":[{\"name\":\"self\"},{\"name\":\"modint\"}],\"output\":{\"name\":\"modint\"}}],[11,\"fmt\",\"\",\"\",0,{\"inputs\":[{\"name\":\"self\"},{\"name\":\"formatter\"}],\"output\":{\"name\":\"result\"}}],[11,\"fmt\",\"\",\"\",0,{\"inputs\":[{\"name\":\"self\"},{\"name\":\"formatter\"}],\"output\":{\"name\":\"result\"}}],[11,\"gen_modint\",\"\",\"\",0,{\"inputs\":[{\"name\":\"modint\"}],\"output\":{\"name\":\"modint\"}}],[0,\"mod_inverse\",\"crypto_rs::arithmetic\",\"Perform the inverse operation in modular arithmetic\",null,null],[5,\"mod_inverse\",\"crypto_rs::arithmetic::mod_inverse\",\"Modular Inverse\",null,{\"inputs\":[{\"name\":\"bigint\"},{\"name\":\"bigint\"}],\"output\":{\"generics\":[\"bigint\"],\"name\":\"option\"}}],[0,\"cai\",\"crypto_rs\",\"Adds a universal cast-as-intended proof for a particular ElGamal ciphertext.\",null,null],[0,\"uciv\",\"crypto_rs::cai\",\"\",null,null],[3,\"PreImageSet\",\"crypto_rs::cai::uciv\",\"Secret UCIV Information `(x1, x2, ..., xn)`. This information is specific to a particular voter. 
Each `xn` is further tight to the n-th voting option.\",null,null],[12,\"pre_images\",\"\",\"\",3,null],[3,\"ImageSet\",\"\",\"Public UCIV Information `(y1, y2, ..., yn)`. This information is specific to a particular voter. Each `yn` is further tight to the n-th voting option.\",null,null],[12,\"images\",\"\",\"\",4,null],[3,\"CaiProof\",\"\",\"Cast-as-Intended proof\",null,null],[11,\"clone\",\"\",\"\",3,{\"inputs\":[{\"name\":\"self\"}],\"output\":{\"name\":\"preimageset\"}}],[11,\"clone\",\"\",\"\",4,{\"inputs\":[{\"name\":\"self\"}],\"output\":{\"name\":\"imageset\"}}],[11,\"new\",\"\",\"Creates an ImageSet `(y1, y2, ..., yn)` by applying the following arithmetic operation to each element of the given PreImageSet `(x1, x2, ..., xn)`:\",4,{\"inputs\":[{\"name\":\"modint\"},{\"name\":\"usize\"},{\"name\":\"preimageset\"}],\"output\":{\"name\":\"self\"}}],[11,\"new\",\"\",\"Create a new Cast-as-Intended Proof.\",5,{\"inputs\":[{\"name\":\"publickey\"},{\"name\":\"ciphertext\"},{\"name\":\"preimageset\"},{\"name\":\"imageset\"},{\"name\":\"usize\"},{\"generics\":[\"modint\"],\"name\":\"vec\"}],\"output\":{\"name\":\"self\"}}],[11,\"verify\",\"\",\"Verify this proof for validity.\",5,{\"inputs\":[{\"name\":\"self\"},{\"name\":\"publickey\"},{\"name\":\"ciphertext\"},{\"name\":\"imageset\"},{\"generics\":[\"modint\"],\"name\":\"vec\"}],\"output\":{\"name\":\"bool\"}}],[0,\"el_gamal\",\"crypto_rs\",\"Adds support for encrypting and decrypting messages in the exponential ElGamal cryptosystem, applying homomorphic addition on the ciphertexts. 
In addition, membership proofs can be generated for a ciphertext, ensuring that the encrypted plain-text message is within a particular bound.\",null,null],[0,\"additive\",\"crypto_rs::el_gamal\",\"\",null,null],[8,\"Operate\",\"crypto_rs::el_gamal::additive\",\"Homomorphic Operation\",null,null],[10,\"operate\",\"\",\"\",6,{\"inputs\":[{\"name\":\"self\"},{\"name\":\"ciphertext\"}],\"output\":{\"name\":\"ciphertext\"}}],[0,\"ciphertext\",\"crypto_rs::el_gamal\",\"\",null,null],[3,\"CipherText\",\"crypto_rs::el_gamal::ciphertext\",\"ElGamal CipherText.\",null,null],[12,\"big_g\",\"\",\"\",7,null],[12,\"big_h\",\"\",\"\",7,null],[12,\"random\",\"\",\"\",7,null],[11,\"clone\",\"\",\"\",7,{\"inputs\":[{\"name\":\"self\"}],\"output\":{\"name\":\"ciphertext\"}}],[11,\"fmt\",\"\",\"\",7,{\"inputs\":[{\"name\":\"self\"},{\"name\":\"formatter\"}],\"output\":{\"name\":\"result\"}}],[0,\"encryption\",\"crypto_rs::el_gamal\",\"\",null,null],[3,\"PublicKey\",\"crypto_rs::el_gamal::encryption\",\"\",null,null],[12,\"p\",\"\",\"\",8,null],[12,\"q\",\"\",\"\",8,null],[12,\"h\",\"\",\"\",8,null],[12,\"g\",\"\",\"\",8,null],[3,\"PrivateKey\",\"\",\"\",null,null],[12,\"p\",\"\",\"\",9,null],[12,\"q\",\"\",\"\",9,null],[12,\"g\",\"\",\"\",9,null],[12,\"x\",\"\",\"\",9,null],[5,\"encrypt\",\"\",\"\",null,{\"inputs\":[{\"name\":\"publickey\"},{\"name\":\"modint\"}],\"output\":{\"name\":\"ciphertext\"}}],[5,\"decrypt\",\"\",\"\",null,{\"inputs\":[{\"name\":\"privatekey\"},{\"name\":\"ciphertext\"}],\"output\":{\"name\":\"modint\"}}],[11,\"fmt\",\"\",\"\",8,{\"inputs\":[{\"name\":\"self\"},{\"name\":\"formatter\"}],\"output\":{\"name\":\"result\"}}],[11,\"clone\",\"\",\"\",8,{\"inputs\":[{\"name\":\"self\"}],\"output\":{\"name\":\"publickey\"}}],[11,\"fmt\",\"\",\"\",9,{\"inputs\":[{\"name\":\"self\"},{\"name\":\"formatter\"}],\"output\":{\"name\":\"result\"}}],[11,\"clone\",\"\",\"\",9,{\"inputs\":[{\"name\":\"self\"}],\"output\":{\"name\":\"privatekey\"}}],[0,\"membership_proof\",\"cryp
to_rs::el_gamal\",\"\",null,null],[3,\"MembershipProof\",\"crypto_rs::el_gamal::membership_proof\",\"\",null,null],[11,\"clone\",\"\",\"\",10,{\"inputs\":[{\"name\":\"self\"}],\"output\":{\"name\":\"membershipproof\"}}],[11,\"fmt\",\"\",\"\",10,{\"inputs\":[{\"name\":\"self\"},{\"name\":\"formatter\"}],\"output\":{\"name\":\"result\"}}],[11,\"new\",\"\",\"\",10,{\"inputs\":[{\"name\":\"publickey\"},{\"name\":\"modint\"},{\"name\":\"ciphertext\"},{\"generics\":[\"modint\"],\"name\":\"vec\"}],\"output\":{\"name\":\"membershipproof\"}}],[11,\"verify\",\"\",\"\",10,{\"inputs\":[{\"name\":\"self\"},{\"name\":\"publickey\"},{\"name\":\"ciphertext\"},{\"generics\":[\"modint\"],\"name\":\"vec\"}],\"output\":{\"name\":\"bool\"}}],[0,\"serializer\",\"crypto_rs::el_gamal\",\"\",null,null],[3,\"Serializer\",\"crypto_rs::el_gamal::serializer\",\"\",null,null],[11,\"string_to_sha512\",\"\",\"\",11,{\"inputs\":[{\"name\":\"string\"}],\"output\":{\"name\":\"string\"}}],[11,\"operate\",\"crypto_rs::el_gamal::ciphertext\",\"\",7,{\"inputs\":[{\"name\":\"self\"},{\"name\":\"ciphertext\"}],\"output\":{\"name\":\"ciphertext\"}}]],\"paths\":[[3,\"ModInt\"],[8,\"From\"],[8,\"RandModInt\"],[3,\"PreImageSet\"],[3,\"ImageSet\"],[3,\"CaiProof\"],[8,\"Operate\"],[3,\"CipherText\"],[3,\"PublicKey\"],[3,\"PrivateKey\"],[3,\"MembershipProof\"],[3,\"Serializer\"]]};\n\ninitSearch(searchIndex);\n", "file_path": "docs/search-index.js", "rank": 77, "score": 19579.546032137438 }, { "content": "(function() {var implementors = {};\n\nimplementors[\"crypto_rs\"] = [{text:\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/fmt/trait.Debug.html\\\" title=\\\"trait core::fmt::Debug\\\">Debug</a> for <a class=\\\"struct\\\" href=\\\"crypto_rs/arithmetic/mod_int/struct.ModInt.html\\\" title=\\\"struct crypto_rs::arithmetic::mod_int::ModInt\\\">ModInt</a>\",synthetic:false,types:[\"crypto_rs::arithmetic::mod_int::ModInt\"]},{text:\"impl <a class=\\\"trait\\\" 
href=\\\"https://doc.rust-lang.org/nightly/core/fmt/trait.Debug.html\\\" title=\\\"trait core::fmt::Debug\\\">Debug</a> for <a class=\\\"struct\\\" href=\\\"crypto_rs/el_gamal/ciphertext/struct.CipherText.html\\\" title=\\\"struct crypto_rs::el_gamal::ciphertext::CipherText\\\">CipherText</a>\",synthetic:false,types:[\"crypto_rs::el_gamal::ciphertext::CipherText\"]},{text:\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/fmt/trait.Debug.html\\\" title=\\\"trait core::fmt::Debug\\\">Debug</a> for <a class=\\\"struct\\\" href=\\\"crypto_rs/el_gamal/encryption/struct.PublicKey.html\\\" title=\\\"struct crypto_rs::el_gamal::encryption::PublicKey\\\">PublicKey</a>\",synthetic:false,types:[\"crypto_rs::el_gamal::encryption::PublicKey\"]},{text:\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/fmt/trait.Debug.html\\\" title=\\\"trait core::fmt::Debug\\\">Debug</a> for <a class=\\\"struct\\\" href=\\\"crypto_rs/el_gamal/encryption/struct.PrivateKey.html\\\" title=\\\"struct crypto_rs::el_gamal::encryption::PrivateKey\\\">PrivateKey</a>\",synthetic:false,types:[\"crypto_rs::el_gamal::encryption::PrivateKey\"]},{text:\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/fmt/trait.Debug.html\\\" title=\\\"trait core::fmt::Debug\\\">Debug</a> for <a class=\\\"struct\\\" href=\\\"crypto_rs/el_gamal/membership_proof/struct.MembershipProof.html\\\" title=\\\"struct crypto_rs::el_gamal::membership_proof::MembershipProof\\\">MembershipProof</a>\",synthetic:false,types:[\"crypto_rs::el_gamal::membership_proof::MembershipProof\"]},];\n\n\n\n if (window.register_implementors) {\n\n window.register_implementors(implementors);\n\n } else {\n\n window.pending_implementors = implementors;\n\n }\n\n \n\n})()\n", "file_path": "docs/implementors/core/fmt/trait.Debug.js", "rank": 78, "score": 18080.153570922426 }, { "content": "(function() {var implementors = {};\n\nimplementors[\"crypto_rs\"] = [{text:\"impl <a 
class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/ops/arith/trait.Neg.html\\\" title=\\\"trait core::ops::arith::Neg\\\">Neg</a> for <a class=\\\"struct\\\" href=\\\"crypto_rs/arithmetic/mod_int/struct.ModInt.html\\\" title=\\\"struct crypto_rs::arithmetic::mod_int::ModInt\\\">ModInt</a>\",synthetic:false,types:[\"crypto_rs::arithmetic::mod_int::ModInt\"]},];\n\n\n\n if (window.register_implementors) {\n\n window.register_implementors(implementors);\n\n } else {\n\n window.pending_implementors = implementors;\n\n }\n\n \n\n})()\n", "file_path": "docs/implementors/core/ops/arith/trait.Neg.js", "rank": 79, "score": 17621.0554374469 }, { "content": "(function() {var implementors = {};\n\nimplementors[\"crypto_rs\"] = [{text:\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/ops/arith/trait.Div.html\\\" title=\\\"trait core::ops::arith::Div\\\">Div</a>&lt;<a class=\\\"struct\\\" href=\\\"crypto_rs/arithmetic/mod_int/struct.ModInt.html\\\" title=\\\"struct crypto_rs::arithmetic::mod_int::ModInt\\\">ModInt</a>&gt; for <a class=\\\"struct\\\" href=\\\"crypto_rs/arithmetic/mod_int/struct.ModInt.html\\\" title=\\\"struct crypto_rs::arithmetic::mod_int::ModInt\\\">ModInt</a>\",synthetic:false,types:[\"crypto_rs::arithmetic::mod_int::ModInt\"]},];\n\n\n\n if (window.register_implementors) {\n\n window.register_implementors(implementors);\n\n } else {\n\n window.pending_implementors = implementors;\n\n }\n\n \n\n})()\n", "file_path": "docs/implementors/core/ops/arith/trait.Div.js", "rank": 80, "score": 17621.0554374469 }, { "content": "(function() {var implementors = {};\n\nimplementors[\"crypto_rs\"] = [{text:\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/ops/arith/trait.Sub.html\\\" title=\\\"trait core::ops::arith::Sub\\\">Sub</a>&lt;<a class=\\\"struct\\\" href=\\\"crypto_rs/arithmetic/mod_int/struct.ModInt.html\\\" title=\\\"struct crypto_rs::arithmetic::mod_int::ModInt\\\">ModInt</a>&gt; for <a 
class=\\\"struct\\\" href=\\\"crypto_rs/arithmetic/mod_int/struct.ModInt.html\\\" title=\\\"struct crypto_rs::arithmetic::mod_int::ModInt\\\">ModInt</a>\",synthetic:false,types:[\"crypto_rs::arithmetic::mod_int::ModInt\"]},];\n\n\n\n if (window.register_implementors) {\n\n window.register_implementors(implementors);\n\n } else {\n\n window.pending_implementors = implementors;\n\n }\n\n \n\n})()\n", "file_path": "docs/implementors/core/ops/arith/trait.Sub.js", "rank": 81, "score": 17621.0554374469 }, { "content": "(function() {var implementors = {};\n\nimplementors[\"crypto_rs\"] = [{text:\"impl <a class=\\\"trait\\\" href=\\\"https://doc.rust-lang.org/nightly/core/ops/arith/trait.Add.html\\\" title=\\\"trait core::ops::arith::Add\\\">Add</a>&lt;<a class=\\\"struct\\\" href=\\\"crypto_rs/arithmetic/mod_int/struct.ModInt.html\\\" title=\\\"struct crypto_rs::arithmetic::mod_int::ModInt\\\">ModInt</a>&gt; for <a class=\\\"struct\\\" href=\\\"crypto_rs/arithmetic/mod_int/struct.ModInt.html\\\" title=\\\"struct crypto_rs::arithmetic::mod_int::ModInt\\\">ModInt</a>\",synthetic:false,types:[\"crypto_rs::arithmetic::mod_int::ModInt\"]},];\n\n\n\n if (window.register_implementors) {\n\n window.register_implementors(implementors);\n\n } else {\n\n window.pending_implementors = implementors;\n\n }\n\n \n\n})()\n", "file_path": "docs/implementors/core/ops/arith/trait.Add.js", "rank": 82, "score": 17621.0554374469 }, { "content": "initSidebarItems({\"mod\":[[\"additive\",\"\"],[\"ciphertext\",\"\"],[\"encryption\",\"\"],[\"membership_proof\",\"\"],[\"serializer\",\"\"]]});", "file_path": "docs/crypto_rs/el_gamal/sidebar-items.js", "rank": 83, "score": 16499.08517925731 }, { "content": "initSidebarItems({\"trait\":[[\"Operate\",\"Homomorphic Operation\"]]});", "file_path": "docs/crypto_rs/el_gamal/additive/sidebar-items.js", "rank": 84, "score": 16090.508860718308 }, { "content": 
"initSidebarItems({\"fn\":[[\"decrypt\",\"\"],[\"encrypt\",\"\"]],\"struct\":[[\"PrivateKey\",\"\"],[\"PublicKey\",\"\"]]});", "file_path": "docs/crypto_rs/el_gamal/encryption/sidebar-items.js", "rank": 85, "score": 16090.508860718308 }, { "content": "(function() {var implementors = {};\n\nimplementors[\"crypto_rs\"] = [];\n\n\n\n if (window.register_implementors) {\n\n window.register_implementors(implementors);\n\n } else {\n\n window.pending_implementors = implementors;\n\n }\n\n \n\n})()\n", "file_path": "docs/implementors/crypto_rs/el_gamal/additive/trait.Operate.js", "rank": 86, "score": 15701.679165104377 }, { "content": "initSidebarItems({\"struct\":[[\"ModInt\",\"An integer with modular operations.\"]],\"trait\":[[\"From\",\"\"],[\"RandModInt\",\"Random ModInt\"]]});", "file_path": "docs/crypto_rs/arithmetic/mod_int/sidebar-items.js", "rank": 87, "score": 15678.024418544954 }, { "content": "(function() {var implementors = {};\n\nimplementors[\"crypto_rs\"] = [];\n\n\n\n if (window.register_implementors) {\n\n window.register_implementors(implementors);\n\n } else {\n\n window.pending_implementors = implementors;\n\n }\n\n \n\n})()\n", "file_path": "docs/implementors/crypto_rs/arithmetic/mod_int/trait.From.js", "rank": 88, "score": 15678.024418544954 }, { "content": "initSidebarItems({\"fn\":[[\"mod_inverse\",\"Modular Inverse\"]]});", "file_path": "docs/crypto_rs/arithmetic/mod_inverse/sidebar-items.js", "rank": 89, "score": 15678.024418544954 } ]
Rust
third-party/stringprep/tests/nameprep_tests.rs
capyloon/api-daemon
ab4e4b60aa9bb617734c64655c0b8940fff098bc
extern crate stringprep; use stringprep::{Error, nameprep}; fn assert_prohibited_character<T>(result: Result<T, Error>) { assert!(result.is_err()); } fn assert_prohibited_bidirectional_text<T>(result: Result<T, Error>) { assert!(result.is_err()); } #[test] fn test_nameprep() { assert_eq!("安室奈美恵-with-super-monkeys", nameprep("安室奈美恵-with-SUPER-MONKEYS").unwrap()); assert_eq!("미술", nameprep("미술").unwrap()); assert_eq!("ليهمابتكلموشعربي؟", nameprep("ليهمابتكلموشعربي؟").unwrap()); assert_eq!("他们为什么不说中文", nameprep("他们为什么不说中文").unwrap()); assert_eq!("למההםפשוטלאמדבריםעברית", nameprep("למההםפשוטלאמדבריםעברית").unwrap()); assert_eq!("почемужеонинеговорятпорусски", nameprep("почемужеонинеговорятпорусски").unwrap()); assert_eq!("tạisaohọkhôngthểchỉnóitiếngviệt", nameprep("TạisaohọkhôngthểchỉnóitiếngViệt").unwrap()); assert_eq!("ひとつ屋根の下2", nameprep("ひとつ屋根の下2").unwrap()); assert_eq!("pročprostěnemluvíčesky", nameprep("Pročprostěnemluvíčesky").unwrap()); assert_eq!("यहलोगहिन्दीक्योंनहींबोलसकतेहैं", nameprep("यहलोगहिन्दीक्योंनहींबोलसकतेहैं").unwrap()); assert_eq!("ພາສາລາວ", nameprep("ພາສາລາວ").unwrap()); assert_eq!("bonġusaħħa", nameprep("bonġusaħħa").unwrap()); assert_eq!("ελληνικά", nameprep("ελληνικά").unwrap()); } #[test] fn should_map_to_nothing() { let input = "foo\u{00ad}\u{034f}\u{1806}\u{180b}bar\u{200b}\u{2060}baz\u{fe00}\u{fe08}\u{fe0f}\u{feff}"; assert_eq!("foobarbaz", nameprep(input).unwrap()); } #[test] fn should_case_fold_ascii() { assert_eq!("cafe", nameprep("CAFE").unwrap()); } #[test] fn should_case_fold_8bit() { assert_eq!("ss", nameprep("\u{00df}").unwrap()); } #[test] fn should_case_fold_16bit() { assert_eq!("\u{0069}\u{0307}", nameprep("\u{0130}").unwrap()); } #[test] fn should_case_fold_multibyte() { let input = "\u{0143}\u{037a}"; let output = "\u{0144} \u{03b9}"; assert_eq!(output, nameprep(input).unwrap()); } #[test] fn should_case_fold() { let input = "\u{2121}\u{33c6}\u{1d7bb}"; let output = "telc\u{2215}\u{006b}\u{0067}\u{03c3}"; assert_eq!(output, 
nameprep(input).unwrap()); } #[test] fn should_normalize() { let input = "j\u{030c}\u{00a0}\u{00aa}"; let output = "\u{01f0} a"; assert_eq!(output, nameprep(input).unwrap()); } #[test] fn should_case_fold_and_normalize() { let input = "\u{1fb7}"; let output = "\u{1fb6}\u{03b9}"; assert_eq!(output, nameprep(input).unwrap()); } #[test] fn should_revert_case_fold_and_normalization() { let inputs = ["\u{01f0}", "\u{0390}", "\u{03b0}", "\u{1e96}", "\u{1f56}"]; for input in inputs.iter() { assert_eq!(input.clone(), nameprep(input).unwrap()); } } #[test] fn should_permit_ascii_space() { assert_eq!(" ", nameprep(" ").unwrap()); } #[test] fn should_map_8bit_space() { assert_eq!(" ", nameprep("\u{00a0}").unwrap()); } #[test] fn should_prohibit_multibyte_space() { assert_prohibited_character(nameprep("\u{1680}")); } #[test] fn should_map_multibyte_space1() { assert_eq!(" ", nameprep("\u{2000}").unwrap()); } #[test] fn should_drop_zero_width_space() { assert_eq!("", nameprep("\u{200b}").unwrap()); } #[test] fn should_map_multibyte_space2() { assert_eq!(" ", nameprep("\u{3000}").unwrap()); } #[test] fn should_permit_ascii_control() { assert_eq!("\u{0010}\u{007f}", nameprep("\u{0010}\u{007f}").unwrap()); } #[test] fn should_prohibit_8bit_control() { assert_prohibited_character(nameprep("\u{0085}")); } #[test] fn should_prohibit_multibyte_control() { assert_prohibited_character(nameprep("\u{180e}")); } #[test] fn should_drop_zero_width_no_break_space() { assert_eq!("", nameprep("\u{feff}").unwrap()); } #[test] fn should_prohibit_non_ascii_control() { assert_prohibited_character(nameprep("\u{1d175}")); } #[test] fn should_prohibit_plane0_private_use() { assert_prohibited_character(nameprep("\u{f123}")); } #[test] fn should_prohibit_plane15_private_use() { assert_prohibited_character(nameprep("\u{f1234}")); } #[test] fn should_prohibit_plane16_private_use() { assert_prohibited_character(nameprep("\u{10f234}")); } #[test] fn should_prohibit_non_character1() { 
assert_prohibited_character(nameprep("\u{8fffe}")); } #[test] fn should_prohibit_non_character2() { assert_prohibited_character(nameprep("\u{10ffff}")); } #[test] fn should_prohibit_non_plain_text() { assert_prohibited_character(nameprep("\u{fffd}")); } #[test] fn should_prohibit_ideographic_description() { assert_prohibited_character(nameprep("\u{2ff5}")); } #[test] fn should_normalize_display_property() { assert_eq!("\u{0301}", nameprep("\u{0341}").unwrap()); } #[test] fn should_prohibit_left_to_right_mark() { assert_prohibited_character(nameprep("\u{200e}")); } #[test] fn should_prohibit_deprecated() { assert_prohibited_character(nameprep("\u{202a}")); } #[test] fn should_prohibit_language_tagging1() { assert_prohibited_character(nameprep("\u{e0001}")); } #[test] fn should_prohibit_language_tagging2() { assert_prohibited_character(nameprep("\u{e0042}")); } #[test] fn should_prohibit_randalcat_with_lcat1() { assert_prohibited_bidirectional_text(nameprep("foo\u{05be}bar")); } #[test] fn should_prohibit_randalcat_with_lcat2() { assert_prohibited_bidirectional_text(nameprep("foo\u{fd50}bar")); } #[test] fn should_permit_randalcat1() { assert_eq!("foo \u{064e}bar", nameprep("foo\u{fe76}bar").unwrap()); } #[test] fn should_prohibit_mixed_randalcat() { assert_prohibited_bidirectional_text(nameprep("\u{0672}\u{0031}")); } #[test] fn should_permit_randalcat2() { assert_eq!("\u{0627}\u{0031}\u{0628}", nameprep("\u{0627}\u{0031}\u{0628}").unwrap()); } #[test] fn should_prohibit_unassigned_code_point() { assert_prohibited_character(nameprep("\u{e0002}")); } #[test] fn should_shrink() { let input = "X\u{00ad}\u{00df}\u{0130}\u{2121}j\u{030c}\u{00a0}\u{00aa}\u{03b0}\u{2000}"; let output = "xssi\u{0307}tel\u{01f0} a\u{03b0}\u{0020}"; assert_eq!(output, nameprep(input).unwrap()); } #[test] fn should_expand() { let input = "X\u{00df}\u{3316}\u{0130}\u{2121}\u{249f}\u{3300}"; let output = 
"xss\u{30ad}\u{30ed}\u{30e1}\u{30fc}\u{30c8}\u{30eb}\u{0069}\u{0307}\u{0074}\u{0065}\u{006c}\u{0028}\u{0064}\u{0029}\u{30a2}\u{30d1}\u{30fc}\u{30c8}"; assert_eq!(output, nameprep(input).unwrap()); }
extern crate stringprep; use stringprep::{Error, nameprep}; fn assert_prohibited_character<T>(result: Result<T, Error>) { assert!(result.is_err()); } fn assert_prohibited_bidirectional_text<T>(result: Result<T, Error>) { assert!(result.is_err()); } #[test] fn test_nameprep() { assert_eq!("安室奈美恵-with-super-monkeys", nameprep("安室奈美恵-with-SUPER-MONKEYS").unwrap()); assert_eq!("미술", nameprep("미술").unwrap()); assert_eq!("ليهمابتكلموشعربي؟", nameprep("ليهمابتكلموشعربي؟").unwrap()); assert_eq!("他们为什么不说中文", nameprep("他们为什么不说中文").unwrap()); assert_eq!("למההםפשוטלאמדבריםעברית", nameprep("למההםפשוטלאמדבריםעברית").unwrap()); assert_eq!("почемужеонинеговорятпорусски", nameprep("почемужеонинеговорятпорусски").unwrap()); assert_eq!("tạisaohọkhôngthểchỉnóitiếngviệt", nameprep("TạisaohọkhôngthểchỉnóitiếngViệt").unwrap()); assert_eq!("ひとつ屋根の下2", nameprep("ひとつ屋根の下2").unwrap()); assert_eq!("pročprostěnemluvíčesky", nameprep("Pročprostěnemluvíčesky").unwrap()); assert_eq!("यहलोगहिन्दीक्योंनहींबोलसकतेहैं", nameprep("यहलोगहिन्दीक्योंनहींबोलसकतेहैं").unwrap()); assert_eq!("ພາສາລາວ", nameprep("ພາສາລາວ").unwrap()); assert_eq!("bonġusaħħa", nameprep("bonġusaħħa").unwrap()); assert_eq!("ελληνικά", nameprep("ελληνικά").unwrap()); } #[test] fn should_map_to_nothing() { let input = "foo\u{00ad}\u{034f}\u{1806}\u{180b}bar\u{200b}\u{2060}baz\u{fe00}\u{fe08}\u{fe0f}\u{feff}"; assert_eq!("foobarbaz", nameprep(input).unwrap()); } #[test] fn should_case_fold_ascii() { assert_eq!("cafe", nameprep("CAFE").unwrap()); } #[test] fn should_case_fold_8bit() { assert_eq!("ss", nameprep("\u{00df}").unwrap()); } #[test] fn should_case_fold_16bit() { assert_eq!("\u{0069}\u{0307}", nameprep("\u{0130}").unwrap()); } #[test] fn should_case_fold_multibyte() { let input = "\u{0143}\u{037a}"; let output = "\u{0144} \u{03b9}"; assert_eq!(output, nameprep(input).unwrap()); } #[test] fn should_case_fold() { let input = "\u{2121}\u{33c6}\u{1d7bb}"; let output = "telc\u{2215}\u{006b}\u{0067}\u{03c3}"; assert_eq!(output, 
nameprep(input).unwrap()); } #[test] fn should_normalize() { let input = "j\u{030c}\u{00a0}\u{00aa}"; let output = "\u{01f0} a"; assert_eq!(output, nameprep(input).unwrap()); } #[test] fn should_case_fold_and_normalize() { let input = "\u{1fb7}"; let output = "\u{1fb6}\u{03b9}"; assert_eq!(output, nameprep(input).unwrap()); } #[test]
#[test] fn should_permit_ascii_space() { assert_eq!(" ", nameprep(" ").unwrap()); } #[test] fn should_map_8bit_space() { assert_eq!(" ", nameprep("\u{00a0}").unwrap()); } #[test] fn should_prohibit_multibyte_space() { assert_prohibited_character(nameprep("\u{1680}")); } #[test] fn should_map_multibyte_space1() { assert_eq!(" ", nameprep("\u{2000}").unwrap()); } #[test] fn should_drop_zero_width_space() { assert_eq!("", nameprep("\u{200b}").unwrap()); } #[test] fn should_map_multibyte_space2() { assert_eq!(" ", nameprep("\u{3000}").unwrap()); } #[test] fn should_permit_ascii_control() { assert_eq!("\u{0010}\u{007f}", nameprep("\u{0010}\u{007f}").unwrap()); } #[test] fn should_prohibit_8bit_control() { assert_prohibited_character(nameprep("\u{0085}")); } #[test] fn should_prohibit_multibyte_control() { assert_prohibited_character(nameprep("\u{180e}")); } #[test] fn should_drop_zero_width_no_break_space() { assert_eq!("", nameprep("\u{feff}").unwrap()); } #[test] fn should_prohibit_non_ascii_control() { assert_prohibited_character(nameprep("\u{1d175}")); } #[test] fn should_prohibit_plane0_private_use() { assert_prohibited_character(nameprep("\u{f123}")); } #[test] fn should_prohibit_plane15_private_use() { assert_prohibited_character(nameprep("\u{f1234}")); } #[test] fn should_prohibit_plane16_private_use() { assert_prohibited_character(nameprep("\u{10f234}")); } #[test] fn should_prohibit_non_character1() { assert_prohibited_character(nameprep("\u{8fffe}")); } #[test] fn should_prohibit_non_character2() { assert_prohibited_character(nameprep("\u{10ffff}")); } #[test] fn should_prohibit_non_plain_text() { assert_prohibited_character(nameprep("\u{fffd}")); } #[test] fn should_prohibit_ideographic_description() { assert_prohibited_character(nameprep("\u{2ff5}")); } #[test] fn should_normalize_display_property() { assert_eq!("\u{0301}", nameprep("\u{0341}").unwrap()); } #[test] fn should_prohibit_left_to_right_mark() { assert_prohibited_character(nameprep("\u{200e}")); 
} #[test] fn should_prohibit_deprecated() { assert_prohibited_character(nameprep("\u{202a}")); } #[test] fn should_prohibit_language_tagging1() { assert_prohibited_character(nameprep("\u{e0001}")); } #[test] fn should_prohibit_language_tagging2() { assert_prohibited_character(nameprep("\u{e0042}")); } #[test] fn should_prohibit_randalcat_with_lcat1() { assert_prohibited_bidirectional_text(nameprep("foo\u{05be}bar")); } #[test] fn should_prohibit_randalcat_with_lcat2() { assert_prohibited_bidirectional_text(nameprep("foo\u{fd50}bar")); } #[test] fn should_permit_randalcat1() { assert_eq!("foo \u{064e}bar", nameprep("foo\u{fe76}bar").unwrap()); } #[test] fn should_prohibit_mixed_randalcat() { assert_prohibited_bidirectional_text(nameprep("\u{0672}\u{0031}")); } #[test] fn should_permit_randalcat2() { assert_eq!("\u{0627}\u{0031}\u{0628}", nameprep("\u{0627}\u{0031}\u{0628}").unwrap()); } #[test] fn should_prohibit_unassigned_code_point() { assert_prohibited_character(nameprep("\u{e0002}")); } #[test] fn should_shrink() { let input = "X\u{00ad}\u{00df}\u{0130}\u{2121}j\u{030c}\u{00a0}\u{00aa}\u{03b0}\u{2000}"; let output = "xssi\u{0307}tel\u{01f0} a\u{03b0}\u{0020}"; assert_eq!(output, nameprep(input).unwrap()); } #[test] fn should_expand() { let input = "X\u{00df}\u{3316}\u{0130}\u{2121}\u{249f}\u{3300}"; let output = "xss\u{30ad}\u{30ed}\u{30e1}\u{30fc}\u{30c8}\u{30eb}\u{0069}\u{0307}\u{0074}\u{0065}\u{006c}\u{0028}\u{0064}\u{0029}\u{30a2}\u{30d1}\u{30fc}\u{30c8}"; assert_eq!(output, nameprep(input).unwrap()); }
fn should_revert_case_fold_and_normalization() { let inputs = ["\u{01f0}", "\u{0390}", "\u{03b0}", "\u{1e96}", "\u{1f56}"]; for input in inputs.iter() { assert_eq!(input.clone(), nameprep(input).unwrap()); } }
function_block-full_function
[]
Rust
src/airport_gates.rs
DarrenTsung/interview-rs
aa195501f35ba8d0d1be1c681de29145a8c3c054
use binary_heap_plus::*; /* At an airport you have a timetable for arrivals and departures. You need to determine the minimum number of gates you'd need to provide so that all the planes can be placed at a gate as per their schedule. The arrival and departure times for each plane are presented in two arrays, sorted by arrival time, and you're told the total number of flights for the day. Assume that no planes remain overnight at the airport; all fly in and back out on the same day. Assume that if a plane departs in the same minute as another plane arrives, the arriving plane takes priority (i.e. you'll still need the gate for the departing plane). Write a function that returns the minimum number of gates needed for the schedules you're given. Example: arrQ = {900, 940, 950,1100,1500,1800} depQ = {910,1200,1120,1130,1900,2000} flights = 6 */ #[derive(Debug, Clone, Copy, PartialOrd, Ord, PartialEq, Eq)] pub struct Schedule { start: u32, end: u32, } impl From<(u32, u32)> for Schedule { fn from(v: (u32, u32)) -> Self { Self { start: v.0, end: v.1, } } } pub trait AirportGatesSolution { fn airport_gates(&self, schedules_sorted_by_arrival: &[(u32, u32)]) -> u32; } pub struct AirportGatesSolutionNaive; impl AirportGatesSolution for AirportGatesSolutionNaive { fn airport_gates(&self, schedules_sorted_by_arrival: &[(u32, u32)]) -> u32 { let mut gates: Vec<Schedule> = vec![]; for schedule in schedules_sorted_by_arrival { let schedule: Schedule = (*schedule).into(); let mut found_gate = false; for gate in &mut gates { if gate.end < schedule.start { gate.end = schedule.end; found_gate = true; break; } } if found_gate { continue; } gates.push(schedule); } gates.len() as u32 } } pub struct AirportGatesSolutionMoreEfficient; impl AirportGatesSolution for AirportGatesSolutionMoreEfficient { fn airport_gates(&self, schedules_sorted_by_arrival: &[(u32, u32)]) -> u32 { let mut gate_end_times: BinaryHeap<u32, MinComparator> = BinaryHeap::from_vec(vec![]); for schedule in 
schedules_sorted_by_arrival { let schedule: Schedule = (*schedule).into(); if let Some(mut min_end_time) = gate_end_times.peek_mut() { if *min_end_time < schedule.start { *min_end_time = schedule.end; continue; } } gate_end_times.push(schedule.end); } gate_end_times.len() as u32 } } pub struct AirportGatesSolutionCounter; impl AirportGatesSolution for AirportGatesSolutionCounter { fn airport_gates(&self, schedules_sorted_by_arrival: &[(u32, u32)]) -> u32 { #[derive(PartialEq, Eq, PartialOrd, Ord)] enum EventType { Arrival, Departure, } let sorted_events = { let mut events = vec![]; for (arrival, departure) in schedules_sorted_by_arrival { events.push((arrival, EventType::Arrival)); events.push((departure, EventType::Departure)); } events.sort(); events }; let mut max_gate_count = 0; let mut gate_count = 0; for (_time, event_type) in sorted_events { match event_type { EventType::Arrival => gate_count += 1, EventType::Departure => gate_count -= 1, } max_gate_count = std::cmp::max(gate_count, max_gate_count); } max_gate_count } } #[cfg(test)] mod tests { use super::*; fn check_correctness_for_all_solutions(assertions: impl Fn(&dyn AirportGatesSolution)) { assertions(&AirportGatesSolutionNaive); assertions(&AirportGatesSolutionMoreEfficient); assertions(&AirportGatesSolutionCounter); } #[test] fn trivial() { check_correctness_for_all_solutions(|s| { assert_eq!(s.airport_gates(&[(0, 10)]), 1); assert_eq!(s.airport_gates(&[(0, 10), (11, 30)]), 1); }) } #[test] fn arriving_at_same_time_as_departing() { check_correctness_for_all_solutions(|s| { assert_eq!(s.airport_gates(&[(0, 10), (10, 20)]), 2); }) } #[test] fn example() { check_correctness_for_all_solutions(|s| { assert_eq!( s.airport_gates(&[ (900, 910), (940, 1200), (950, 1120), (1100, 1130), (1500, 1900), (1800, 2000), ]), 3 ); }) } }
use binary_heap_plus::*; /* At an airport you have a timetable for arrivals and departures. You need to determine the minimum number of gates you'd need to provide so that all the planes can be placed at a gate as per their schedule. The arrival and departure times for each plane are presented in two arrays, sorted by arrival time, and you're told the total number of flights for the day. Assume that no planes remain overnight at the airport; all fly in and back out on the same day. Assume that if a plane departs in the same minute as another plane arrives, the arriving plane takes priority (i.e. you'll still need the gate for the departing plane). Write a function that returns the minimum number of gates needed for the schedules you're given. Example: arrQ = {900, 940, 950,1100,1500,1800} depQ = {910,1200,1120,1130,1900,2000} flights = 6 */ #[derive(Debug, Clone, Copy, PartialOrd, Ord, PartialEq, Eq)] pub struct Schedule { start: u32, end: u32, } impl From<(u32, u32)> for Schedule { fn from(v: (u32, u32)) -> Self { Self { start: v.0, end: v.1, } } } pub trait AirportGatesSolution { fn airport_gates(&self, schedules_sorted_by_arrival: &[(u32, u32)]) -> u32; } pub struct AirportGatesSolutionNaive; impl AirportGatesSolution for AirportGatesSolutionNaive { fn airport_gates(&self, schedules_sorted_by_arrival: &[(u32, u32)]) -> u32 { let mut gates: Vec<Schedule> = vec![]; for schedule in schedules_sorted_by_arrival { let schedule: Schedule = (*schedule).into(); let mut found_gate = false; for gate in &mut gates { if gate.end < schedule.start { gate.end = schedule.end; found_gate = true; break; } } if found_gate { continue; } gates.push(schedule); } gates.len() as u32 } } pub struct AirportGatesSolutionMoreEfficient; impl AirportGatesSolution for AirportGatesSolutionMoreEfficient { fn airport_gates(&self, schedules_sorted_by_arrival: &[(u32, u32)]) -> u32 { let mut gate_end_times: BinaryHeap<u32, MinComparator> = BinaryHeap::from_vec(vec![]); for schedule in 
schedules_sorted_by_arrival { let schedule: Schedule = (*schedule).into(); if let Some(mut min_end_time) = gate_end_times.peek_mut() { if *min_end_time < schedule.start { *min_end_time = schedule.end; continue; } } gate_end_times.push(schedule.end); } gate_end_times.len() as u32 } } pub struct AirportGatesSolutionCounter; impl AirportGatesSolution for AirportGatesSolutionCounter {
} #[cfg(test)] mod tests { use super::*; fn check_correctness_for_all_solutions(assertions: impl Fn(&dyn AirportGatesSolution)) { assertions(&AirportGatesSolutionNaive); assertions(&AirportGatesSolutionMoreEfficient); assertions(&AirportGatesSolutionCounter); } #[test] fn trivial() { check_correctness_for_all_solutions(|s| { assert_eq!(s.airport_gates(&[(0, 10)]), 1); assert_eq!(s.airport_gates(&[(0, 10), (11, 30)]), 1); }) } #[test] fn arriving_at_same_time_as_departing() { check_correctness_for_all_solutions(|s| { assert_eq!(s.airport_gates(&[(0, 10), (10, 20)]), 2); }) } #[test] fn example() { check_correctness_for_all_solutions(|s| { assert_eq!( s.airport_gates(&[ (900, 910), (940, 1200), (950, 1120), (1100, 1130), (1500, 1900), (1800, 2000), ]), 3 ); }) } }
fn airport_gates(&self, schedules_sorted_by_arrival: &[(u32, u32)]) -> u32 { #[derive(PartialEq, Eq, PartialOrd, Ord)] enum EventType { Arrival, Departure, } let sorted_events = { let mut events = vec![]; for (arrival, departure) in schedules_sorted_by_arrival { events.push((arrival, EventType::Arrival)); events.push((departure, EventType::Departure)); } events.sort(); events }; let mut max_gate_count = 0; let mut gate_count = 0; for (_time, event_type) in sorted_events { match event_type { EventType::Arrival => gate_count += 1, EventType::Departure => gate_count -= 1, } max_gate_count = std::cmp::max(gate_count, max_gate_count); } max_gate_count }
function_block-full_function
[ { "content": "pub fn has_two_movies_for_flight(flight_length: u32, movie_lengths: Vec<u32>) -> bool {\n\n let mut complement_movie_lengths = HashSet::new();\n\n\n\n for movie_length in movie_lengths {\n\n // If movie is not valid, ignore.\n\n if movie_length > flight_length {\n\n continue;\n\n }\n\n\n\n // If movie_length exists in this set, then that means that\n\n // a movie exists with the complement movie length.\n\n if complement_movie_lengths.contains(&movie_length) {\n\n return true;\n\n }\n\n\n\n complement_movie_lengths.insert(flight_length - movie_length);\n\n }\n\n\n\n false\n\n}\n", "file_path": "src/inflight_entertainment.rs", "rank": 0, "score": 185528.7082837946 }, { "content": "// Given various subsequences of an array of unique integers, reconstruct the original array:\n\n//\n\n// Example: [1, 3, 5], [1, 3, 9], [9, 5]\n\n// Output : [1, 3, 9, 5]\n\n//\n\n// There may be multiple valid reconstructions of the original array. Return a valid reconstruction.\n\n//\n\n// Example: [1, 3], [5, 3]\n\n// Output : [1, 5, 3] or [5, 1, 3]\n\npub fn subsequence_reconstruction(subsequences: &[Vec<u32>]) -> Vec<u32> {\n\n // Iterate through subsequences to create a hashmap containing all directed edges.\n\n let mut directed_edges = HashMap::new();\n\n for subsequence in subsequences {\n\n let mut prev = None;\n\n for &id in subsequence {\n\n directed_edges.entry(id).or_insert_with(HashSet::new);\n\n\n\n if let Some(prev) = prev {\n\n // Previously is always added to the HashMap in a previous iteration^.\n\n directed_edges.get_mut(&prev).expect(\"exists\").insert(id);\n\n }\n\n\n\n prev = Some(id);\n\n }\n\n }\n\n\n\n // Traverse the directed edges and count number of nodes reachable.\n\n let mut nodes_reachable = HashMap::new();\n\n for (&id, _edges) in &directed_edges {\n", "file_path": "src/subsequence_reconstruction.rs", "rank": 1, "score": 167545.573277989 }, { "content": "pub fn graph_shortest_path<T: Eq + Hash + Clone>(\n\n adjacency_list: &HashMap<T, 
Vec<T>>,\n\n source: T,\n\n destination: T,\n\n) -> Option<Vec<T>> {\n\n if source == destination {\n\n return Some(vec![source]);\n\n }\n\n\n\n let mut backwards_pointers = HashMap::new();\n\n backwards_pointers.insert(source.clone(), None);\n\n let mut to_visit = VecDeque::new();\n\n to_visit.push_back(source);\n\n\n\n while let Some(node_id) = to_visit.pop_front() {\n\n if !adjacency_list.contains_key(&node_id) {\n\n panic!(\"Graph must have an entry in adjacency_list for each node reachable.\");\n\n }\n\n\n\n for adj_node_id in &adjacency_list[&node_id] {\n", "file_path": "src/graph_shortest_path.rs", "rank": 3, "score": 140612.04891848104 }, { "content": "pub fn get_max_profit(stock_prices: Vec<u32>) -> i64 {\n\n if stock_prices.is_empty() {\n\n return 0;\n\n }\n\n\n\n let mut max_profit = None;\n\n let mut lowest = stock_prices[0];\n\n for price in stock_prices.into_iter().skip(1) {\n\n let profit = price as i64 - lowest as i64;\n\n\n\n lowest = cmp::min(lowest, price);\n\n\n\n if let Some(prev_max_profit) = max_profit {\n\n max_profit = Some(cmp::max(prev_max_profit, profit));\n\n } else {\n\n max_profit = Some(profit);\n\n }\n\n }\n\n\n\n max_profit.unwrap_or(0)\n", "file_path": "src/apple_stocks.rs", "rank": 4, "score": 137981.1067713654 }, { "content": "pub fn second_largest_value_in_bst<T: PartialEq + Debug + Clone>(\n\n tree: &BinaryTree<T>,\n\n) -> Option<T> {\n\n let naive = naive(tree);\n\n let better = better(tree);\n\n let best = best(tree);\n\n assert_eq!(better, naive);\n\n assert_eq!(best, naive);\n\n naive\n\n}\n\n\n\n/*\n\nNaive solution:\n\nTraverse tree and store max in circular buffer of size 2.\n\nReturn the end element in the circular buffer.\n\n\n\nTime complexity: O(N) - traversal takes N time.\n\nSpace complexity: O(H) - traversal takes H space (height of tree).\n\n*/\n", "file_path": "src/binary_tree_second_largest.rs", "rank": 5, "score": 132829.55068805354 }, { "content": "pub fn products_of_all_except_at_index(values: Vec<i32>) -> 
Vec<i32> {\n\n if values.len() < 2 {\n\n return vec![];\n\n }\n\n\n\n let mut products = vec![];\n\n for _ in &values {\n\n products.push(1);\n\n }\n\n\n\n let mut product_before_index = 1;\n\n for (index, &value) in values.iter().enumerate() {\n\n products[index] *= product_before_index;\n\n product_before_index *= value;\n\n }\n\n\n\n let mut product_after_index = 1;\n\n for (index, &value) in values.iter().enumerate().rev() {\n\n products[index] *= product_after_index;\n\n product_after_index *= value;\n", "file_path": "src/product_of_all_other_numbers.rs", "rank": 6, "score": 132802.24698530982 }, { "content": "// Computes the n-th fibonocci number.\n\npub fn fib(n: u32) -> u32 {\n\n if n == 0 {\n\n return 0;\n\n }\n\n\n\n if n == 1 {\n\n return 1;\n\n }\n\n\n\n let mut prev = 0;\n\n let mut current = 1;\n\n\n\n let mut n = (n as i64) - 2;\n\n while n >= 0 {\n\n let new_current = prev + current;\n\n prev = current;\n\n current = new_current;\n\n n -= 1;\n\n }\n\n current\n", "file_path": "src/fib.rs", "rank": 7, "score": 131643.1385126128 }, { "content": "pub fn merge_meetings(mut meetings: Vec<Meeting>) -> HashSet<Meeting> {\n\n meetings.sort_by_key(|m| m.start);\n\n\n\n let mut merged = HashSet::new();\n\n\n\n let mut meetings = meetings.into_iter();\n\n let mut current = if let Some(meeting) = meetings.next() {\n\n meeting\n\n } else {\n\n // Meetings is empty\n\n return merged;\n\n };\n\n\n\n for meeting in meetings {\n\n // If current does not overlap with meeting, then we know it\n\n // does not overlap with any other (since elements are sorted).\n\n if current.end < meeting.start {\n\n let current = mem::replace(&mut current, meeting);\n\n merged.insert(current);\n\n continue;\n", "file_path": "src/merging_meetings.rs", "rank": 8, "score": 130661.70840496919 }, { "content": "pub fn is_binary_search_tree<T: Ord + Clone>(tree: &BinaryTree<T>) -> bool {\n\n calc_search_bounds(tree.root()).is_some()\n\n}\n\n\n", "file_path": "src/binary_tree_search.rs", 
"rank": 9, "score": 122881.39402421437 }, { "content": "pub fn making_change(amount: u32, denominations: &[u32]) -> u32 {\n\n let recursive = recursive(amount, denominations);\n\n let bottom_up = bottom_up(amount, denominations);\n\n assert_eq!(bottom_up, recursive);\n\n recursive\n\n}\n\n\n", "file_path": "src/making_change.rs", "rank": 10, "score": 122700.30161635311 }, { "content": "pub fn validate_fifo_orders(take_out: Vec<i32>, dine_in: Vec<i32>, served: Vec<i32>) -> bool {\n\n let mut take_out_index = 0;\n\n let mut dine_in_index = 0;\n\n\n\n for id in served {\n\n // If this id is not found in the pointers for take_out / dine_in,\n\n // then the customers were not served in fifo order.\n\n let was_take_out = take_out_index < take_out.len() && id == take_out[take_out_index];\n\n let was_dine_in = dine_in_index < dine_in.len() && id == dine_in[dine_in_index];\n\n\n\n if !was_take_out && !was_dine_in {\n\n return false;\n\n }\n\n\n\n if was_take_out {\n\n take_out_index += 1;\n\n }\n\n if was_dine_in {\n\n dine_in_index += 1;\n\n }\n", "file_path": "src/cafe_orders.rs", "rank": 11, "score": 122057.40600399127 }, { "content": "pub fn merge_sorted_lists<T: std::fmt::Debug + PartialOrd>(\n\n mut a: VecDeque<T>,\n\n mut b: VecDeque<T>,\n\n) -> Vec<T> {\n\n let mut sorted = vec![];\n\n\n\n loop {\n\n match (a.pop_front(), b.pop_front()) {\n\n (Some(item_a), Some(item_b)) => {\n\n if item_a > item_b {\n\n sorted.push(item_b);\n\n a.push_front(item_a);\n\n } else {\n\n sorted.push(item_a);\n\n b.push_front(item_b);\n\n }\n\n }\n\n (Some(a), None) => sorted.push(a),\n\n (None, Some(b)) => sorted.push(b),\n\n (None, None) => break,\n", "file_path": "src/merge_lists.rs", "rank": 12, "score": 117223.334988783 }, { "content": "/// Given a message like \"help me\", it should mutate message to: \"me help\".\n\npub fn reverse_words_order(message: &mut [char]) {\n\n // Find alphabetic boundaries.\n\n let mut first_alpha = 0;\n\n let mut last_alpha = message.len() - 1;\n\n\n\n 
while first_alpha <= last_alpha {\n\n if message[first_alpha].is_alphabetic() {\n\n break;\n\n }\n\n first_alpha += 1;\n\n }\n\n\n\n while last_alpha >= first_alpha {\n\n if message[last_alpha].is_alphabetic() {\n\n break;\n\n }\n\n last_alpha -= 1;\n\n }\n\n\n\n message[first_alpha..=last_alpha].reverse();\n\n reverse_words(&mut message[first_alpha..=last_alpha]);\n\n}\n\n\n", "file_path": "src/reverse_words.rs", "rank": 13, "score": 107284.2123822254 }, { "content": "fn find_nth_max<T: Debug + Clone>(node: Option<BinaryNodeRef<T>>, n: &mut u32) -> Option<T> {\n\n let node = node?;\n\n\n\n if let Some(found) = find_nth_max(node.right(), n) {\n\n return Some(found);\n\n }\n\n\n\n *n -= 1;\n\n if *n == 0 {\n\n return Some(node.value().clone());\n\n }\n\n\n\n if let Some(found) = find_nth_max(node.left(), n) {\n\n return Some(found);\n\n }\n\n\n\n None\n\n}\n\n\n\n/*\n", "file_path": "src/binary_tree_second_largest.rs", "rank": 14, "score": 107066.90543877038 }, { "content": "pub fn string_permutation(input: &str) -> Vec<String> {\n\n recursive(input.chars().collect())\n\n}\n\n\n", "file_path": "src/string_permutations.rs", "rank": 15, "score": 104011.75883846439 }, { "content": "// Find the duplicate given the following constraints:\n\n// 1. The integers are in the range 1..n\n\n// 2. 
The list has a length of n+1\n\npub fn find_duplicate(values: &Vec<i32>) -> i32 {\n\n let binary_search_value = find_duplicate_binary_search(values);\n\n let graph_value = find_duplicate_graph(values);\n\n assert_eq!(binary_search_value, graph_value);\n\n binary_search_value\n\n}\n\n\n", "file_path": "src/find_duplicate_space.rs", "rank": 16, "score": 101780.137243242 }, { "content": "fn in_order_traversal<T>(node: Option<BinaryNodeRef<T>>, f: &mut impl FnMut(&T)) {\n\n let node = if let Some(node) = node {\n\n node\n\n } else {\n\n return;\n\n };\n\n\n\n in_order_traversal(node.left(), f);\n\n f(node.value());\n\n in_order_traversal(node.right(), f);\n\n}\n\n\n\n/*\n\nBetter solution:\n\nInstead of traversing the entire tree, we can recursively go down\n\nthe right side and early exit if value is found.\n\n\n\nThis works easily for different values of N-th largest.\n\n\n\nTime complexity: O(H) where H is height of tree.\n\nSpace complexity: O(H) due to recursive traversal requirements.\n\n*/\n", "file_path": "src/binary_tree_second_largest.rs", "rank": 17, "score": 100387.35224527726 }, { "content": "pub fn highest_product_of_three(integers: Vec<i32>) -> i64 {\n\n if integers.len() < 3 {\n\n return 0;\n\n }\n\n\n\n // An array holding lowest numbers, carries 2 elements by the end.\n\n let mut lowest = vec![];\n\n // An array holding highest numbers, carries 3 elements by the end.\n\n let mut highest = vec![];\n\n\n\n for i in integers {\n\n let i = i as i64;\n\n\n\n if lowest.len() < 2 {\n\n lowest.push(i);\n\n } else {\n\n let max_of_lowest = *lowest.iter().max().expect(\"exists\");\n\n if i < max_of_lowest {\n\n let index = lowest\n\n .iter()\n", "file_path": "src/highest_product_of_three.rs", "rank": 18, "score": 99696.20515168822 }, { "content": "fn recursive(cake_types: &[CakeType], bag_size: u32, cache: &mut HashMap<u32, f32>) -> f32 {\n\n if cache.contains_key(&bag_size) {\n\n return cache[&bag_size];\n\n }\n\n\n\n let mut max_value = 0.0;\n\n for i in 
0..cake_types.len() {\n\n let current_type = &cake_types[i];\n\n // Handle edge case where cakes can have no weight.\n\n if current_type.weight == 0 {\n\n if current_type.value > 0.0 {\n\n // Set max value to infinity if a cake has value and no weight.\n\n max_value = std::f32::INFINITY;\n\n break;\n\n } else {\n\n // Ignore cakes that have no weight + no value / negative value.\n\n continue;\n\n }\n\n }\n\n\n", "file_path": "src/cake_thief.rs", "rank": 19, "score": 98833.70762146062 }, { "content": "fn max_bag_value(cake_types: &[CakeType], bag_size: u32, cache: &mut HashMap<u32, f32>) -> f32 {\n\n if cache.contains_key(bag_size) {\n\n return cache[bag_size];\n\n }\n\n\n\n let mut max_value = 0;\n\n for i in 0..cake_types.len() {\n\n let current_type = cake_types[i];\n\n // Handle edge case where cakes can have no weight.\n\n if current_type.weight == 0 {\n\n if current_type.value > 0 {\n\n // Set max value to infinity if a cake has value and no weight.\n\n max_value = std::f32::INFINITY;\n\n break;\n\n } else {\n\n // Ignore cakes that have no weight + no value.\n\n continue;\n\n }\n\n }\n\n\n", "file_path": "src/cake_thief.rs", "rank": 20, "score": 96064.36248177581 }, { "content": "pub fn needle_in_haystack(haystack: &str, needle: &str) -> Vec<usize> {\n\n let naive = needle_in_haystack_naive(haystack, needle);\n\n let rk = needle_in_haystack_rk(haystack, needle);\n\n assert_eq!(naive, rk);\n\n naive\n\n}\n\n\n\n/*\n\nNaive Solution:\n\nFor each index in the haystack (up until the remaining slice is smaller\n\nthan the needle), check if slice matches the needle.\n\n\n\nTime complexity:\n\nO(h*n) where h is # of chars in haystack and n is # of chars in needle.\n\nThis is since we have to potentially check n characters for each index in haystack.\n\n\n\nSpace complexity:\n\nO(1) since we're just keeping track of indicies.\n\n*/\n", "file_path": "src/needle_in_haystack.rs", "rank": 21, "score": 95891.63942127541 }, { "content": "pub fn max_bag_value(cake_types: 
&[CakeType], bag_size: u32) -> f32 {\n\n let mut recursive_cache = HashMap::new();\n\n let recursive = recursive(cake_types, bag_size, &mut recursive_cache);\n\n let bottoms_up = bottoms_up(cake_types, bag_size);\n\n assert_eq!(bottoms_up, recursive);\n\n recursive\n\n}\n\n\n", "file_path": "src/cake_thief.rs", "rank": 22, "score": 92378.88396471957 }, { "content": "fn recursive(amount: u32, denominations: &[u32]) -> u32 {\n\n let greatest_to_least_denominations = {\n\n let mut d = denominations.iter().cloned().collect::<Vec<_>>();\n\n d.sort();\n\n d.reverse();\n\n d\n\n };\n\n\n\n recursive_helper(amount, &greatest_to_least_denominations)\n\n}\n\n\n", "file_path": "src/making_change.rs", "rank": 23, "score": 90921.57331462568 }, { "content": "fn bottom_up(amount: u32, denominations: &[u32]) -> u32 {\n\n let least_to_greatest_denominations = {\n\n let mut d = denominations.iter().cloned().collect::<Vec<_>>();\n\n d.sort();\n\n d\n\n };\n\n\n\n // This is a vec prefilled with `amount` -> 0\n\n let mut ways_to_make_change = {\n\n let mut ways = vec![];\n\n for _ in 0..=amount {\n\n ways.push(0);\n\n }\n\n ways\n\n };\n\n ways_to_make_change[0] = 1;\n\n\n\n for denomination in least_to_greatest_denominations {\n\n // Calculate ways to make change for current denominations based on previous denominations.\n\n for inner_amount in 1..=amount {\n", "file_path": "src/making_change.rs", "rank": 24, "score": 90921.57331462568 }, { "content": "fn recursive_helper(amount: u32, greatest_to_least_denominations: &[u32]) -> u32 {\n\n if amount == 0 {\n\n return 1;\n\n }\n\n\n\n let mut ways_to_make_change = 0;\n\n for index in 0..greatest_to_least_denominations.len() {\n\n let denomination = greatest_to_least_denominations[index];\n\n if denomination > amount {\n\n continue;\n\n }\n\n\n\n let amount = amount - denomination;\n\n ways_to_make_change += recursive_helper(amount, &greatest_to_least_denominations[index..]);\n\n }\n\n\n\n ways_to_make_change\n\n}\n\n\n\n#[cfg(test)]\n", 
"file_path": "src/making_change.rs", "rank": 25, "score": 86723.09936084815 }, { "content": "/// Returns None if subtree is not search tree.\n\nfn calc_search_bounds<T: Ord + Clone>(node: BinaryNodeRef<T>) -> Option<MinMax<T>> {\n\n let mut min = node.value().clone();\n\n let mut max = node.value().clone();\n\n\n\n if let Some(left) = node.left() {\n\n let bounds = calc_search_bounds(left)?;\n\n // Not search tree if left sub-tree max value is greater than node value.\n\n if &bounds.max > node.value() {\n\n return None;\n\n }\n\n\n\n min = cmp::min(min, bounds.min);\n\n max = cmp::max(max, bounds.max);\n\n }\n\n\n\n if let Some(right) = node.right() {\n\n let bounds = calc_search_bounds(right)?;\n\n // Not search tree if right sub-tree min value is less than node value.\n\n if &bounds.min < node.value() {\n\n return None;\n", "file_path": "src/binary_tree_search.rs", "rank": 26, "score": 85627.89703026687 }, { "content": "/// Returns true if element is in the tree. Uses iterative breadth-first search.\n\npub fn breadth_first_find_iterative<T: PartialEq>(tree: &Tree<T>, elem: &T) -> bool {\n\n let mut nodes = VecDeque::new();\n\n nodes.push_front(tree.root());\n\n\n\n while let Some(node) = nodes.pop_front() {\n\n if node.value() == elem {\n\n return true;\n\n }\n\n\n\n for child in node.children() {\n\n nodes.push_back(child);\n\n }\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "src/tree_traversals.rs", "rank": 27, "score": 85306.08027280198 }, { "content": "/// Returns true if element is in the tree. 
Uses recursive depth-first search.\n\npub fn depth_first_find_recursive<T: PartialEq>(tree: &Tree<T>, elem: &T) -> bool {\n\n depth_first_find_recursive_helper(tree.root(), elem)\n\n}\n\n\n", "file_path": "src/tree_traversals.rs", "rank": 28, "score": 85306.08027280198 }, { "content": "pub fn depth_first_find_iterative<T: PartialEq>(tree: &Tree<T>, elem: &T) -> bool {\n\n let mut nodes = VecDeque::new();\n\n nodes.push_front(tree.root());\n\n\n\n while let Some(node) = nodes.pop_front() {\n\n if node.value() == elem {\n\n return true;\n\n }\n\n\n\n for child in node.children() {\n\n nodes.push_front(child);\n\n }\n\n }\n\n\n\n false\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n", "file_path": "src/tree_traversals.rs", "rank": 29, "score": 85297.68720527593 }, { "content": "// Recursive solution, not optimized for time or space (as asked by interviewcake).\n\nfn recursive(input: Vec<char>) -> Vec<String> {\n\n if input.len() == 1 {\n\n return vec![input[0].to_string()];\n\n }\n\n\n\n let mut permutations = vec![];\n\n for (index, &c) in input.iter().enumerate() {\n\n let mut inner_input = input.clone();\n\n inner_input.remove(index);\n\n for mut permutation in recursive(inner_input) {\n\n permutation.insert(0, c);\n\n permutations.push(permutation);\n\n }\n\n }\n\n\n\n permutations\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "src/string_permutations.rs", "rank": 30, "score": 83085.98417767684 }, { "content": "fn calculate_hash(slice: &str, modulo: u32) -> u32 {\n\n let mut hash = 0u32;\n\n for c in slice.chars() {\n\n let ascii_code = c as u8;\n\n hash = (hash * 256 + ascii_code as u32) % modulo;\n\n }\n\n hash\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn works_with_example() {\n\n assert_eq!(\n\n needle_in_haystack(\"aaabcdddbbddddabcdefghi\", \"abc\"),\n\n vec![2, 14]\n\n );\n\n }\n", "file_path": "src/needle_in_haystack.rs", "rank": 31, "score": 81718.39323547867 }, { "content": "/// Reverses each 
word in a message. For example, given a message like:\n\n/// \"pleh em\", it should mutate to: \"help me\".\n\nfn reverse_words(message: &mut [char]) {\n\n let mut current_head = 0;\n\n let message_len = message.len();\n\n for index in 0..message_len {\n\n if message[index].is_alphabetic() {\n\n continue;\n\n }\n\n\n\n // Found word, reverse.\n\n if index > 0 && current_head < index - 1 {\n\n reverse(&mut message[current_head..index])\n\n }\n\n\n\n // Assume next character is alphabetic, current_head will\n\n // be replaced if not so anyways.\n\n current_head = index + 1;\n\n }\n\n\n\n // Handle ending word.\n\n if current_head < message_len - 1 {\n\n reverse(&mut message[current_head..message_len])\n\n }\n\n}\n\n\n", "file_path": "src/reverse_words.rs", "rank": 32, "score": 74098.23197607361 }, { "content": "fn reverse<T>(slice: &mut [T]) {\n\n let mut front = 0;\n\n let mut back = slice.len() - 1;\n\n\n\n while front < back {\n\n slice.swap(front, back);\n\n front += 1;\n\n back -= 1;\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n // Helper for taking in strings during tests.\n\n fn reverse_words_order_in_string(s: &mut String) {\n\n let mut chars = s.chars().collect::<Vec<_>>();\n\n reverse_words_order(&mut chars);\n\n *s = chars.into_iter().collect();\n", "file_path": "src/reverse_words.rs", "rank": 33, "score": 72548.58666491618 }, { "content": "pub fn color_undirected_graph_with_max_degree(\n\n adjacency_table: &HashMap<NodeId, Vec<NodeId>>,\n\n max_degree: usize,\n\n) -> HashMap<NodeId, ColorId> {\n\n if adjacency_table.is_empty() {\n\n return HashMap::new();\n\n }\n\n\n\n let mut coloring = HashMap::new();\n\n\n\n // Get a random element in the adjacency_table.\n\n let (&node_id, _) = adjacency_table.iter().next().expect(\"exists\");\n\n coloring.insert(node_id, ColorId(0));\n\n\n\n // Breadth-first traversal through the graph.\n\n let mut node_ids = VecDeque::new();\n\n node_ids.push_back(node_id);\n\n while let Some(node_id) = 
node_ids.pop_front() {\n\n for &adj_node_id in &adjacency_table[&node_id] {\n\n if coloring.contains_key(&adj_node_id) {\n", "file_path": "src/graph_coloring.rs", "rank": 34, "score": 71878.24229710051 }, { "content": "pub fn has_permutation_palindrome(s: &str) -> bool {\n\n if s.is_empty() {\n\n return false;\n\n }\n\n\n\n let mut char_counts = HashMap::new();\n\n for c in s.chars() {\n\n *char_counts.entry(c).or_insert(0) += 1;\n\n }\n\n\n\n let mut has_char_with_odd_count = false;\n\n for (_c, count) in char_counts {\n\n if count % 2 == 0 {\n\n continue;\n\n }\n\n\n\n // Only one char with odd count is okay since the extra\n\n // character could be in the middle for a palindrome.\n\n if has_char_with_odd_count {\n\n return false;\n", "file_path": "src/permutation_palindrome.rs", "rank": 35, "score": 68934.16418381296 }, { "content": "fn find_duplicate_graph(values: &Vec<i32>) -> i32 {\n\n let head_index = values.len() - 1;\n\n\n\n // Follows the value at the current index to arrive to a new index.\n\n // Casting is okay since all values must be in range 1..n.\n\n let advance_index = |index| (values[index] - 1) as usize;\n\n\n\n // Walk through the graph N times to ensure we're in the cycle.\n\n let index_in_the_cycle = {\n\n let mut curr = head_index;\n\n for _ in 0..values.len() {\n\n curr = advance_index(curr);\n\n }\n\n curr\n\n };\n\n\n\n // Find size of cycle by walking until reaching the same index again.\n\n let size_of_cycle = {\n\n let mut next = advance_index(index_in_the_cycle);\n\n let mut count = 1;\n", "file_path": "src/find_duplicate_space.rs", "rank": 36, "score": 66661.8315919124 }, { "content": "fn find_duplicate_binary_search(values: &Vec<i32>) -> i32 {\n\n let n = values.len() - 1;\n\n\n\n let mut lower = 1;\n\n let mut upper = n as i32;\n\n\n\n loop {\n\n let mid = (lower + upper) / 2;\n\n let mut in_lower = 0;\n\n let mut in_upper = 0;\n\n let mut in_mid = 0;\n\n for &value in values {\n\n if lower <= value && value < mid {\n\n in_lower += 
1;\n\n } else if mid < value && value <= upper {\n\n in_upper += 1;\n\n } else if mid == value {\n\n in_mid += 1;\n\n }\n\n }\n", "file_path": "src/find_duplicate_space.rs", "rank": 37, "score": 65278.45521487893 }, { "content": "pub fn find_rotation_point(list: &[&str]) -> usize {\n\n let mut lower_index = 0;\n\n let mut upper_index = list.len() - 1;\n\n\n\n while lower_index < upper_index - 1 {\n\n let upper = list[upper_index];\n\n\n\n let mid_index = (lower_index + upper_index) / 2;\n\n let mid = list[mid_index];\n\n\n\n if mid > upper {\n\n lower_index = mid_index;\n\n } else {\n\n upper_index = mid_index;\n\n }\n\n }\n\n\n\n upper_index\n\n}\n\n\n", "file_path": "src/find_rotation_point.rs", "rank": 38, "score": 64168.958301034494 }, { "content": "fn bottoms_up(cake_types: &[CakeType], bag_size: u32) -> f32 {\n\n let mut max_bag_values = vec![];\n\n for curr_bag_size in 0..=bag_size {\n\n let mut max_bag_value = 0.0;\n\n for cake_type in cake_types {\n\n // Handle cakes with no weight edge case (if value, return INFINITY, otherwise ignore cake).\n\n if cake_type.weight == 0 {\n\n if cake_type.value > 0.0 {\n\n return std::f32::INFINITY;\n\n } else {\n\n continue;\n\n }\n\n }\n\n\n\n // Can't hold cake in the current bag size.\n\n if cake_type.weight > curr_bag_size {\n\n continue;\n\n }\n\n\n\n let other_bag_value =\n", "file_path": "src/cake_thief.rs", "rank": 39, "score": 64087.86421106697 }, { "content": "pub fn intersection(a: &Rect, b: &Rect) -> Option<Rect> {\n\n let max_y = cmp::min(a.max_y(), b.max_y());\n\n let min_y = cmp::max(a.min_y, b.min_y);\n\n let max_x = cmp::min(a.max_x(), b.max_x());\n\n let min_x = cmp::max(a.min_x, b.min_x);\n\n\n\n // No intersection if no width / height.\n\n if max_y <= min_y || max_x <= min_x {\n\n return None;\n\n }\n\n\n\n Some(Rect {\n\n min_x,\n\n min_y,\n\n width: (max_x - min_x) as u32,\n\n height: (max_y - min_y) as u32,\n\n })\n\n}\n\n\n\n#[cfg(test)]\n", "file_path": "src/rectangular_love.rs", "rank": 40, 
"score": 63861.934989081594 }, { "content": "// Given a string (\"hello ellie\") and a search string (\"el\"), return\n\n// the HTML highlighted string: \"h<b>el</b>lo <b>el</b>lie\".\n\npub fn highlight(input: &str, search: &str) -> String {\n\n let mut highlighted = String::new();\n\n\n\n let mut in_progress_match = None;\n\n for i in 0..input.len() {\n\n let i_end = i + search.len();\n\n\n\n let can_compare_with_search = i_end <= input.len();\n\n if can_compare_with_search && &input[i..i_end] == search {\n\n in_progress_match = Some(if let Some((start, _end)) = in_progress_match {\n\n (start, i_end)\n\n } else {\n\n (i, i_end)\n\n });\n\n } else {\n\n match in_progress_match {\n\n None => highlighted.push_str(&input[i..=i]),\n\n Some((start, end)) => {\n\n if i == end {\n\n highlighted.push_str(\"<b>\");\n", "file_path": "src/highlight_search.rs", "rank": 41, "score": 63295.06924703343 }, { "content": "fn needle_in_haystack_rk(haystack: &str, needle: &str) -> Vec<usize> {\n\n if needle.is_empty() {\n\n return vec![];\n\n }\n\n\n\n let mut found_indices = vec![];\n\n let haystack_chars = haystack.chars().collect::<Vec<_>>();\n\n\n\n // Use a large prime smaller than than u32::MAX / BASE (256).\n\n const PRIME: u32 = 104_173;\n\n\n\n // max_base is equal to the base (256) to the needle.len() - 1 power.\n\n // This is used to 'pop' the char being removed from the hash.\n\n let max_base = {\n\n let mut e = 256;\n\n for _ in 2..needle.len() {\n\n e = (e * e) % PRIME;\n\n }\n\n e\n\n };\n", "file_path": "src/needle_in_haystack.rs", "rank": 42, "score": 62723.07369327019 }, { "content": "fn needle_in_haystack_naive(haystack: &str, needle: &str) -> Vec<usize> {\n\n if needle.is_empty() {\n\n return vec![];\n\n }\n\n\n\n let mut found_indices = vec![];\n\n\n\n for i in 0..haystack.len() {\n\n // If end of slice is out-of-bounds, exit.\n\n let slice_end = i + needle.len();\n\n if slice_end > haystack.len() {\n\n break;\n\n }\n\n\n\n if &haystack[i..slice_end] == needle 
{\n\n found_indices.push(i);\n\n }\n\n }\n\n\n\n found_indices\n", "file_path": "src/needle_in_haystack.rs", "rank": 43, "score": 62723.07369327019 }, { "content": "pub fn is_superbalanced<T>(tree: &BinaryTree<T>) -> bool {\n\n let mut min_leaf_depth = None;\n\n let mut max_leaf_depth = None;\n\n\n\n // Min and max must be populated by this function.\n\n populate_min_max_leaf_height(tree.root(), 0, &mut min_leaf_depth, &mut max_leaf_depth);\n\n\n\n let height_diff = max_leaf_depth.expect(\"exists\") - min_leaf_depth.expect(\"exists\");\n\n height_diff <= 1\n\n}\n\n\n", "file_path": "src/binary_tree_superbalanced.rs", "rank": 44, "score": 60448.2991729843 }, { "content": "fn naive<T: Debug + Clone>(tree: &BinaryTree<T>) -> Option<T> {\n\n let mut highest_buffer = VecDeque::with_capacity(2);\n\n in_order_traversal(Some(tree.root()), &mut |val| {\n\n highest_buffer.push_back(val.clone());\n\n if highest_buffer.len() > 2 {\n\n highest_buffer.pop_front();\n\n }\n\n });\n\n\n\n if highest_buffer.len() < 2 {\n\n None\n\n } else {\n\n Some(highest_buffer[0].clone())\n\n }\n\n}\n\n\n", "file_path": "src/binary_tree_second_largest.rs", "rank": 53, "score": 57863.23368081935 }, { "content": "fn best<T: Debug + Clone>(tree: &BinaryTree<T>) -> Option<T> {\n\n let (fully_right, fully_right_parent) = {\n\n let mut parent = None;\n\n let mut current = tree.root();\n\n while let Some(right) = current.right() {\n\n parent = Some(current);\n\n current = right;\n\n }\n\n (current, parent)\n\n };\n\n\n\n match (fully_right.left(), fully_right_parent) {\n\n (Some(mut left), _) => {\n\n let fully_right_of_left_subtree = {\n\n while let Some(right) = left.right() {\n\n left = right;\n\n }\n\n left\n\n };\n\n Some(fully_right_of_left_subtree)\n", "file_path": "src/binary_tree_second_largest.rs", "rank": 54, "score": 57863.23368081935 }, { "content": "fn better<T: Debug + Clone>(tree: &BinaryTree<T>) -> Option<T> {\n\n let mut n = 2;\n\n find_nth_max(Some(tree.root()), &mut n)\n\n}\n\n\n", 
"file_path": "src/binary_tree_second_largest.rs", "rank": 55, "score": 57863.23368081935 }, { "content": "fn depth_first_find_recursive_helper<T: PartialEq>(node: NodeRef<T>, elem: &T) -> bool {\n\n if node.value() == elem {\n\n return true;\n\n }\n\n\n\n for child in node.children() {\n\n let found_in_child_tree = depth_first_find_recursive_helper(child, elem);\n\n if found_in_child_tree {\n\n return true;\n\n }\n\n }\n\n\n\n false\n\n}\n\n\n", "file_path": "src/tree_traversals.rs", "rank": 56, "score": 55151.831202833186 }, { "content": "struct MinMax<T> {\n\n min: T,\n\n max: T,\n\n}\n\n\n", "file_path": "src/binary_tree_search.rs", "rank": 57, "score": 49300.38171371381 }, { "content": "fn populate_nodes_reachable(\n\n id: u32,\n\n directed_edges: &HashMap<u32, HashSet<u32>>,\n\n nodes_reachable_cache: &mut HashMap<u32, u32>,\n\n) {\n\n // If already in cache, don't compute.\n\n if nodes_reachable_cache.contains_key(&id) {\n\n return;\n\n }\n\n\n\n let mut nodes_reachable = 0;\n\n for &other_id in &directed_edges[&id] {\n\n populate_nodes_reachable(other_id, directed_edges, nodes_reachable_cache);\n\n nodes_reachable += 1 + nodes_reachable_cache[&other_id];\n\n }\n\n nodes_reachable_cache.insert(id, nodes_reachable);\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n", "file_path": "src/subsequence_reconstruction.rs", "rank": 58, "score": 33061.02018461237 }, { "content": " }\n\n\n\n products\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn works() {\n\n assert_eq!(\n\n products_of_all_except_at_index(vec![1, 7, 3, 4]),\n\n vec![84, 12, 28, 21]\n\n );\n\n }\n\n\n\n #[test]\n\n fn has_zeros() {\n\n assert_eq!(\n", "file_path": "src/product_of_all_other_numbers.rs", "rank": 59, "score": 29398.787168655803 }, { "content": " products_of_all_except_at_index(vec![1, 0, 3, 4]),\n\n vec![0, 12, 0, 0]\n\n );\n\n }\n\n\n\n #[test]\n\n fn negatives() {\n\n assert_eq!(\n\n products_of_all_except_at_index(vec![1, -1, 3, 4]),\n\n vec![-12, 12, -4, 
-3]\n\n );\n\n }\n\n\n\n #[test]\n\n fn less_than_2_elems() {\n\n assert_eq!(products_of_all_except_at_index(vec![3]), vec![]);\n\n assert_eq!(products_of_all_except_at_index(vec![]), vec![]);\n\n }\n\n}\n", "file_path": "src/product_of_all_other_numbers.rs", "rank": 60, "score": 29397.571830300152 }, { "content": "fn populate_min_max_leaf_height<T>(\n\n node: BinaryNodeRef<T>,\n\n depth: u32,\n\n min_leaf_depth: &mut Option<u32>,\n\n max_leaf_depth: &mut Option<u32>,\n\n) {\n\n let left = node.left();\n\n let right = node.right();\n\n if left.is_none() && right.is_none() {\n\n // Found leaf node, populate min / max if necessary.\n\n if min_leaf_depth.is_none() || depth < min_leaf_depth.expect(\"exists\") {\n\n *min_leaf_depth = Some(depth);\n\n }\n\n if max_leaf_depth.is_none() || depth > max_leaf_depth.expect(\"exists\") {\n\n *max_leaf_depth = Some(depth);\n\n }\n\n return;\n\n }\n\n\n\n if let Some(left) = left {\n", "file_path": "src/binary_tree_superbalanced.rs", "rank": 61, "score": 28957.59898060479 }, { "content": "\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn works() {\n\n assert_eq!(has_two_movies_for_flight(10, vec![8, 1, 1, 2, 7]), true);\n\n }\n\n\n\n #[test]\n\n fn movie_longer_than_flight_works() {\n\n assert_eq!(has_two_movies_for_flight(10, vec![8, 1, 2000, 7, 2]), true);\n\n }\n\n\n\n #[test]\n\n fn works_with_movie_length_zero() {\n\n assert_eq!(has_two_movies_for_flight(10, vec![0, 1, 7, 10]), true);\n\n }\n\n\n", "file_path": "src/inflight_entertainment.rs", "rank": 62, "score": 16.19657779624813 }, { "content": "use std::cmp;\n\nuse std::collections::HashSet;\n\nuse std::mem;\n\n\n\n#[derive(Debug, PartialEq, Eq, Hash)]\n\npub struct Meeting {\n\n start: i32,\n\n end: i32,\n\n}\n\n\n\nimpl Meeting {\n\n pub fn new(start: i32, end: i32) -> Self {\n\n assert!(start <= end);\n\n Self { start, end }\n\n }\n\n}\n\n\n", "file_path": "src/merging_meetings.rs", "rank": 63, "score": 15.74223641879873 }, { "content": 
"/*\n\nProblem:\n\nGiven two Rectangles, return the intersection between the rectangles, if it exists.\n\n*/\n\nuse std::cmp;\n\n\n\n#[derive(Clone, Debug, PartialEq)]\n\npub struct Rect {\n\n min_x: i32,\n\n min_y: i32,\n\n\n\n width: u32,\n\n height: u32,\n\n}\n\n\n\nimpl Rect {\n\n fn max_x(&self) -> i32 {\n\n self.min_x + (self.width as i32)\n\n }\n\n\n\n fn max_y(&self) -> i32 {\n\n self.min_y + (self.height as i32)\n\n }\n\n}\n\n\n", "file_path": "src/rectangular_love.rs", "rank": 64, "score": 14.456655145214805 }, { "content": " #[test]\n\n fn no_pair_with_half_length() {\n\n assert_eq!(has_two_movies_for_flight(10, vec![8, 5, 1, 1, 4, 7]), false);\n\n }\n\n\n\n #[test]\n\n fn pair_with_half_length() {\n\n assert_eq!(\n\n has_two_movies_for_flight(10, vec![8, 5, 1, 1, 4, 5, 7]),\n\n true\n\n );\n\n }\n\n}\n", "file_path": "src/inflight_entertainment.rs", "rank": 65, "score": 13.759575482652254 }, { "content": "use std::collections::{HashMap, HashSet, VecDeque};\n\n/*\n\nProblem:\n\nGiven an undirected graph with maximum degree D,\n\nreturn a coloring of the graph with D + 1 colors.\n\n*/\n\n\n\n/*\n\nNaive solution:\n\n1. Choose a random color for a starting node.\n\n2. Use breadth-first traversal, for each node not colored:\n\n 2a. Find all unavailable colors from adj nodes.\n\n 2b. Choose first available color.\n\n 2c. 
Add newly colored nodes to be visited.\n\n\n\nTime complexity: O(N * D)\n\n - Visit each node and do O(D) operations.\n\n - But actually we're only traversing each edge for a node at most twice\n\n for the entire algorithm, so we're actually doing O(N + M).\n\nSpace complexity: O(N)\n\n - At most need to store O(D) per iteration which is bounded by O(N)\n\n - Breadth-first traversal queue is at most O(N) size.\n\n*/\n\n\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub struct NodeId(u32);\n\n#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]\n\npub struct ColorId(usize);\n\n\n", "file_path": "src/graph_coloring.rs", "rank": 66, "score": 13.313396347968578 }, { "content": "use std::collections::{HashMap, HashSet};\n\n\n\n// Given various subsequences of an array of unique integers, reconstruct the original array:\n\n//\n\n// Example: [1, 3, 5], [1, 3, 9], [9, 5]\n\n// Output : [1, 3, 9, 5]\n\n//\n\n// There may be multiple valid reconstructions of the original array. Return a valid reconstruction.\n\n//\n\n// Example: [1, 3], [5, 3]\n\n// Output : [1, 5, 3] or [5, 1, 3]\n", "file_path": "src/subsequence_reconstruction.rs", "rank": 67, "score": 11.69700463399788 }, { "content": " }\n\n\n\n true\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn non_fifo() {\n\n let is_fifo = validate_fifo_orders(vec![1, 3, 5], vec![2, 4, 6], vec![1, 2, 4, 6, 5, 3]);\n\n assert_eq!(is_fifo, false);\n\n }\n\n\n\n #[test]\n\n fn fifo() {\n\n let is_fifo = validate_fifo_orders(vec![1, 3, 5], vec![2, 4, 6], vec![1, 2, 3, 5, 4, 6]);\n\n assert_eq!(is_fifo, true);\n\n }\n", "file_path": "src/cafe_orders.rs", "rank": 68, "score": 10.130128452957274 }, { "content": " populate_nodes_reachable(id, &directed_edges, &mut nodes_reachable);\n\n }\n\n\n\n // Reconstruction is created by ordering the ids by the number of nodes reachable.\n\n let mut reconstruction = nodes_reachable.keys().cloned().collect::<Vec<_>>();\n\n reconstruction.sort_by_key(|id| 
nodes_reachable[id]);\n\n // We want it to be sorted in descending order.\n\n reconstruction.reverse();\n\n reconstruction\n\n}\n\n\n", "file_path": "src/subsequence_reconstruction.rs", "rank": 69, "score": 10.074549789795348 }, { "content": "use ego_tree::{NodeRef, Tree};\n\nuse std::collections::VecDeque;\n\n\n\n/// Returns true if element is in the tree. Uses iterative breadth-first search.\n", "file_path": "src/tree_traversals.rs", "rank": 70, "score": 9.931620837299153 }, { "content": " use ego_tree::tree;\n\n\n\n macro_rules! find_tests {\n\n ($test_name:ident, $find_fn:expr) => {\n\n #[test]\n\n fn $test_name() {\n\n let tree = tree!('a' => { 'b', 'c' => { 'd', 'e' } });\n\n assert_eq!($find_fn(&tree, &'a'), true);\n\n assert_eq!($find_fn(&tree, &'b'), true);\n\n assert_eq!($find_fn(&tree, &'c'), true);\n\n assert_eq!($find_fn(&tree, &'d'), true);\n\n assert_eq!($find_fn(&tree, &'e'), true);\n\n assert_eq!($find_fn(&tree, &'f'), false);\n\n }\n\n };\n\n }\n\n\n\n find_tests!(bfs_iter_works, breadth_first_find_iterative);\n\n find_tests!(dfs_iter_works, depth_first_find_iterative);\n\n find_tests!(dfs_recur_works, depth_first_find_recursive);\n\n}\n", "file_path": "src/tree_traversals.rs", "rank": 71, "score": 9.69500211031351 }, { "content": " }\n\n }\n\n\n\n sorted\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn merge_sorted_lists_works() {\n\n let my_list = vec![3, 4, 6, 10, 11, 15].into_iter().collect();\n\n let alices_list = vec![1, 5, 8, 12, 14, 19].into_iter().collect();\n\n\n\n assert_eq!(\n\n merge_sorted_lists(my_list, alices_list),\n\n vec![1, 3, 4, 5, 6, 8, 10, 11, 12, 14, 15, 19]\n\n );\n\n }\n\n}\n", "file_path": "src/merge_lists.rs", "rank": 72, "score": 9.69134452349852 }, { "content": "/*\n\nProblem:\n\nGiven an amount and coin denominations, determine the number of ways you\n\ncan give change to meet that amount.\n\n\n\nExample:\n\nAmount = 4, Denominations: [1, 2, 3]\n\nWays of making change = 4:\n\n 1. 
[1, 1, 1, 1]\n\n 2. [2, 1, 1]\n\n 3. [2, 2]\n\n 4. [3, 1]\n\n*/\n", "file_path": "src/making_change.rs", "rank": 73, "score": 9.306899367415062 }, { "content": "the length of the array, we can start from the last element in the array (as there\n\nare no values to that position).\n\n\n\nThe duplicate value is also the position at the beginning of the cycle.\n\n\n\nThe solution is as follows:\n\n1. We can start from the head of the graph (the last element) and advance N\n\n steps to ensure that we're in the cycle that must exist.\n\n2. We can then count the size of the cycle by advancing until we reach the same position.\n\n3. We can then find the beginning of the cycle by keeping two pointers, one at the head and\n\n one which is CYCLE_SIZE positions ahead of the head. By advancing these pointers together,\n\n we can find the beginning of the cycle when they are at the same position.\n\n4. The position of the beginning of the cycle is a duplicate value. (Return this).\n\n\n\nTime: O(N) - Ending up in the cycle takes N ops, finding the cycle is bounded by N, and\n\n finding the beginning of the cycle is bounded by N.\n\nSpace: O(1) - We end up keeping a constant number of pointers / counters.\n\n*/\n", "file_path": "src/find_duplicate_space.rs", "rank": 74, "score": 9.265832549624983 }, { "content": "\n\n #[test]\n\n fn empty() {\n\n let is_fifo = validate_fifo_orders(vec![], vec![2, 4, 6], vec![2, 4, 6]);\n\n assert_eq!(is_fifo, true);\n\n }\n\n\n\n #[test]\n\n fn repeats() {\n\n let is_fifo = validate_fifo_orders(vec![10], vec![2, 4, 6, 4], vec![2, 10, 4, 6, 4]);\n\n assert_eq!(is_fifo, true);\n\n }\n\n}\n", "file_path": "src/cafe_orders.rs", "rank": 75, "score": 9.048239146925907 }, { "content": "\n\n #[test]\n\n fn ends_in_cycle_of_size_one() {\n\n assert_eq!(find_duplicate(&vec![1, 2, 3, 2]), 2);\n\n }\n\n\n\n #[test]\n\n fn more_examples() {\n\n assert_eq!(find_duplicate(&vec![3, 4, 2, 3, 1, 5]), 3);\n\n assert_eq!(find_duplicate(&vec![3, 1, 2, 2]), 2);\n\n 
assert_eq!(find_duplicate(&vec![4, 3, 1, 1, 4]), 4);\n\n }\n\n}\n", "file_path": "src/find_duplicate_space.rs", "rank": 76, "score": 8.56633828693576 }, { "content": "mod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn works_for_example() {\n\n assert_eq!(making_change(4, &[1, 2, 3]), 4);\n\n }\n\n\n\n #[test]\n\n fn works_for_another_case() {\n\n assert_eq!(making_change(6, &[1, 2]), 4);\n\n }\n\n\n\n #[test]\n\n fn when_amount_is_zero() {\n\n assert_eq!(making_change(0, &[1, 2]), 1);\n\n }\n\n\n\n #[test]\n\n fn when_amount_can_not_be_made() {\n\n assert_eq!(making_change(4, &[3]), 0);\n\n }\n\n}\n", "file_path": "src/making_change.rs", "rank": 77, "score": 8.362948537354612 }, { "content": " assert_eq!(highest_product_of_three(vec![-4, 3, 1, 4]), 12);\n\n }\n\n\n\n #[test]\n\n fn two_negative_highest() {\n\n assert_eq!(highest_product_of_three(vec![1, 2, -2, -3, 4]), 24);\n\n }\n\n\n\n #[test]\n\n fn all_negative() {\n\n assert_eq!(highest_product_of_three(vec![-1, -2, -3, -4]), -6);\n\n }\n\n}\n", "file_path": "src/highest_product_of_three.rs", "rank": 78, "score": 8.284197329823527 }, { "content": " use super::*;\n\n\n\n #[test]\n\n fn works_on_example() {\n\n let reconstruction =\n\n subsequence_reconstruction(&vec![vec![1, 3, 5], vec![1, 3, 9], vec![9, 5]]);\n\n assert_eq!(reconstruction, vec![1, 3, 9, 5]);\n\n }\n\n\n\n #[test]\n\n fn works_on_missing_information() {\n\n let reconstruction = subsequence_reconstruction(&vec![vec![1, 3], vec![5, 3]]);\n\n assert_eq!(reconstruction.len(), 3);\n\n let index1 = reconstruction.iter().position(|&x| x == 1).expect(\"in\");\n\n let index3 = reconstruction.iter().position(|&x| x == 3).expect(\"in\");\n\n let index5 = reconstruction.iter().position(|&x| x == 5).expect(\"in\");\n\n assert!(index1 < index3);\n\n assert!(index5 < index3);\n\n }\n\n\n", "file_path": "src/subsequence_reconstruction.rs", "rank": 79, "score": 8.223325631630157 }, { "content": "use std::collections::{HashMap, VecDeque};\n\nuse 
std::hash::Hash;\n\n/*\n\nProblem:\n\nGiven an graph, return any valid shortest path from the start node to\n\nthe destination node.\n\n*/\n\n\n\n/*\n\nIterative Solution:\n\n1. Traverse the graph in breadth-first order, at each node:\n\n 1a. Keep a hashmap mantaining backwards pointers from node -> previous node.\n\n 1a. For each adjacent node:\n\n 1aa. If adjacent node has a previous node already, then continue.\n\n 1ab. If adjacent node is the destination node, traverse the backward pointers\n\n to reconstruct the path to the destination node.\n\n 1ac. Add backward pointer for adjacent node -> current node.\n\n 1ad. Add adjacent node to queue to explore.\n\n2. If reach end of traversal, no path to destination.\n\n\n\nTime: O(N + M), need to visit all nodes in graph, and also consider each edge\n\n in the graph.\n\nSpace: O(N), traversal and backwards pointers is bounded by number of nodes.\n\n*/\n", "file_path": "src/graph_shortest_path.rs", "rank": 80, "score": 8.010689095615593 }, { "content": " if current_type.weight > bag_size {\n\n continue;\n\n }\n\n\n\n let new_size = bag_size - current_type.weight;\n\n let inner_max_value = max_bag_value(cake_types, new_size, cache) + current_type.value;\n\n max_value = cmp::max(max_value, inner_max_value);\n\n }\n\n cache.insert(bag_size, max_value);\n\n max_value\n\n}\n\n\n\nTime complexity: O(N * M) where N is the bag size given and M is the number of cake types.\n\n - We calculate the max_bag_value for at max N values (0..bag_size) and each calculation\n\n requires looking through each cake type.\n\nSpace complexity: O(N)\n\n - Need O(N) for the cache.\n\n - Need O(N) for the stack size (worst case we remove 1 from bag size).\n\n*/\n\nuse std::collections::HashMap;\n\n\n\n#[derive(Debug)]\n\npub struct CakeType {\n\n weight: u32,\n\n value: f32,\n\n}\n\n\n", "file_path": "src/cake_thief.rs", "rank": 81, "score": 7.892679871914705 }, { "content": " }\n\n\n\n has_char_with_odd_count = true;\n\n }\n\n\n\n 
true\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn works() {\n\n assert_eq!(has_permutation_palindrome(\"civic\"), true);\n\n }\n\n\n\n #[test]\n\n fn works_with_permutation() {\n\n assert_eq!(has_permutation_palindrome(\"ivicc\"), true);\n", "file_path": "src/permutation_palindrome.rs", "rank": 82, "score": 7.865404049903042 }, { "content": "\n\n let needle_hash = calculate_hash(needle, PRIME);\n\n let mut slice_hash = None;\n\n for i in 0..haystack.len() {\n\n // If end of slice is out-of-bounds, exit.\n\n let slice_end = i + needle.len();\n\n if slice_end > haystack.len() {\n\n break;\n\n }\n\n\n\n let current_hash = if let Some(mut hash) = slice_hash {\n\n // Calculate new hash with new character.\n\n let to_remove = (haystack_chars[i - 1] as u32 * max_base) % PRIME;\n\n if to_remove > hash {\n\n hash = PRIME - (to_remove - hash);\n\n } else {\n\n hash = (hash - to_remove) % PRIME;\n\n }\n\n hash = (hash * 256) % PRIME;\n\n hash = (hash + haystack_chars[slice_end - 1] as u32) % PRIME;\n", "file_path": "src/needle_in_haystack.rs", "rank": 83, "score": 7.812314941586566 }, { "content": "}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn works() {\n\n assert_eq!(get_max_profit(vec![10, 7, 5, 8, 11, 9]), 6);\n\n }\n\n\n\n #[test]\n\n fn negative_profit() {\n\n assert_eq!(get_max_profit(vec![13, 11, 8, 6, 1, 0]), -1);\n\n }\n\n\n\n #[test]\n\n fn empty() {\n\n assert_eq!(get_max_profit(vec![]), 0);\n\n }\n\n\n\n #[test]\n\n fn only_one() {\n\n assert_eq!(get_max_profit(vec![5]), 0);\n\n }\n\n}\n", "file_path": "src/apple_stocks.rs", "rank": 84, "score": 7.54111554681552 }, { "content": "#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn works() {\n\n let words = vec![\"p\", \"r\", \"s\", \"u\", \"x\", \"a\", \"b\", \"c\", \"e\", \"k\", \"o\"];\n\n assert_eq!(find_rotation_point(&words), 5);\n\n\n\n let words = vec![\n\n \"p\", \"r\", \"s\", \"u\", \"x\", \"x\", \"x\", \"x\", \"y\", 
\"a\", \"b\", \"c\", \"e\", \"k\", \"o\",\n\n ];\n\n assert_eq!(find_rotation_point(&words), 9);\n\n\n\n let words = vec![\"p\", \"s\", \"b\", \"o\"];\n\n assert_eq!(find_rotation_point(&words), 2);\n\n }\n\n}\n", "file_path": "src/find_rotation_point.rs", "rank": 85, "score": 7.469907366882088 }, { "content": " adjacency_table.insert(2, vec![1, 4, 3]);\n\n adjacency_table.insert(3, vec![1, 4, 2]);\n\n adjacency_table.insert(4, vec![2, 3, 1]);\n\n\n\n let shortest_path = graph_shortest_path(&adjacency_table, 2, 3);\n\n assert!(shortest_path.is_some());\n\n assert_that!(&shortest_path.expect(\"exists\"), eq(vec![2, 3]));\n\n }\n\n\n\n #[test]\n\n fn works_for_single_node_going_to_self() {\n\n // Graph looks like:\n\n // (1)\n\n let mut adjacency_table = HashMap::new();\n\n adjacency_table.insert(1, vec![]);\n\n\n\n let shortest_path = graph_shortest_path(&adjacency_table, 1, 1);\n\n assert!(shortest_path.is_some());\n\n assert_that!(&shortest_path.expect(\"exists\"), eq(vec![1]));\n\n }\n\n}\n", "file_path": "src/graph_shortest_path.rs", "rank": 86, "score": 7.290916929408825 }, { "content": " use super::*;\n\n\n\n #[test]\n\n fn works_on_basic() {\n\n assert_eq!(\n\n string_permutation(\"abc\"),\n\n vec![\"abc\", \"acb\", \"bac\", \"bca\", \"cab\", \"cba\"]\n\n );\n\n }\n\n\n\n #[test]\n\n fn works_on_empty() {\n\n assert_eq!(string_permutation(\"\"), Vec::<String>::new());\n\n }\n\n\n\n #[test]\n\n fn works_on_single() {\n\n assert_eq!(string_permutation(\"a\"), vec![\"a\"]);\n\n }\n\n}\n", "file_path": "src/string_permutations.rs", "rank": 87, "score": 7.2535162972128395 }, { "content": " backwards_pointers.insert(adj_node_id.clone(), Some(node_id.clone()));\n\n to_visit.push_back(adj_node_id.clone());\n\n }\n\n }\n\n\n\n // Traversal finished, but no path to destination was found.\n\n None\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use galvanic_assert::matchers::*;\n\n use galvanic_assert::*;\n\n\n\n #[test]\n\n fn works_for_simple() {\n\n // 
Graph looks like:\n\n // /---(2)----\\\n\n // (1) (4)\n", "file_path": "src/graph_shortest_path.rs", "rank": 88, "score": 7.231661769646108 }, { "content": "/*\n\nProblem:\n\nGiven a list of cake types (a cake type has a weight and a monetary value), of which you can take\n\nan unlimited amount, what is the maximum value you can store inside a bag of size S?\n\n\n\nExample:\n\ncake_types = [(7, 160), (3, 90), (2, 15)]\n\ncapacity = 20\n\n\n\n(6 of middle cake and 1 of last type of cake)\n\nmax_bag_value(cake_types, capacity) = 555\n\n*/\n\n\n\n/*\n\nNotes:\n\nThinking about sorting the list of cake tuples, perhaps by (value / weight).\n\nFor the example:\n\n(7, 160) = 160/7 = 22.85\n\n(3, 90) = 90/3 = 30\n\n(2, 15) = 15/2 = 7.5\n", "file_path": "src/cake_thief.rs", "rank": 89, "score": 7.102802213194279 }, { "content": "use ego_binary_tree::{BinaryNodeRef, BinaryTree};\n\nuse std::collections::VecDeque;\n\nuse std::fmt::Debug;\n\n\n\n/*\n\nProblem:\n\nReturn the 2nd largest value in a binary search tree.\n\n*/\n", "file_path": "src/binary_tree_second_largest.rs", "rank": 90, "score": 7.041291320904869 }, { "content": " }\n\n\n\n #[test]\n\n fn false_cases_work() {\n\n assert_eq!(has_permutation_palindrome(\"civil\"), false);\n\n assert_eq!(has_permutation_palindrome(\"livci\"), false);\n\n }\n\n\n\n #[test]\n\n fn odd_character_counts_greater_than_1() {\n\n assert_eq!(has_permutation_palindrome(\"iii\"), true);\n\n assert_eq!(has_permutation_palindrome(\"iiiii\"), true);\n\n assert_eq!(has_permutation_palindrome(\"iiiiib\"), false);\n\n }\n\n\n\n #[test]\n\n fn empty_string_returns_false() {\n\n assert_eq!(has_permutation_palindrome(\"\"), false);\n\n }\n\n}\n", "file_path": "src/permutation_palindrome.rs", "rank": 91, "score": 6.860668194550228 }, { "content": " // \\---(3)----/\n\n let mut adjacency_table = HashMap::new();\n\n adjacency_table.insert(1, vec![2, 3]);\n\n adjacency_table.insert(2, vec![1, 4]);\n\n adjacency_table.insert(3, vec![1, 4]);\n\n 
adjacency_table.insert(4, vec![2, 3]);\n\n\n\n let shortest_path = graph_shortest_path(&adjacency_table, 2, 3);\n\n assert!(shortest_path.is_some());\n\n assert_that!(\n\n &shortest_path.expect(\"exists\"),\n\n any_of!(eq(vec![2, 1, 3]), eq(vec![2, 4, 3]))\n\n );\n\n }\n\n\n\n #[test]\n\n fn works_for_simple_with_no_path() {\n\n // Graph looks like:\n\n // /---(2)----\\\n\n // (1) (4) (5)\n", "file_path": "src/graph_shortest_path.rs", "rank": 92, "score": 6.796045068433519 }, { "content": " while behind != ahead {\n\n behind = advance_index(behind);\n\n ahead = advance_index(ahead);\n\n }\n\n\n\n // Now that we're at the beginning of the cycle, the\n\n // duplicate value is the \"position\" (index + 1).\n\n (behind + 1) as i32\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn works() {\n\n assert_eq!(find_duplicate(&vec![1, 3, 3, 2]), 3);\n\n assert_eq!(find_duplicate(&vec![1, 2, 3, 2]), 2);\n\n assert_eq!(find_duplicate(&vec![2, 3, 1, 1]), 1);\n\n }\n", "file_path": "src/find_duplicate_space.rs", "rank": 93, "score": 6.736938097666915 }, { "content": "pub mod airport_gates;\n\npub mod apple_stocks;\n\npub mod binary_tree_search;\n\npub mod binary_tree_second_largest;\n\npub mod binary_tree_superbalanced;\n\npub mod cafe_orders;\n\npub mod cake_thief;\n\npub mod fib;\n\npub mod find_duplicate_space;\n\npub mod find_rotation_point;\n\npub mod graph_coloring;\n\npub mod graph_shortest_path;\n\npub mod highest_product_of_three;\n\npub mod highlight_search;\n\npub mod inflight_entertainment;\n\npub mod making_change;\n\npub mod merge_lists;\n\npub mod merging_meetings;\n\npub mod needle_in_haystack;\n\npub mod permutation_palindrome;\n\npub mod product_of_all_other_numbers;\n\npub mod rectangular_love;\n\npub mod reverse_words;\n\npub mod string_permutations;\n\npub mod subsequence_reconstruction;\n\npub mod tree_traversals;\n", "file_path": "src/lib.rs", "rank": 94, "score": 6.636249573820267 }, { "content": " }\n\n\n\n #[test]\n\n fn 
reverse_words_order_works() {\n\n let mut input = \"cake pound steal\".into();\n\n let output = \"steal pound cake\";\n\n\n\n reverse_words_order_in_string(&mut input);\n\n assert_eq!(input, output);\n\n }\n\n\n\n #[test]\n\n fn reverse_words_order_works_ends_with_non_alphabetic() {\n\n let mut input = \"foo bar \".into();\n\n let output = \"bar foo \";\n\n\n\n reverse_words_order_in_string(&mut input);\n\n assert_eq!(input, output);\n\n }\n\n}\n", "file_path": "src/reverse_words.rs", "rank": 95, "score": 6.572934574052624 }, { "content": "The greedy approach would work in this case because it would take from middle\n\nuntil there was only 1 left for the last cake.\n\n\n\nHowever, the greedy approach does not work in this case:\n\ncake_types = [(3, 6), (2, 3.9), (1, 1)]\n\ncapacity = 4\n\n\n\nEven though (3, 6) has a better price to weight ratio, remaining capacity can only fit (1, 1),\n\nit's better to take two (2, 3.9) for a value of 7.8.\n\n\n\n---\n\n\n\nA bottoms-up approach might be more appropriate here. 
We can calculate the maximum value\n\nfrom the answer of the same problem with different capacity.\n\n\n\nNamely, for the example that does not work for greedy above:\n\ncake_types = [(3, 6), (2, 3.9), (1, 1)]\n\ncapacity = 4\n\n\n\nf(4) = max(f(1) + 6, f(2) + 3.9, f(3) + 1).\n", "file_path": "src/cake_thief.rs", "rank": 96, "score": 6.557971156872741 }, { "content": " .into_iter()\n\n .collect()\n\n )\n\n }\n\n\n\n #[test]\n\n fn merge_meetings_works_minimum_overlap() {\n\n assert_eq!(\n\n merge_meetings(vec![\n\n Meeting::new(3, 5),\n\n Meeting::new(5, 8),\n\n Meeting::new(0, 1),\n\n ]),\n\n vec![Meeting::new(0, 1), Meeting::new(3, 8)]\n\n .into_iter()\n\n .collect()\n\n )\n\n }\n\n\n\n #[test]\n", "file_path": "src/merging_meetings.rs", "rank": 97, "score": 6.52300254131711 }, { "content": " let highest_integer = *highest.iter().max().expect(\"exists\");\n\n cmp::max(\n\n highest.iter().product(),\n\n lowest.iter().product::<i64>() * highest_integer,\n\n )\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn all_positive() {\n\n assert_eq!(highest_product_of_three(vec![1, 3, 4, 2]), 24);\n\n }\n\n\n\n #[test]\n\n fn one_negative() {\n\n assert_eq!(highest_product_of_three(vec![-1, 2, 3, 4]), 24);\n\n assert_eq!(highest_product_of_three(vec![-1, 3, 4]), -12);\n", "file_path": "src/highest_product_of_three.rs", "rank": 98, "score": 6.506801687547258 }, { "content": "\n\n #[test]\n\n fn all_matching() {\n\n assert_eq!(needle_in_haystack(\"aaaa\", \"aa\"), vec![0, 1, 2]);\n\n }\n\n\n\n #[test]\n\n fn haystack_smaller_than_hash() {\n\n assert_eq!(needle_in_haystack(\"aa\", \"aaa\"), vec![]);\n\n }\n\n\n\n #[test]\n\n fn no_matches() {\n\n assert_eq!(needle_in_haystack(\"abcd\", \"aa\"), vec![]);\n\n }\n\n\n\n #[test]\n\n fn needle_empty() {\n\n assert_eq!(needle_in_haystack(\"aaaa\", \"\"), vec![]);\n\n }\n\n}\n", "file_path": "src/needle_in_haystack.rs", "rank": 99, "score": 6.009579271159608 } ]
Rust
src/server/entry_api.rs
bingryan/quake
be1aae0ff36a22d47bdef5c99797d95293792a33
use std::collections::HashMap; use std::fs; use std::fs::File; use std::path::PathBuf; use rocket::fs::NamedFile; use rocket::response::status::NotFound; use rocket::response::Redirect; use rocket::serde::json::Json; use rocket::serde::{Deserialize, Serialize}; use rocket::tokio::task::spawn_blocking; use rocket::State; use rocket::{get, post}; use quake_core::entry::entry_file::EntryFile; use quake_core::entry::entry_paths::EntryPaths; use quake_core::helper::file_filter; use quake_core::usecases::entry_usecases; use quake_core::QuakeConfig; use crate::server::helper::csv_to_json::csv_to_json; use crate::server::ApiError; #[get("/<entry_type>")] pub(crate) async fn get_entries(entry_type: &str, config: &State<QuakeConfig>) -> Redirect { let request_url = format!("{:}/indexes/{:}/search", &config.search_url, entry_type); Redirect::to(request_url) } #[get("/<entry_type>/from_csv")] pub(crate) async fn get_entries_from_csv( entry_type: String, config: &State<QuakeConfig>, ) -> Result<Json<String>, NotFound<Json<ApiError>>> { let path = PathBuf::from(config.workspace.clone()) .join(entry_type) .join(EntryPaths::entries_csv()); let content = spawn_blocking(|| { let mut rdr = csv::Reader::from_reader(File::open(path).unwrap()); csv_to_json(&mut rdr).unwrap().to_string() }) .await .map_err(|err| ApiError { msg: err.to_string(), }) .unwrap(); Ok(Json(content)) } #[get("/<entry_type>/csv")] pub(crate) async fn get_entries_csv( entry_type: &str, config: &State<QuakeConfig>, ) -> Option<NamedFile> { let paths = EntryPaths::init(&config.workspace, &entry_type.to_string()); let file = NamedFile::open(paths.entries_csv); file.await.ok() } #[derive(Debug, Clone, Deserialize, Serialize)] #[serde(crate = "rocket::serde")] struct EntryResponse { content: String, } #[post("/<entry_type>/new?<text>")] pub(crate) async fn create_entry( entry_type: String, text: String, config: &State<QuakeConfig>, ) -> Result<Json<EntryFile>, NotFound<Json<ApiError>>> { let workspace = 
config.workspace.to_string(); return match entry_usecases::create_entry(&workspace, &entry_type, &text) { Ok((_path, file)) => Ok(Json(file)), Err(err) => Err(NotFound(Json(ApiError { msg: err.to_string(), }))), }; } #[get("/<entry_type>/<id>")] pub(crate) async fn get_entry( entry_type: &str, id: usize, config: &State<QuakeConfig>, ) -> Result<Json<EntryFile>, NotFound<Json<ApiError>>> { let base_path = PathBuf::from(&config.workspace).join(entry_type); let index = id; let prefix = EntryFile::file_prefix(index); let vec = file_filter::filter_by_prefix(base_path, prefix); if vec.len() == 0 { return Err(NotFound(Json(ApiError { msg: "file not found".to_string(), }))); } let file_path = vec[0].clone(); let str = fs::read_to_string(file_path).expect("cannot read entry type"); let file = EntryFile::from(str.as_str(), id).unwrap(); return Ok(Json(file)); } #[derive(Debug, Clone, Deserialize, Serialize)] #[serde(crate = "rocket::serde")] pub struct EntryUpdate { fields: HashMap<String, String>, } #[post("/<entry_type>/<id>", data = "<entry>")] pub(crate) async fn update_entry( entry_type: &str, id: usize, entry: Json<EntryUpdate>, config: &State<QuakeConfig>, ) -> Result<Json<EntryFile>, NotFound<Json<ApiError>>> { let path = PathBuf::from(&config.workspace).join(entry_type); return match entry_usecases::update_entry_fields(path, entry_type, id, &entry.fields) { Ok(file) => Ok(Json(file)), Err(err) => Err(NotFound(Json(ApiError { msg: err.to_string(), }))), }; }
use std::collections::HashMap; use std::fs; use std::fs::File; use std::path::PathBuf; use rocket::fs::NamedFile; use rocket::response::status::NotFound; use rocket::response::Redirect; use rocket::serde::json::Json; use rocket::serde::{Deserialize, Serialize}; use rocket::tokio::task::spawn_blocking; use rocket::State; use rocket::{get, post}; use quake_core::entry::entry_file::EntryFile; use quake_core::entry::entry_paths::EntryPaths; use quake_core::helper::file_filter; use quake_core::usecases::entry_usecases; use quake_core::QuakeConfig; use crate::server::helper::csv_to_json::csv_to_json; use crate::server::ApiError; #[get("/<entry_type>")] pub(crate) async fn get_entries(entry_type: &str, config: &State<QuakeConfig>) -> Redirect { let request_url = format!("{:}/indexes/{:}/search", &config.search_url, entry_type); Redirect::to(request_url) } #[get("/<entry_type>/from_csv")] pub(crate) async fn get_entries_from_csv( entry_type: String, config: &State<QuakeConfig>, ) -> Result<Json<String>, NotFound<Json<ApiError>>> { let path = PathBuf::from(config.workspace.clone()) .join(entry_type) .join(EntryPaths::entries_csv()); let content = spawn_blocking(|| { let mut rdr = csv::Reader::from_reader(File::open(path).unwrap()); csv_to_json(&mut rdr).unwrap().to_string() }) .await .map_err(|err| ApiError { msg: err.to_string(), }) .unwrap(); Ok(Json(content)) } #[get("/<entry_type>/csv")] pub(crate) async fn get_entries_csv( entry_type: &str, config: &State<QuakeConfig>, ) -> Option<NamedFile> { let paths = EntryPaths::init(&config.workspace, &entry_type.to_string()); let file = NamedFile::open(paths.entries_csv); file.await.ok() } #[derive(Debug, Clone, Deserialize, Serialize)] #[serde(crate = "rocket::serde")] struct EntryResponse { content: String, } #[post("/<entry_type>/new?<text>")] p
#[get("/<entry_type>/<id>")] pub(crate) async fn get_entry( entry_type: &str, id: usize, config: &State<QuakeConfig>, ) -> Result<Json<EntryFile>, NotFound<Json<ApiError>>> { let base_path = PathBuf::from(&config.workspace).join(entry_type); let index = id; let prefix = EntryFile::file_prefix(index); let vec = file_filter::filter_by_prefix(base_path, prefix); if vec.len() == 0 { return Err(NotFound(Json(ApiError { msg: "file not found".to_string(), }))); } let file_path = vec[0].clone(); let str = fs::read_to_string(file_path).expect("cannot read entry type"); let file = EntryFile::from(str.as_str(), id).unwrap(); return Ok(Json(file)); } #[derive(Debug, Clone, Deserialize, Serialize)] #[serde(crate = "rocket::serde")] pub struct EntryUpdate { fields: HashMap<String, String>, } #[post("/<entry_type>/<id>", data = "<entry>")] pub(crate) async fn update_entry( entry_type: &str, id: usize, entry: Json<EntryUpdate>, config: &State<QuakeConfig>, ) -> Result<Json<EntryFile>, NotFound<Json<ApiError>>> { let path = PathBuf::from(&config.workspace).join(entry_type); return match entry_usecases::update_entry_fields(path, entry_type, id, &entry.fields) { Ok(file) => Ok(Json(file)), Err(err) => Err(NotFound(Json(ApiError { msg: err.to_string(), }))), }; }
ub(crate) async fn create_entry( entry_type: String, text: String, config: &State<QuakeConfig>, ) -> Result<Json<EntryFile>, NotFound<Json<ApiError>>> { let workspace = config.workspace.to_string(); return match entry_usecases::create_entry(&workspace, &entry_type, &text) { Ok((_path, file)) => Ok(Json(file)), Err(err) => Err(NotFound(Json(ApiError { msg: err.to_string(), }))), }; }
function_block-function_prefixed
[ { "content": "fn highlight_content(string: &str, lang: &str) {\n\n use syntect::easy::HighlightLines;\n\n use syntect::highlighting::{Style, ThemeSet};\n\n use syntect::parsing::SyntaxSet;\n\n use syntect::util::{as_24_bit_terminal_escaped, LinesWithEndings};\n\n\n\n // Load these once at the start of your program\n\n let ps = SyntaxSet::load_defaults_newlines();\n\n let ts = ThemeSet::load_defaults();\n\n\n\n let syntax = ps.find_syntax_by_extension(lang).unwrap();\n\n let mut h = HighlightLines::new(syntax, &ts.themes[\"base16-ocean.dark\"]);\n\n for line in LinesWithEndings::from(string) {\n\n let ranges: Vec<(Style, &str)> = h.highlight(line, &ps);\n\n let escaped = as_24_bit_terminal_escaped(&ranges[..], true);\n\n println!(\"{}\", escaped);\n\n }\n\n}\n\n\n", "file_path": "src/cli/entry_action.rs", "rank": 0, "score": 232730.78477830318 }, { "content": "pub fn csv_to_terminal_table(rdr: &mut Reader<File>) -> Table {\n\n let mut table = Table::new();\n\n\n\n let mut header = vec![];\n\n for record in rdr.headers() {\n\n for str in record {\n\n header.push(String::from(str))\n\n }\n\n }\n\n\n\n table.set_header(header);\n\n\n\n for result in rdr.records() {\n\n let record = result.unwrap();\n\n let mut row = vec![];\n\n for str in &record {\n\n row.push(String::from(str));\n\n }\n\n table.add_row(row);\n\n }\n\n\n\n table\n\n}\n", "file_path": "src/cli/helper/table_process.rs", "rank": 1, "score": 221454.37670985685 }, { "content": "fn load_config(path: &String) -> Result<QuakeConfig, Box<dyn Error>> {\n\n let content = fs::read_to_string(path)?;\n\n let conf: QuakeConfig = serde_yaml::from_str(content.as_str())?;\n\n\n\n Ok(conf)\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 2, "score": 209435.16105417034 }, { "content": "pub fn entry_file_by_path(path: &PathBuf) -> Result<(String, EntryFile), Box<dyn Error>> {\n\n let typ = type_from_md_path(&path).ok_or(\"\")?;\n\n let file_name = path.file_name().ok_or(\"\")?;\n\n\n\n if file_name == \"\" || typ == 
\"\" {\n\n return Err(Box::new(QuakeError(format!(\n\n \"emtpy typ {:?} or file_name {:?}\",\n\n typ, file_name\n\n ))));\n\n }\n\n\n\n let id = EntryFile::id_from_name(format!(\"{:}\", file_name.to_str().unwrap()).as_str())?;\n\n let content = fs::read_to_string(&path)?;\n\n let file = EntryFile::from(content.as_str(), id)?;\n\n Ok((typ, file))\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::path::PathBuf;\n", "file_path": "src/helper/entry_watcher.rs", "rank": 3, "score": 206204.4206205416 }, { "content": "pub fn export(db_name: &str, sql: &str, path: PathBuf) -> Result<(), Box<dyn Error>> {\n\n let conn = Connection::open(db_name)?;\n\n let mut query = conn.prepare(sql)?;\n\n\n\n let mut rows = query.query([])?;\n\n\n\n let mut id: usize = 1;\n\n while let Some(row) = rows.next()? {\n\n write_file(&path, row, id);\n\n id = id + 1;\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "quake_importer/src/sqlite_to_file.rs", "rank": 4, "score": 204220.34113334975 }, { "content": "pub fn filter_by_prefix(path: PathBuf, prefix: String) -> Vec<PathBuf> {\n\n let mut files = vec![];\n\n for entry in WalkDir::new(path).into_iter().filter_map(|e| e.ok()) {\n\n if is_with_prefix(&entry, &prefix) {\n\n files.push(entry.into_path());\n\n }\n\n }\n\n files\n\n}\n\n\n", "file_path": "quake_core/src/helper/file_filter.rs", "rank": 5, "score": 203534.5387219928 }, { "content": "#[allow(dead_code)]\n\npub fn csv_to_json(rdr: &mut Reader<File>) -> Result<JsonValue, Box<dyn Error>> {\n\n let mut json: JsonValue = array![];\n\n\n\n let mut header = vec![];\n\n for record in rdr.headers() {\n\n for str in record {\n\n header.push(String::from(str))\n\n }\n\n }\n\n\n\n for result in rdr.records() {\n\n let mut element = object! 
{};\n\n let record = result.unwrap();\n\n for (index, str) in record.iter().enumerate() {\n\n element[header[index].clone()] = str.into();\n\n }\n\n\n\n json.push(element.clone())?;\n\n }\n\n\n", "file_path": "src/server/helper/csv_to_json.rs", "rank": 6, "score": 198743.64474353427 }, { "content": "pub fn type_from_md_path(buf: &PathBuf) -> Option<String> {\n\n let mut ancestors = buf.ancestors();\n\n ancestors.next()?;\n\n let typ = ancestors.next()?.file_name()?;\n\n let str = typ.to_str()?.to_string();\n\n Some(str)\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::helper::file_filter::type_from_md_path;\n\n use std::path::PathBuf;\n\n\n\n #[test]\n\n fn type_from() {\n\n let buf = PathBuf::from(\"examples\")\n\n .join(\"todo\")\n\n .join(\"0001-time-support.md\");\n\n\n\n let typ = type_from_md_path(&buf).unwrap();\n\n assert_eq!(typ, \"todo\".to_string());\n\n }\n\n}\n", "file_path": "quake_core/src/helper/file_filter.rs", "rank": 7, "score": 186759.80330375888 }, { "content": "pub fn replace_string_markers(input: &str) -> String {\n\n match input.chars().next().unwrap() {\n\n '\"' => input.replace('\"', \"\"),\n\n '\\'' => input.replace('\\'', \"\"),\n\n '`' => input.replace('`', \"\"),\n\n _ => unreachable!(\"error: {:?}\", input),\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::parser::ast::{SourceUnitPart, TransflowEnum};\n\n use crate::parser::parser::parse;\n\n\n\n #[test]\n\n fn should_parse_add_todo() {\n\n let unit = parse(\"todo.add: 添加 todo 的支持\").unwrap();\n\n assert_eq!(1, unit.0.len());\n\n\n\n match &unit.0[0] {\n", "file_path": "quake_core/src/parser/parser.rs", "rank": 8, "score": 183879.48285120662 }, { "content": "pub fn dump_phodal_com(db_path: &str, path: PathBuf) {\n\n let sql = \"SELECT blog_blogpost.keywords_string as keywords, blog_blogpost.title, blog_blogpost.description, blog_blogpost.slug, blog_blogpost.content,\n\n auth_user.first_name, auth_user.last_name, auth_user.email, created as created_date, updated as 
updated_date\n\nFROM blog_blogpost\n\n INNER JOIN auth_user\n\n ON blog_blogpost.user_id = auth_user.id\n\n\";\n\n\n\n let _ = fs::create_dir(&path);\n\n if let Err(err) = sqlite_to_file::export(db_path, sql, path) {\n\n println!(\"{:?}\", err);\n\n };\n\n}\n\n\n", "file_path": "quake_importer/src/main.rs", "rank": 9, "score": 181801.82481633386 }, { "content": "/// refs: https://www.swiftforensics.com/2018/02/reading-notes-database-on-macos.html\n\npub fn dump_apple_notes(db_path: &str, path: PathBuf) {\n\n let sql = \"\n", "file_path": "quake_importer/src/main.rs", "rank": 10, "score": 181801.82481633386 }, { "content": "pub fn edit_file(editor: String, file: String) -> Result<(), Box<dyn Error>> {\n\n if editor == \"~\" || editor == \"\" {\n\n return Ok(());\n\n }\n\n\n\n let cmd = format!(\"{:} {:?}\", editor, file);\n\n exec_runner::cmd_runner(cmd)?;\n\n\n\n Ok(())\n\n}\n", "file_path": "src/helper/exec_wrapper/editor_exec.rs", "rank": 11, "score": 178358.87216699996 }, { "content": "pub fn slugify(text: &str) -> String {\n\n let lower = text.trim().to_lowercase();\n\n let result = SPECIAL.replace_all(lower.as_str(), \"-\").to_string();\n\n let result = LEADING.replace_all(result.as_str(), \"\").to_string();\n\n\n\n result\n\n .replace(\",\", \"\")\n\n .replace(\"。\", \"\")\n\n .replace(\" \", \"-\")\n\n .replace(\"?\", \"-\")\n\n .replace(\"#\", \"-\")\n\n .replace(\":\", \"-\")\n\n .replace(\"-/-\", \"\")\n\n .replace(\"/\", \"\")\n\n .replace(\"——\", \"-\")\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::entry::slug::slugify;\n", "file_path": "quake_core/src/entry/slug.rs", "rank": 12, "score": 178333.41954648396 }, { "content": "pub fn simple_escape(value: String) -> String {\n\n format!(\n\n \"{:}\",\n\n value\n\n .replace(\",\", \"-\")\n\n .replace(\"\\u{2005}\", \" \")\n\n .replace(\" \", \" \")\n\n .replace(\"\b\", \" \")\n\n .replace(\"\u001b\", \" \")\n\n .replace(\"\u001d\", \" \")\n\n .replace(\"​\", \" \")\n\n )\n\n}\n", "file_path": 
"quake_importer/src/sqlite_to_file.rs", "rank": 13, "score": 169111.52978199176 }, { "content": "pub fn write_file(path: &PathBuf, row: &Row, id: usize) {\n\n let mut file = EntryFile::default();\n\n let mut title = \"\".to_string();\n\n\n\n for (index, name) in row.column_names().iter().enumerate() {\n\n let value: String = match row.get_ref(index).unwrap() {\n\n ValueRef::Null => \"\".to_string(),\n\n ValueRef::Integer(int) => int.to_string(),\n\n ValueRef::Real(real) => real.to_string(),\n\n ValueRef::Text(text) => std::str::from_utf8(text).unwrap().to_string(),\n\n ValueRef::Blob(bool) => std::str::from_utf8(bool).unwrap().to_string(),\n\n };\n\n\n\n let name = name.to_string();\n\n if name.eq(\"content\") {\n\n file.content = \"\\n\\n\".to_string();\n\n file.content.push_str(&*value);\n\n } else {\n\n if name.eq(\"title\") {\n\n title = value.clone();\n", "file_path": "quake_importer/src/sqlite_to_file.rs", "rank": 14, "score": 158265.83255634498 }, { "content": "pub fn dump_microsoft_todo(todos_lists: Vec<OutputList>, path: &PathBuf) {\n\n let _ = fs::create_dir(&path);\n\n let mut index = 1;\n\n for list in todos_lists {\n\n for todo in list.children {\n\n let mut file = EntryFile::default();\n\n\n\n let title = todo.title;\n\n file.add_field(\"category\", format!(\"{:?}\", list.display_name));\n\n file.add_field(\"title\", format!(\"{:?}\", simple_escape(title.clone())));\n\n file.add_field(\"created_date\", todo.created_date_time);\n\n file.add_field(\"updated_date\", todo.last_modified_date_time);\n\n\n\n let time = match todo.reminder_date_time {\n\n None => \"\".to_string(),\n\n Some(dat) => dat.date_time,\n\n };\n\n file.add_field(\"reminder_date\", format!(\"{:?}\", time));\n\n\n\n let completed_date = match todo.completed_date_time {\n", "file_path": "quake_importer/src/todo_to_file.rs", "rank": 15, "score": 147906.02910114639 }, { "content": "fn is_with_prefix(entry: &DirEntry, prefix: &String) -> bool {\n\n entry\n\n .file_name()\n\n .to_str()\n\n 
.map(|s| s.starts_with(prefix))\n\n .unwrap_or(false)\n\n}\n\n\n", "file_path": "quake_core/src/helper/file_filter.rs", "rank": 16, "score": 146294.59894604352 }, { "content": "pub fn find_entry_define(paths: &EntryPaths, target_entry: &String) -> EntryDefine {\n\n let entries: Vec<EntryDefine> = entry_define::entries_define_from_path(&paths.entries_define)\n\n .into_iter()\n\n .filter(|define| define.entry_type.eq(target_entry))\n\n .collect();\n\n\n\n let entries_define = if entries.len() == 0 {\n\n EntryDefine::default()\n\n } else {\n\n entries[0].clone()\n\n };\n\n entries_define\n\n}\n\n\n", "file_path": "quake_core/src/usecases/entry_usecases.rs", "rank": 17, "score": 146212.4914323977 }, { "content": "pub fn update_entry_info(entry_info_path: &PathBuf, entry_info: &mut EntryNodeInfo) {\n\n let result = serde_yaml::to_string(&entry_info).expect(\"cannot convert to yaml\");\n\n fs::write(&entry_info_path, result).expect(\"cannot write to file\");\n\n}\n\n\n", "file_path": "quake_core/src/usecases/entry_usecases.rs", "rank": 18, "score": 143048.9063561383 }, { "content": "fn walk_in_path(path: PathBuf) -> FilterEntry<IntoIter, fn(&DirEntry) -> bool> {\n\n WalkDir::new(path)\n\n .min_depth(1)\n\n .max_depth(1)\n\n .into_iter()\n\n .filter_entry(|e| !is_hidden(e))\n\n}\n\n\n", "file_path": "src/cli/quake_action.rs", "rank": 19, "score": 134744.1523026475 }, { "content": "pub fn transflow_action(action: String, _conf: &QuakeConfig) -> Result<(), Box<dyn Error>> {\n\n match action.as_str() {\n\n \"define\" => {}\n\n _ => {}\n\n }\n\n Ok(())\n\n}\n", "file_path": "src/cli/transflow_action.rs", "rank": 20, "score": 134161.69907037888 }, { "content": "pub fn quake_action(action: String, conf: &QuakeConfig) -> Result<(), Box<dyn Error>> {\n\n match action.as_str() {\n\n \"sync\" => {\n\n sync_defines(conf)?;\n\n }\n\n \"migration\" => {\n\n // todo: add migrations for on entries\n\n }\n\n \"feed\" => {\n\n feed_data(conf)?;\n\n }\n\n _ => {\n\n return 
Err(Box::new(QuakeError(format!(\n\n \"unknow quake action: {:?}\",\n\n action\n\n ))))\n\n }\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cli/quake_action.rs", "rank": 21, "score": 134161.69907037888 }, { "content": "#[get(\"/query?<input>\")]\n\npub fn parse_query(input: String) -> String {\n\n let result = QuakeActionNode::action_from_text(input.as_str());\n\n let output = match result {\n\n Ok(value) => serde_json::to_string(&value).unwrap(),\n\n Err(err) => serde_json::to_string(&ApiError {\n\n msg: format!(\"{:?}\", err),\n\n })\n\n .unwrap(),\n\n };\n\n\n\n output\n\n}\n\n\n\n#[get(\"/suggest\")]\n\npub async fn suggest(config: &State<QuakeConfig>) -> Json<ActionSuggest> {\n\n let mut suggest = ActionSuggest::default();\n\n let path = PathBuf::from(&config.workspace).join(\"entries-define.yaml\");\n\n\n\n suggest.entries = spawn_blocking(|| {\n\n let entries_str = fs::read_to_string(path).expect(\"cannot read entries-define.yaml\");\n", "file_path": "src/server/action_api.rs", "rank": 22, "score": 133851.10081863383 }, { "content": "fn show_entrysets(path: &PathBuf) {\n\n let mut rdr = csv::Reader::from_reader(File::open(path).expect(\"cannot open file\"));\n\n let table = table_process::csv_to_terminal_table(&mut rdr);\n\n\n\n // todo: change to terminal ui\n\n println!(\"{}\", table);\n\n}\n\n\n", "file_path": "src/cli/entry_action.rs", "rank": 23, "score": 133549.86538093266 }, { "content": "pub fn entries_define_from_path(config_path: &PathBuf) -> Vec<EntryDefine> {\n\n let entries_str = fs::read_to_string(config_path).expect(\"cannot read entries-define.yaml\");\n\n let entries: EntryDefines = serde_yaml::from_str(&*entries_str).unwrap();\n\n\n\n entries.entries\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use indexmap::IndexMap;\n\n\n\n use crate::entry::entry_define::EntryDefine;\n\n use crate::meta::MetaField;\n\n\n\n fn custom_entry_from_yaml() -> Vec<EntryDefine> {\n\n let yaml = \"\n\n- type: todo\n\n display: Todo\n\n fields:\n\n - title: 
Title\n", "file_path": "quake_core/src/entry/entry_define.rs", "rank": 24, "score": 132456.86237489845 }, { "content": "fn string_from_pair(pair: Pair<Rule>) -> String {\n\n replace_string_markers(pair.as_str())\n\n}\n\n\n", "file_path": "quake_core/src/parser/parser.rs", "rank": 25, "score": 124915.85338185192 }, { "content": "pub fn date_now() -> String {\n\n let local: DateTime<Local> = Local::now();\n\n local.format(FORMAT).to_string()\n\n}\n", "file_path": "quake_core/src/helper/quake_time.rs", "rank": 26, "score": 122961.66742908797 }, { "content": "pub fn draw<B: Backend>(f: &mut Frame<B>, app: &mut App) {\n\n let chunks = Layout::default()\n\n .direction(Direction::Vertical)\n\n .margin(2)\n\n .constraints([Constraint::Length(3), Constraint::Min(1)].as_ref())\n\n .split(f.size());\n\n let command_bar = Paragraph::new(app.command.as_ref())\n\n .style(match app.mode {\n\n Mode::Normal => Style::default(),\n\n Mode::Command => Style::default().fg(Color::Yellow),\n\n })\n\n .block(Block::default().borders(Borders::ALL).title(\"Action\"));\n\n f.render_widget(command_bar, chunks[0]);\n\n match app.mode {\n\n Mode::Normal => {}\n\n Mode::Command => f.set_cursor(\n\n chunks[0].x + app.command.width() as u16 + 1,\n\n chunks[0].y + 1,\n\n ),\n\n }\n\n\n\n draw_main(app, f, chunks[1]);\n\n}\n\n\n", "file_path": "quake_tui/src/ui.rs", "rank": 27, "score": 119398.6775067019 }, { "content": "fn value(decl: Pair<Rule>) -> String {\n\n let mut value: String = \"\".to_string();\n\n for pair in decl.into_inner() {\n\n match pair.as_rule() {\n\n Rule::double_quoted_string | Rule::single_quoted_string => {\n\n value = string_from_pair(pair);\n\n }\n\n _ => {\n\n value = String::from(pair.as_str());\n\n }\n\n }\n\n }\n\n\n\n value\n\n}\n\n\n", "file_path": "quake_core/src/parser/parser.rs", "rank": 28, "score": 117101.11245209131 }, { "content": "pub fn feed_settings(index_name: &String, search_url: &String) -> Result<(), Box<dyn Error>> {\n\n let url = 
format!(\"{:}/indexes/{:}/settings\", search_url, index_name);\n\n let cmd_line = format!(\n\n \"curl -i -X POST '{:}' \\\n\n --header 'content-type: application/json' \\\n\n --data-binary @resources/search_rule.json\",\n\n url\n\n );\n\n\n\n info!(\"{:?}\", cmd_line);\n\n exec_runner::cmd_runner(cmd_line)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/helper/exec_wrapper/meili_exec.rs", "rank": 29, "score": 115309.41516502724 }, { "content": "pub fn feed_command(index_name: &String, search_url: &String) -> Result<(), Box<dyn Error>> {\n\n let url = format!(\"{:}/indexes/{:}/documents\", search_url, index_name);\n\n let cmd_line = format!(\n\n \"curl -i -X POST '{:}' \\\n\n --header 'content-type: application/json' \\\n\n --data-binary @dump.json\",\n\n url\n\n );\n\n\n\n info!(\"{:?}\", cmd_line);\n\n exec_runner::cmd_runner(cmd_line)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/helper/exec_wrapper/meili_exec.rs", "rank": 30, "score": 115309.41516502724 }, { "content": "fn run_app<B: Backend>(terminal: &mut Terminal<B>, mut app: App) -> io::Result<()> {\n\n // TODO: refactor\n\n loop {\n\n terminal.draw(|f| {\n\n draw(f, &mut app);\n\n })?;\n\n\n\n if let Event::Key(key) = event::read()? 
{\n\n match app.mode {\n\n Mode::Normal => {\n\n if let KeyCode::Char(':') = key.code {\n\n app.mode = Mode::Command;\n\n }\n\n }\n\n Mode::Command => match key.code {\n\n KeyCode::Enter => {\n\n let command: String = app.command.drain(..).collect();\n\n match command.as_str() {\n\n \"quit\" => return Ok(()),\n\n \"listAll\" => app.main_widget = MainWidget::Dirs,\n", "file_path": "quake_tui/src/lib.rs", "rank": 31, "score": 115126.90576610802 }, { "content": "fn init_projects(config: Init) -> Result<(), Box<dyn Error>> {\n\n fs::create_dir_all(&config.path)?;\n\n\n\n let path = PathBuf::from(&config.path).join(\".quake.yaml\");\n\n let define = PathBuf::from(&config.path).join(\"entries-define.yaml\");\n\n\n\n let config = QuakeConfig {\n\n workspace: config.path.clone(),\n\n editor: \"\".to_string(),\n\n search_url: \"http://127.0.0.1:7700\".to_string(),\n\n server_location: \"web\".to_string(),\n\n };\n\n\n\n fs::write(&path, serde_yaml::to_string(&config)?)?;\n\n debug!(\"create .quake.yaml in {:?}\", &path.display());\n\n\n\n let todo_define = \"\n\n- type: todo\n\n display: Todo\n\n fields:\n", "file_path": "src/main.rs", "rank": 32, "score": 113193.22785991745 }, { "content": "pub fn events_to_text(markdown: Vec<Event>) -> String {\n\n let mut buffer = String::new();\n\n cmark_with_options(\n\n markdown.iter(),\n\n &mut buffer,\n\n None,\n\n pulldown_cmark_to_cmark::Options::default(),\n\n )\n\n .expect(\"formatting to string not expected to fail\");\n\n\n\n // buffer.push('\\n');\n\n buffer\n\n}\n\n\n", "file_path": "quake_core/src/markdown/md_processor.rs", "rank": 33, "score": 111121.76901567688 }, { "content": "/// parse pure text to `QuakeIt` collections which include all\n\n/// - QuakeAction , the action for handle data in Quake\n\n/// - QuakeTransflowNode , the data transform in Quake\n\npub fn quake(text: &str) -> Result<QuakeIt, Box<dyn Error>> {\n\n let mut quakes = QuakeIt::default();\n\n let unit = parse(text)?;\n\n\n\n for part in unit.0 {\n\n 
match part {\n\n SourceUnitPart::Action(decl) => {\n\n let mut action = QuakeActionNode::default();\n\n\n\n action.action = decl.action;\n\n action.object = decl.object;\n\n action.text = decl.text;\n\n\n\n for parameter in decl.parameters {\n\n action.parameters.push(parameter.value);\n\n }\n\n\n\n quakes.actions.push(action);\n\n }\n\n SourceUnitPart::Transflow(decl) => {\n\n let transflow = build_transflow(decl);\n\n quakes.transflows.push(transflow);\n\n }\n\n }\n\n }\n\n\n\n Ok(quakes)\n\n}\n\n\n", "file_path": "quake_core/src/parser/quake.rs", "rank": 34, "score": 104769.4999771806 }, { "content": "pub fn cmd_runner(editor_cmd: String) -> Result<(), Box<dyn Error>> {\n\n if cfg!(target_os = \"windows\") {\n\n Command::new(\"cmd\")\n\n .args([\"/C\", editor_cmd.as_str()])\n\n .spawn()?\n\n .wait()?;\n\n } else {\n\n Command::new(\"sh\")\n\n .arg(\"-c\")\n\n .arg(editor_cmd)\n\n .spawn()?\n\n .wait()?;\n\n };\n\n\n\n Ok(())\n\n}\n", "file_path": "src/helper/exec_wrapper/exec_runner.rs", "rank": 35, "score": 103765.12647982076 }, { "content": "/// parse text to SourceUnit\n\n/// convert support:\n\n/// - Action\n\n/// - Transflow\n\npub fn parse(text: &str) -> Result<SourceUnit, Box<dyn Error>> {\n\n let pairs = match QuakeParser::parse(Rule::earth, text) {\n\n Ok(pairs) => pairs,\n\n Err(e) => {\n\n let string = format!(\"{:?}\", e);\n\n return Err(Box::new(QuakeParserError::new(&*string)));\n\n }\n\n };\n\n\n\n let mut parts = vec![];\n\n for pair in pairs {\n\n for inner_pair in pair.into_inner() {\n\n match inner_pair.as_rule() {\n\n Rule::action_decl => {\n\n parts.push(SourceUnitPart::Action(action_decl(inner_pair)));\n\n }\n\n Rule::transflow_decl => {\n\n parts.push(SourceUnitPart::Transflow(transflow_decl(inner_pair)));\n\n }\n\n _ => println!(\"rule: {}\", inner_pair),\n\n };\n\n }\n\n }\n\n\n\n Ok(SourceUnit(parts))\n\n}\n\n\n", "file_path": "quake_core/src/parser/parser.rs", "rank": 36, "score": 103695.40258007529 }, { "content": "fn 
feed_by_event(event: Event, search_url: &String) -> Result<(), Box<dyn Error>> {\n\n // only for data modify\n\n // todo: looking for better way\n\n match &event.kind {\n\n EventKind::Modify(modify) => match modify {\n\n ModifyKind::Data(_data) => {}\n\n _ => return Ok(()),\n\n },\n\n _ => return Ok(()),\n\n }\n\n\n\n debug!(\"feed_by_event {:?}\", &event);\n\n for path in event.paths {\n\n if path.is_dir() {\n\n continue;\n\n }\n\n\n\n if let Some(ext) = path.extension() {\n\n if !ext.eq(\"md\") {\n\n continue;\n", "file_path": "src/helper/entry_watcher.rs", "rank": 37, "score": 103030.22988700701 }, { "content": "fn draw_main<B>(app: &App, frame: &mut Frame<B>, area: Rect)\n\nwhere\n\n B: Backend,\n\n{\n\n match app.main_widget {\n\n MainWidget::Home => {\n\n let help_messages = vec![\n\n Spans::from(vec![\n\n Span::raw(\"Press \"),\n\n Span::styled(\":\", Style::default().add_modifier(Modifier::BOLD)),\n\n Span::raw(\" into command mode, \"),\n\n Span::styled(\"Esc\", Style::default().add_modifier(Modifier::BOLD)),\n\n Span::raw(\" back to normal mode.\"),\n\n ]),\n\n Spans::from(vec![\n\n Span::raw(\"Command \"),\n\n Span::styled(\"listAll\", Style::default().add_modifier(Modifier::BOLD)),\n\n Span::raw(\" list all workspace.\"),\n\n ]),\n\n Spans::from(vec![\n", "file_path": "quake_tui/src/ui.rs", "rank": 38, "score": 102149.88603392651 }, { "content": "pub fn dump_by_path(paths: &EntryPaths) -> Result<(), Box<dyn Error>> {\n\n let map = Entrysets::jsonify(&paths.base)?;\n\n fs::write(\"dump.json\", map)?;\n\n Ok(())\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use quake_core::parser::quake::QuakeActionNode;\n\n use quake_core::quake_config::QuakeConfig;\n\n\n\n use crate::cli::action;\n\n\n\n #[test]\n\n fn throw_editor_empty() {\n\n let expr = QuakeActionNode::action_from_text(\"todo.edit(1)\").unwrap();\n\n let mut config = QuakeConfig::default();\n\n config.workspace = \"examples\".to_string();\n\n config.editor = \"\".to_string();\n\n\n\n let expected = 
action(expr, config).expect_err(\"cannot process\");\n\n assert_eq!(format!(\"{:?}\", expected), \"QuakeError(\\\"editor is empty\\\")\");\n\n }\n\n}\n", "file_path": "src/cli/entry_action.rs", "rank": 39, "score": 99847.42767347532 }, { "content": "pub fn entry_info_from_path(entry_info_path: &PathBuf) -> EntryNodeInfo {\n\n if !entry_info_path.exists() {\n\n let info = EntryNodeInfo::default();\n\n fs::write(\n\n entry_info_path,\n\n serde_yaml::to_string(&info).expect(\"cannot serial\"),\n\n )\n\n .expect(\"cannot write to file\");\n\n\n\n return info;\n\n }\n\n\n\n let text = fs::read_to_string(&entry_info_path).expect(\"cannot read entry-info.yaml\");\n\n let entry_info = serde_yaml::from_str(&*text).unwrap();\n\n entry_info\n\n}\n", "file_path": "quake_core/src/entry/entry_node_info.rs", "rank": 40, "score": 98694.39430587788 }, { "content": "pub fn find_entry_path(\n\n entry_path: PathBuf,\n\n entry_type: &String,\n\n index: usize,\n\n) -> Result<PathBuf, Box<QuakeError>> {\n\n #[allow(unused_assignments)]\n\n let mut target_file = PathBuf::new();\n\n\n\n let prefix = EntryFile::file_prefix(index);\n\n let vec = file_filter::filter_by_prefix(entry_path, prefix);\n\n if vec.len() > 0 {\n\n target_file = vec[0].clone();\n\n } else {\n\n return Err(Box::new(QuakeError(format!(\n\n \"cannot find {:} file {:}\",\n\n entry_type, index\n\n ))));\n\n }\n\n\n\n Ok(target_file)\n\n}\n\n\n", "file_path": "quake_core/src/usecases/entry_usecases.rs", "rank": 41, "score": 98179.76517619926 }, { "content": "/// generate entries.csv from by paths\n\npub fn sync_in_path(paths: &EntryPaths) -> Result<(), Box<dyn Error>> {\n\n let (size, content) = Entrysets::generate(&paths.base)?;\n\n\n\n if size > 0 {\n\n fs::write(&paths.entries_csv, content)?;\n\n update_entry_info(&paths.entry_node_info, &mut EntryNodeInfo { index: size });\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "quake_core/src/usecases/entry_usecases.rs", "rank": 42, "score": 98104.14663131715 }, { "content": 
"/// create really entry file\n\npub fn create_entry_file(\n\n entry_define: &EntryDefine,\n\n target_file: &mut PathBuf,\n\n entry_text: String,\n\n) -> EntryFile {\n\n let mut entry_file = EntryFile::default();\n\n entry_file.set_fields(entry_define.create_default_fields(entry_text));\n\n fs::write(&target_file, entry_file.to_string()).expect(\"cannot write to file\");\n\n entry_file\n\n}\n\n\n", "file_path": "quake_core/src/usecases/entry_usecases.rs", "rank": 43, "score": 98008.10471099979 }, { "content": "fn config_quake(cmd: &Command) -> Result<QuakeConfig, Box<dyn Error>> {\n\n let mut conf = load_config(&cmd.config)?;\n\n\n\n if !cmd.editor.is_empty() {\n\n conf.editor = cmd.editor.clone();\n\n }\n\n\n\n Ok(conf)\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 44, "score": 97581.02114848897 }, { "content": "fn show_entry_detail(expr: &QuakeActionNode, paths: &EntryPaths) -> Result<(), Box<dyn Error>> {\n\n let index = expr.index_from_parameter();\n\n let target_file = find_entry_path(paths.base.clone(), &expr.object, index)?;\n\n info!(\"show file: {:}\", &target_file.display());\n\n let content = fs::read_to_string(target_file)?;\n\n let file = EntryFile::from(content.as_str(), index)?;\n\n\n\n highlight_content(format!(\"{:?}\", file.fields).as_str(), \"json\");\n\n\n\n println!(\"{:}\", file.content);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/cli/entry_action.rs", "rank": 45, "score": 90920.80852083009 }, { "content": "fn feed_data(conf: &QuakeConfig) -> Result<(), Box<dyn Error>> {\n\n let path = PathBuf::from(&conf.workspace);\n\n let temp_file = \"dump.json\";\n\n\n\n for entry in walk_in_path(path) {\n\n let entry = entry.unwrap();\n\n if !entry.path().is_dir() {\n\n continue;\n\n }\n\n\n\n let table = entry.path().join(\"entries.csv\");\n\n if !table.exists() {\n\n continue;\n\n }\n\n\n\n let path_name = format!(\"{:}\", entry.path().file_name().unwrap().to_str().unwrap());\n\n let paths = EntryPaths::init(&conf.workspace, &path_name);\n\n\n\n 
let map = Entrysets::jsonify(&paths.base)?;\n\n fs::write(temp_file, map)?;\n", "file_path": "src/cli/quake_action.rs", "rank": 46, "score": 88169.90384859627 }, { "content": "fn sync_defines(conf: &QuakeConfig) -> Result<(), Box<dyn Error>> {\n\n let path = PathBuf::from(&conf.workspace);\n\n\n\n let mut define_file = EntryDefines::default();\n\n for entry in walk_in_path(path) {\n\n let entry = entry.unwrap();\n\n if !entry.path().is_dir() {\n\n continue;\n\n }\n\n\n\n let path_name = format!(\"{:}\", entry.path().file_name().unwrap().to_str().unwrap());\n\n if path_name.eq(&conf.server_location) {\n\n continue;\n\n }\n\n\n\n let paths = EntryPaths::init(&conf.workspace, &path_name);\n\n entry_usecases::sync_in_path(&paths)?;\n\n\n\n let csv = entry.path().join(\"entries.csv\");\n\n if csv.exists() {\n", "file_path": "src/cli/quake_action.rs", "rank": 47, "score": 88169.90384859627 }, { "content": "fn async_watcher() -> notify::Result<(RecommendedWatcher, Receiver<notify::Result<Event>>)> {\n\n let (mut tx, rx) = channel(1);\n\n let mut watcher = RecommendedWatcher::new(move |res| {\n\n futures::executor::block_on(async {\n\n tx.send(res).await.unwrap();\n\n })\n\n })?;\n\n\n\n let _ = watcher.configure(notify::Config::OngoingEvents(Some(Duration::from_secs(2))));\n\n\n\n Ok((watcher, rx))\n\n}\n\n\n\n// todo: add type merge for ranges\n\npub async fn async_watch<P: AsRef<Path>>(path: P, search_url: String) -> notify::Result<()> {\n\n debug!(\"start watch: {:?}\", path.as_ref());\n\n let (mut watcher, mut rx) = async_watcher()?;\n\n watcher.watch(path.as_ref(), RecursiveMode::Recursive)?;\n\n\n\n while let Some(res) = rx.next().await {\n", "file_path": "src/helper/entry_watcher.rs", "rank": 48, "score": 86364.14169911318 }, { "content": "pub fn action(expr: QuakeActionNode, conf: QuakeConfig) -> Result<(), Box<dyn Error>> {\n\n match expr.object.as_str() {\n\n \"quake\" => quake_action::quake_action(expr.action, &conf),\n\n \"flow\" => 
transflow_action::transflow_action(expr.action, &conf),\n\n \"transflow\" => Ok(()),\n\n _ => entry_action::entry_action(&expr, conf),\n\n }\n\n}\n", "file_path": "src/cli/mod.rs", "rank": 49, "score": 82463.05927654183 }, { "content": "export const config: Config = {\n\n namespace: 'dashboard',\n\n buildEs5: true,\n\n outputTargets: [\n\n {\n\n type: 'dist',\n\n esmLoaderPath: '../loader',\n\n },\n\n {\n\n type: 'dist-custom-elements-bundle',\n\n },\n\n {\n\n type: 'docs-readme',\n\n },\n\n {\n\n type: 'www',\n\n serviceWorker: null, // disable service workers\n\n },\n\n ],\n", "file_path": "quake_webapp/dashboard/stencil.config.ts", "rank": 50, "score": 80731.88229531178 }, { "content": "pub fn entry_action(expr: &QuakeActionNode, conf: QuakeConfig) -> Result<(), Box<dyn Error>> {\n\n let paths = EntryPaths::init(&conf.workspace, &expr.object);\n\n\n\n // todo: export api for search\n\n match expr.action.as_str() {\n\n \"add\" => {\n\n let target_file =\n\n entry_usecases::create_entry(&conf.workspace, &expr.object, &expr.text)?.0;\n\n if conf.editor != \"\" {\n\n editor_exec::edit_file(conf.editor, format!(\"{:}\", target_file.display()))?;\n\n }\n\n\n\n entry_usecases::sync_in_path(&paths)?\n\n }\n\n \"edit\" => {\n\n let target_file =\n\n find_entry_path(paths.base, &expr.object, expr.index_from_parameter())?;\n\n\n\n if conf.editor != \"\" {\n\n editor_exec::edit_file(conf.editor, format!(\"{:}\", target_file.display()))?;\n", "file_path": "src/cli/entry_action.rs", "rank": 51, "score": 80477.00164283659 }, { "content": "export const config: Config = {\n\n namespace: 'quake-render',\n\n outputTargets: [\n\n {\n\n type: 'dist',\n\n esmLoaderPath: '../loader',\n\n },\n\n {\n\n type: 'dist-custom-elements-bundle',\n\n },\n\n {\n\n type: 'docs-readme',\n\n },\n\n {\n\n type: 'www',\n\n serviceWorker: null, // disable service workers\n\n },\n\n ],\n", "file_path": "quake_webapp/quake-render/stencil.config.ts", "rank": 52, "score": 80244.58327522277 }, { "content": 
"let content = fs.readFileSync('index.html');\n", "file_path": "quake_webapp/build.js", "rank": 53, "score": 73756.43706258139 }, { "content": "#[derive(Parser)]\n\n#[clap(version = \"0.0.1\", author = \"Phodal HUANG<h@phodal.com>\")]\n\nstruct Opts {\n\n #[clap(subcommand)]\n\n cmd: ImportCmd,\n\n}\n\n\n", "file_path": "quake_importer/src/main.rs", "rank": 54, "score": 72431.56292258257 }, { "content": "#[derive(Parser)]\n\n#[grammar = \"parser/quake.pest\"]\n\nstruct QuakeParser;\n\n\n", "file_path": "quake_core/src/parser/parser.rs", "rank": 56, "score": 70979.77920236293 }, { "content": "fn setup_log() {\n\n use tracing_subscriber::prelude::*;\n\n let filter_layer = tracing_subscriber::filter::LevelFilter::DEBUG;\n\n let fmt_layer = tracing_subscriber::fmt::layer().with_target(true);\n\n\n\n tracing_subscriber::registry()\n\n .with(filter_layer)\n\n .with(fmt_layer)\n\n .init();\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 57, "score": 67062.83681639035 }, { "content": "fn main() {\n\n let opts: Opts = Opts::parse();\n\n match opts.cmd {\n\n ImportCmd::SQLITE(sqlite) => {\n\n let output = PathBuf::from(sqlite.output);\n\n let path = sqlite.path.as_str();\n\n\n\n match sqlite.inside_type.as_str() {\n\n \"mezzanine\" => {\n\n dump_phodal_com(path, output);\n\n return;\n\n }\n\n \"apple-notes\" => {\n\n dump_apple_notes(path, output);\n\n return;\n\n }\n\n &_ => {}\n\n }\n\n\n\n if sqlite.sql.len() > 0 {\n", "file_path": "quake_importer/src/main.rs", "rank": 58, "score": 66317.34590152517 }, { "content": "fn main() {}\n", "file_path": "quake_gui/src/main.rs", "rank": 59, "score": 66317.34590152517 }, { "content": "fn main() {}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use std::collections::HashMap;\n\n use std::fs;\n\n use std::path::PathBuf;\n\n\n\n use jieba_rs::Jieba;\n\n use walkdir::{DirEntry, WalkDir};\n\n\n\n fn is_markdown(entry: &DirEntry) -> bool {\n\n entry\n\n .file_name()\n\n .to_str()\n\n .map(|s| s.ends_with(\".md\"))\n\n .unwrap_or(false)\n\n 
}\n\n\n\n fn count_by_path(path: PathBuf) {\n", "file_path": "quake_analysis/src/main.rs", "rank": 60, "score": 66317.34590152517 }, { "content": "module.exports = {\n\n webpack: function(config, env) {\n\n config.optimization.splitChunks = {\n\n cacheGroups: {\n\n default: false\n\n }\n\n };\n\n\n\n config.output = {\n\n ...config.output,\n\n filename: `static/quake-editor.min.js`,\n\n };\n\n\n\n config.optimization.runtimeChunk = false;\n\n return config;\n\n }\n\n}\n", "file_path": "quake_webapp/quake-editor/config-overrides.js", "rank": 61, "score": 64754.047936099436 }, { "content": "module.exports = {\n\n webpack: function(config, env) {\n\n config.optimization.splitChunks = {\n\n cacheGroups: {\n\n default: false\n\n }\n\n };\n\n\n\n config.output = {\n\n ...config.output,\n\n filename: `static/quake-calendar-timeline.min.js`,\n\n };\n\n\n\n config.optimization.runtimeChunk = false;\n\n return config;\n\n }\n\n}\n", "file_path": "quake_webapp/packages/calendar/config-overrides.js", "rank": 62, "score": 64754.047936099436 }, { "content": "/// create entry by `path`, `type`, `text`\n\n/// process:\n\n/// 1. looking for entry define file\n\n/// 2. create entry file\n\n/// 3. 
update entry node info index\n\n///\n\npub fn create_entry(\n\n quake_path: &String,\n\n entry_type: &String,\n\n entry_text: &String,\n\n) -> Result<(PathBuf, EntryFile), Box<dyn Error>> {\n\n let paths = EntryPaths::init(quake_path, entry_type);\n\n let entries_define = find_entry_define(&paths, entry_type);\n\n let mut entry_info = entry_node_info::entry_info_from_path(&paths.entry_node_info);\n\n\n\n let new_index = entry_info.index + 1;\n\n let index = new_index;\n\n let text = entry_text.as_str();\n\n let new_md_file = EntryFile::file_name(index, text);\n\n let mut target_path = paths.base.join(new_md_file);\n\n File::create(&target_path)?;\n\n\n\n let mut entry_file =\n\n create_entry_file(&entries_define, &mut target_path, entry_text.to_string());\n\n entry_file.id = new_index;\n\n\n\n entry_info.inc();\n\n update_entry_info(&paths.entry_node_info, &mut entry_info);\n\n\n\n Ok((target_path, entry_file))\n\n}\n\n\n", "file_path": "quake_core/src/usecases/entry_usecases.rs", "rank": 63, "score": 61682.84887651456 }, { "content": "pub fn feed_entry(\n\n index_name: &String,\n\n content: &String,\n\n search_url: &String,\n\n) -> Result<(), Box<dyn Error>> {\n\n let url = format!(\"{:}/indexes/{:}/documents\", search_url, index_name);\n\n let cmd_line = format!(\n\n \"curl -i -X POST '{:}' \\\n\n --header 'content-type: application/json' \\\n\n --data-binary {:?}\",\n\n url, content\n\n );\n\n\n\n info!(\"{:?}\", cmd_line);\n\n exec_runner::cmd_runner(cmd_line)?;\n\n\n\n Ok(())\n\n}\n\n\n\n// todo: add sort by date\n", "file_path": "src/helper/exec_wrapper/meili_exec.rs", "rank": 64, "score": 61674.79120280384 }, { "content": "pub fn update_entry_fields(\n\n type_path: PathBuf,\n\n entry_type: &str,\n\n index_id: usize,\n\n update_map: &HashMap<String, String>,\n\n) -> Result<EntryFile, Box<dyn Error>> {\n\n let entry_path = find_entry_path(type_path, &entry_type.to_string(), index_id)?;\n\n let string = fs::read_to_string(&entry_path)?;\n\n let mut entry_file = 
EntryFile::from(string.as_str(), index_id)?;\n\n\n\n for (key, value) in update_map {\n\n if key != \"content\" {\n\n entry_file.update_field(key, value);\n\n }\n\n }\n\n\n\n if let Some(_val) = entry_file.fields.get(\"updated_date\") {\n\n entry_file.update_field(&\"updated_date\".to_string(), &date_now())\n\n }\n\n\n", "file_path": "quake_core/src/usecases/entry_usecases.rs", "rank": 65, "score": 61050.76531585443 }, { "content": "pub fn quake_rocket() -> Rocket<Build> {\n\n let figment = Figment::from(rocket::Config::default())\n\n .merge(Serialized::defaults(Config::default()))\n\n .merge(Yaml::file(\".quake.yaml\"))\n\n .merge(Env::prefixed(\"APP_\").global())\n\n .select(Profile::from_env_or(\"workspace\", \".\"))\n\n .select(Profile::from_env_or(\"server_location\", \"web\"));\n\n\n\n let server: String = figment.extract_inner(\"server_location\").unwrap();\n\n rocket::custom(figment)\n\n .mount(\"/\", FileServer::from(server))\n\n .mount(\n\n \"/entry\",\n\n routes![\n\n entry_api::get_entries,\n\n entry_api::get_entries_csv,\n\n entry_api::get_entries_from_csv,\n\n entry_api::get_entry,\n\n entry_api::create_entry,\n\n entry_api::update_entry\n", "file_path": "src/server/mod.rs", "rank": 66, "score": 59211.637529207 }, { "content": "fn endway(decl: Pair<Rule>) -> Endway {\n\n let mut endway = Endway::default();\n\n for pair in decl.into_inner() {\n\n match pair.as_rule() {\n\n Rule::parameters => {\n\n endway.from = parameters(pair);\n\n }\n\n Rule::component_decl => {\n\n for name in pair.into_inner() {\n\n match name.as_rule() {\n\n Rule::component_name => endway.component = String::from(name.as_str()),\n\n _ => {\n\n println!(\"{}\", name);\n\n }\n\n }\n\n }\n\n }\n\n Rule::from | Rule::to | Rule::s_quote | Rule::e_quote => {}\n\n _ => {\n\n println!(\"{}\", pair);\n\n }\n\n }\n\n }\n\n\n\n endway\n\n}\n\n\n", "file_path": "quake_core/src/parser/parser.rs", "rank": 67, "score": 56663.529347142525 }, { "content": "pub fn is_hidden(entry: &DirEntry) -> 
bool {\n\n entry\n\n .file_name()\n\n .to_str()\n\n .map(|s| s.starts_with(\".\"))\n\n .unwrap_or(false)\n\n}\n\n\n", "file_path": "src/cli/quake_action.rs", "rank": 68, "score": 56663.529347142525 }, { "content": "fn midway(decl: Pair<Rule>) -> Midway {\n\n let mut midway = Midway::default();\n\n for pair in decl.into_inner() {\n\n match pair.as_rule() {\n\n Rule::parameters => {\n\n midway.from = parameters(pair);\n\n }\n\n Rule::parameter => {\n\n midway.end = value(pair);\n\n }\n\n Rule::from | Rule::to | Rule::s_quote | Rule::e_quote => {}\n\n _ => {\n\n println!(\"{}\", pair);\n\n }\n\n }\n\n }\n\n midway\n\n}\n\n\n", "file_path": "quake_core/src/parser/parser.rs", "rank": 69, "score": 56663.529347142525 }, { "content": "fn build_transflow(decl: TransflowDecl) -> QuakeTransflowNode {\n\n let mut transflow = QuakeTransflowNode::default();\n\n transflow.routes = decl\n\n .flows\n\n .iter()\n\n .map(|flow_decl| {\n\n let mut route = Route::default();\n\n match flow_decl {\n\n TransflowEnum::Midway(way) => {\n\n route.to = way.end.clone();\n\n for param in &way.from {\n\n route.from.push(param.value.clone())\n\n }\n\n\n\n route.naming();\n\n }\n\n TransflowEnum::Endway(way) => {\n\n route.to = way.component.clone();\n\n route.is_end_way = true;\n\n for param in &way.from {\n", "file_path": "quake_core/src/parser/quake.rs", "rank": 70, "score": 56189.97380641497 }, { "content": "fn transflow_decl(decl: Pair<Rule>) -> TransflowDecl {\n\n let mut action = TransflowDecl::default();\n\n for pair in decl.into_inner() {\n\n match pair.as_rule() {\n\n Rule::transflow_expr => {\n\n if let Some(flow) = transflow_expr(pair) {\n\n action.flows.push(flow);\n\n }\n\n }\n\n _ => {\n\n println!(\"{}\", pair);\n\n }\n\n }\n\n }\n\n action\n\n}\n\n\n", "file_path": "quake_core/src/parser/parser.rs", "rank": 71, "score": 55476.588370423764 }, { "content": "fn action_decl(decl: Pair<Rule>) -> ActionDecl {\n\n let mut action = ActionDecl::new();\n\n for pair in decl.into_inner() 
{\n\n match pair.as_rule() {\n\n Rule::parameters => {\n\n action.parameters = parameters(pair);\n\n }\n\n Rule::action => {\n\n action.action = String::from(pair.as_str());\n\n }\n\n Rule::object => {\n\n action.object = String::from(pair.as_str());\n\n }\n\n Rule::text => {\n\n action.text = String::from(pair.as_str());\n\n }\n\n _ => {\n\n println!(\"{}\", pair);\n\n }\n\n }\n\n }\n\n\n\n action\n\n}\n\n\n", "file_path": "quake_core/src/parser/parser.rs", "rank": 72, "score": 55476.588370423764 }, { "content": "fn tag_to_owned<'a>(tag: Tag) -> Tag<'a> {\n\n match tag {\n\n Tag::Paragraph => Tag::Paragraph,\n\n Tag::Heading(level) => Tag::Heading(level),\n\n Tag::BlockQuote => Tag::BlockQuote,\n\n Tag::CodeBlock(codeblock_kind) => Tag::CodeBlock(codeblock_kind_to_owned(codeblock_kind)),\n\n Tag::List(optional) => Tag::List(optional),\n\n Tag::Item => Tag::Item,\n\n Tag::FootnoteDefinition(cow_str) => {\n\n Tag::FootnoteDefinition(CowStr::from(cow_str.into_string()))\n\n }\n\n Tag::Table(alignment_vector) => Tag::Table(alignment_vector),\n\n Tag::TableHead => Tag::TableHead,\n\n Tag::TableRow => Tag::TableRow,\n\n Tag::TableCell => Tag::TableCell,\n\n Tag::Emphasis => Tag::Emphasis,\n\n Tag::Strong => Tag::Strong,\n\n Tag::Strikethrough => Tag::Strikethrough,\n\n Tag::Link(link_type, cow_str1, cow_str2) => Tag::Link(\n\n link_type,\n", "file_path": "quake_core/src/markdown/md_processor.rs", "rank": 73, "score": 54924.9072489041 }, { "content": "fn event_to_owned<'a>(event: Event) -> Event<'a> {\n\n match event {\n\n Event::Start(tag) => Event::Start(tag_to_owned(tag)),\n\n Event::End(tag) => Event::End(tag_to_owned(tag)),\n\n Event::Text(cowstr) => Event::Text(CowStr::from(cowstr.into_string())),\n\n Event::Code(cowstr) => Event::Code(CowStr::from(cowstr.into_string())),\n\n Event::Html(cowstr) => Event::Html(CowStr::from(cowstr.into_string())),\n\n Event::FootnoteReference(cowstr) => {\n\n Event::FootnoteReference(CowStr::from(cowstr.into_string()))\n\n }\n\n 
Event::SoftBreak => Event::SoftBreak,\n\n Event::HardBreak => Event::HardBreak,\n\n Event::Rule => Event::Rule,\n\n Event::TaskListMarker(checked) => Event::TaskListMarker(checked),\n\n }\n\n}\n\n\n", "file_path": "quake_core/src/markdown/md_processor.rs", "rank": 74, "score": 54924.9072489041 }, { "content": "fn parameters(decl: Pair<Rule>) -> Vec<Parameter> {\n\n let mut params = vec![];\n\n for pair in decl.into_inner() {\n\n match pair.as_rule() {\n\n Rule::parameter => {\n\n let mut param = Parameter::default();\n\n param.value = value(pair);\n\n\n\n params.push(param)\n\n }\n\n Rule::s_quote => {}\n\n Rule::e_quote => {}\n\n _ => {\n\n println!(\"{}\", pair);\n\n }\n\n }\n\n }\n\n\n\n params\n\n}\n\n\n", "file_path": "quake_core/src/parser/parser.rs", "rank": 75, "score": 54924.9072489041 }, { "content": "pub fn tui_main_loop() -> Result<(), Box<dyn Error>> {\n\n enable_raw_mode()?;\n\n let mut stdout = io::stdout();\n\n execute!(stdout, EnterAlternateScreen, EnableMouseCapture)?;\n\n let backend = CrosstermBackend::new(stdout);\n\n let mut terminal = Terminal::new(backend)?;\n\n\n\n let app = App::new();\n\n let res = run_app(&mut terminal, app);\n\n\n\n disable_raw_mode()?;\n\n execute!(\n\n terminal.backend_mut(),\n\n LeaveAlternateScreen,\n\n DisableMouseCapture\n\n )?;\n\n terminal.show_cursor()?;\n\n\n\n if let Err(err) = res {\n\n println!(\"{:?}\", err);\n\n }\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "quake_tui/src/lib.rs", "rank": 76, "score": 54341.28390043267 }, { "content": "fn transflow_expr(decl: Pair<Rule>) -> Option<TransflowEnum> {\n\n for pair in decl.into_inner() {\n\n match pair.as_rule() {\n\n Rule::midway => return Some(TransflowEnum::Midway(midway(pair))),\n\n Rule::endway => return Some(TransflowEnum::Endway(endway(pair))),\n\n _ => {\n\n println!(\"{}\", pair);\n\n }\n\n }\n\n }\n\n\n\n None\n\n}\n\n\n", "file_path": "quake_core/src/parser/parser.rs", "rank": 77, "score": 53776.40598892349 }, { "content": "fn 
codeblock_kind_to_owned<'a>(codeblock_kind: CodeBlockKind) -> CodeBlockKind<'a> {\n\n match codeblock_kind {\n\n CodeBlockKind::Indented => CodeBlockKind::Indented,\n\n CodeBlockKind::Fenced(cow_str) => {\n\n CodeBlockKind::Fenced(CowStr::from(cow_str.into_string()))\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::markdown::md_processor::MdProcessor;\n\n use std::fs;\n\n use std::path::PathBuf;\n\n\n\n #[ignore]\n\n #[test]\n\n fn markdown_test() {\n\n let base = PathBuf::from(\"_fixtures\").join(\"md\");\n\n let origin = base.join(\"origin.md\");\n", "file_path": "quake_core/src/markdown/md_processor.rs", "rank": 78, "score": 51687.37640399167 }, { "content": "fn list_entry_types() -> Result<Vec<ListItem<'static>>, Box<dyn Error>> {\n\n let config: QuakeConfig = serde_yaml::from_str(fs::read_to_string(\".quake.yaml\")?.as_str())?;\n\n let entry_defines_path = Path::new(&config.workspace).join(\"entries-define.yaml\");\n\n let entry_defines: EntryDefines =\n\n serde_yaml::from_str(&fs::read_to_string(entry_defines_path)?)?;\n\n\n\n Ok(entry_defines\n\n .entries\n\n .iter()\n\n .map(|define| {\n\n let entry_type = Spans::from(vec![Span::styled(\n\n define.entry_type.clone(),\n\n Style::default().fg(Color::Yellow),\n\n )]);\n\n\n\n ListItem::new(vec![entry_type])\n\n })\n\n .collect())\n\n}\n", "file_path": "quake_tui/src/ui.rs", "rank": 79, "score": 50771.37379888367 }, { "content": "/// load from `.quake`\n\n#[derive(Debug, Serialize, Deserialize)]\n\npub struct QuakeConfig {\n\n // set default editor\n\n pub editor: String,\n\n pub workspace: String,\n\n pub search_url: String,\n\n pub server_location: String,\n\n}\n\n\n\nimpl Default for QuakeConfig {\n\n fn default() -> Self {\n\n QuakeConfig {\n\n editor: \"\".to_string(),\n\n workspace: \"\".to_string(),\n\n search_url: \"\".to_string(),\n\n server_location: \"\".to_string(),\n\n }\n\n }\n\n}\n", "file_path": "quake_core/src/quake_config.rs", "rank": 80, "score": 40643.76366409962 }, { 
"content": "use std::fs;\n\nuse std::path::PathBuf;\n\n\n\nuse quake_microsoft_todo::tasks::{TodoTask, WellknownListName};\n\n\n\nuse crate::sqlite_to_file::simple_escape;\n\nuse quake_core::entry::entry_file::EntryFile;\n\n\n\n#[derive(Serialize, Deserialize, Debug)]\n\n#[serde(rename_all = \"camelCase\")]\n\npub struct OutputList {\n\n pub display_name: String,\n\n pub id: String,\n\n pub wellknown_list_name: WellknownListName,\n\n pub children: Vec<TodoTask>,\n\n}\n\n\n", "file_path": "quake_importer/src/todo_to_file.rs", "rank": 81, "score": 40378.99782056152 }, { "content": " None => \"\".to_string(),\n\n Some(dat) => dat.date_time,\n\n };\n\n file.add_field(\"completed_date\", format!(\"{:?}\", completed_date));\n\n\n\n let due_date = match todo.due_date_time {\n\n None => \"\".to_string(),\n\n Some(dat) => dat.date_time,\n\n };\n\n file.add_field(\"due_date\", format!(\"{:?}\", due_date));\n\n\n\n file.add_field(\"importance\", format!(\"{:?}\", todo.importance));\n\n file.add_field(\"status\", format!(\"{:?}\", todo.status));\n\n\n\n file.name = EntryFile::file_name(index, title.as_str());\n\n\n\n file.content = \"\\n\\n\".to_string();\n\n file.content.push_str(todo.body.content.as_str());\n\n\n\n match fs::write(path.join(file.name.clone()), file.to_string()) {\n", "file_path": "quake_importer/src/todo_to_file.rs", "rank": 82, "score": 40376.994178815294 }, { "content": " }\n\n\n\n file.add_field(name.as_str(), simple_escape(value));\n\n }\n\n }\n\n\n\n file.name = EntryFile::file_name(id, title.as_str());\n\n\n\n match fs::write(path.join(file.name.clone()), file.to_string()) {\n\n Ok(_) => {}\n\n Err(err) => {\n\n println!(\"{:?}\", file.name.clone());\n\n println!(\"{:?}\", err);\n\n }\n\n }\n\n}\n\n\n", "file_path": "quake_importer/src/sqlite_to_file.rs", "rank": 83, "score": 40375.402943440866 }, { "content": "use std::error::Error;\n\nuse std::fs;\n\nuse std::path::PathBuf;\n\n\n\nuse rusqlite::types::ValueRef;\n\nuse rusqlite::{Connection, 
Row};\n\n\n\nuse quake_core::entry::entry_file::EntryFile;\n\n\n", "file_path": "quake_importer/src/sqlite_to_file.rs", "rank": 84, "score": 40371.19066237591 }, { "content": " Ok(_) => {}\n\n Err(err) => {\n\n println!(\"{:?}\", file.name.clone());\n\n println!(\"{:?}\", err);\n\n }\n\n }\n\n\n\n index = index + 1\n\n }\n\n }\n\n}\n", "file_path": "quake_importer/src/todo_to_file.rs", "rank": 85, "score": 40361.808402742754 }, { "content": "use std::fs;\n\nuse std::path::PathBuf;\n\n\n\npub struct EntryPaths {\n\n pub base: PathBuf,\n\n pub entry_node_info: PathBuf,\n\n pub entries_define: PathBuf,\n\n pub entries_csv: PathBuf,\n\n pub transflows: PathBuf,\n\n}\n\n\n\nimpl EntryPaths {\n\n pub fn init(path: &String, object: &String) -> EntryPaths {\n\n let path = PathBuf::from(path);\n\n\n\n let obj_dir = &path.join(object);\n\n let _ = fs::create_dir(obj_dir);\n\n\n\n EntryPaths {\n\n base: PathBuf::from(&obj_dir),\n", "file_path": "quake_core/src/entry/entry_paths.rs", "rank": 86, "score": 39828.2962718395 }, { "content": " entries_csv: PathBuf::from(&obj_dir.join(\"entries.csv\")),\n\n entry_node_info: PathBuf::from(&obj_dir.join(\"entry-node-info.yaml\")),\n\n entries_define: PathBuf::from(&path.join(\"entries-define.yaml\")),\n\n transflows: PathBuf::from(&path.join(\"transflows.yaml\")),\n\n }\n\n }\n\n\n\n pub fn entries_csv() -> &'static str {\n\n \"entries.csv\"\n\n }\n\n\n\n pub fn transflows_yaml() -> &'static str {\n\n \"transflows.yaml\"\n\n }\n\n\n\n pub fn transfuncs() -> &'static str {\n\n \"transfuncs.js\"\n\n }\n\n}\n", "file_path": "quake_core/src/entry/entry_paths.rs", "rank": 87, "score": 39825.0648353126 }, { "content": "use std::error::Error;\n\nuse std::path::PathBuf;\n\n\n\nuse indexmap::IndexMap;\n\nuse serde::ser::SerializeMap;\n\nuse serde::{Deserialize, Serialize, Serializer};\n\nuse serde_yaml::Value;\n\n\n\nuse crate::entry::slug::slugify;\n\nuse crate::errors::QuakeError;\n\n\n\n#[derive(Deserialize, PartialEq, Debug)]\n\npub struct 
EntryFile {\n\n pub id: usize,\n\n pub path: PathBuf,\n\n pub name: String,\n\n pub fields: IndexMap<String, String>,\n\n pub content: String,\n\n}\n\n\n", "file_path": "quake_core/src/entry/entry_file.rs", "rank": 88, "score": 39654.13612879585 }, { "content": " path: Default::default(),\n\n name: \"\".to_string(),\n\n fields: IndexMap::default(),\n\n content: \"\".to_string(),\n\n }\n\n }\n\n}\n\n\n\nimpl ToString for EntryFile {\n\n fn to_string(&self) -> String {\n\n let mut output = vec![];\n\n output.push(\"---\".to_string());\n\n for (key, value) in &self.fields {\n\n if !key.eq(\"content\") {\n\n output.push(format!(\"{}: {}\", key, value));\n\n }\n\n }\n\n output.push(\"---\".to_string());\n\n\n\n let mut str = output.join(\"\\n\");\n", "file_path": "quake_core/src/entry/entry_file.rs", "rank": 89, "score": 39649.59552337719 }, { "content": " let index: usize = index_str.parse()?;\n\n Ok(index)\n\n }\n\n\n\n pub fn from(text: &str, index_id: usize) -> Result<EntryFile, Box<dyn Error>> {\n\n if !text.starts_with(\"---\") {\n\n return Ok(EntryFile::default());\n\n }\n\n\n\n let (front_matter, content) = Self::split_markdown(text);\n\n\n\n let mut fields: IndexMap<String, String> = IndexMap::new();\n\n for document in serde_yaml::Deserializer::from_str(&front_matter) {\n\n let value = match Value::deserialize(document) {\n\n Ok(value) => Ok(value),\n\n Err(err) => {\n\n println!(\"{}\", front_matter);\n\n println!(\"{:?}\", err);\n\n Err(err)\n\n }\n", "file_path": "quake_core/src/entry/entry_file.rs", "rank": 90, "score": 39647.57269424781 }, { "content": "impl Serialize for EntryFile {\n\n fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n\n {\n\n let mut map = serializer.serialize_map(Some(self.fields.len()))?;\n\n for (k, v) in &self.fields {\n\n map.serialize_entry(&k.to_string(), &v)?;\n\n }\n\n\n\n map.serialize_entry(\"id\", &self.id)?;\n\n map.serialize_entry(\"content\", &self.content)?;\n\n map.end()\n\n 
}\n\n}\n\n\n\nimpl Default for EntryFile {\n\n fn default() -> Self {\n\n EntryFile {\n\n id: 1,\n", "file_path": "quake_core/src/entry/entry_file.rs", "rank": 91, "score": 39646.84781134914 }, { "content": " }?;\n\n if let Value::Mapping(mapping) = value {\n\n for (v_key, v_value) in mapping {\n\n let key = ValueConverter::string(v_key);\n\n let value = ValueConverter::string(v_value);\n\n fields.insert(key, value);\n\n }\n\n }\n\n }\n\n\n\n Ok(EntryFile {\n\n id: index_id,\n\n path: Default::default(),\n\n name: \"\".to_string(),\n\n fields: fields,\n\n content: String::from(content),\n\n })\n\n }\n\n\n\n fn split_markdown(text: &str) -> (String, String) {\n", "file_path": "quake_core/src/entry/entry_file.rs", "rank": 92, "score": 39645.337836524996 }, { "content": " }\n\n\n\n pub fn update_field(&mut self, field: &String, value: &String) {\n\n match self.fields.get_mut(field) {\n\n None => {}\n\n Some(val) => {\n\n *val = value.to_string();\n\n }\n\n };\n\n }\n\n\n\n pub fn update_content(&mut self, content: &String) {\n\n if content.starts_with(\"\\n\") || content.starts_with(\"\\r\\n\") {\n\n self.content = content.to_string();\n\n return;\n\n }\n\n\n\n self.content = \"\\n\\n\".to_string();\n\n self.content.push_str(content);\n\n }\n", "file_path": "quake_core/src/entry/entry_file.rs", "rank": 93, "score": 39645.21653183594 }, { "content": "\n\n #[test]\n\n fn to_json() {\n\n let entry_file = EntryFile::from(demo_text().as_str(), 1).unwrap();\n\n assert_eq!(\n\n r#\"{\"title\":\"hello, world\",\"authors\":\"Phodal HUANG<h@phodal.com>\",\"description\":\"a hello, world\",\"created_date\":\"2021.11.23\",\"updated_date\":\"2021.11.21\",\"id\":1,\"content\":\"\\n\\nsample\\n\\n\"}\"#,\n\n serde_json::to_string(&entry_file).unwrap()\n\n );\n\n }\n\n\n\n fn demo_text() -> String {\n\n let text = \"---\n\ntitle: hello, world\n\nauthors: Phodal HUANG<h@phodal.com>\n\ndescription: a hello, world\n\ncreated_date: 2021.11.23\n", "file_path": 
"quake_core/src/entry/entry_file.rs", "rank": 94, "score": 39644.93398183258 }, { "content": " str.push_str(&*self.content);\n\n\n\n str\n\n }\n\n}\n\n\n\nimpl EntryFile {\n\n pub fn file_prefix(index: usize) -> String {\n\n format!(\"{:0>4}\", index)\n\n }\n\n\n\n pub fn file_name(index: usize, text: &str) -> String {\n\n format!(\"{:0>4}-{:}.md\", index, slugify(text))\n\n }\n\n\n\n pub fn id_from_name(file_name: &str) -> Result<usize, Box<dyn Error>> {\n\n if file_name.len() < 4 {\n\n return Err(Box::new(QuakeError(\"length < 4\".to_string())));\n\n }\n\n let index_str = &file_name[0..4];\n", "file_path": "quake_core/src/entry/entry_file.rs", "rank": 95, "score": 39642.113826398316 }, { "content": " }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use crate::entry::entry_file::EntryFile;\n\n\n\n #[test]\n\n fn parse_id_from_name() {\n\n let id = EntryFile::id_from_name(\"0001-demo.md\").unwrap();\n\n assert_eq!(id, 1);\n\n\n\n let msg = EntryFile::id_from_name(\"000\").expect_err(\"\");\n\n assert_eq!(\"QuakeError(\\\"length < 4\\\")\", format!(\"{:?}\", msg));\n\n\n\n let msg = EntryFile::id_from_name(\"demo.md\").expect_err(\"\");\n\n assert_eq!(\"ParseIntError { kind: InvalidDigit }\", format!(\"{:?}\", msg));\n\n }\n\n\n\n #[test]\n\n fn entry_parse() {\n\n let text = \"---\n\ntitle: hello, world\n\nauthors: Phodal HUANG<h@phodal.com>\n\ndescription: a hello, world\n\ncreated_date: 2021.11.23\n", "file_path": "quake_core/src/entry/entry_file.rs", "rank": 96, "score": 39641.41008205521 }, { "content": " (header, column)\n\n }\n\n\n\n pub fn insert_id(&mut self, value: usize) {\n\n self.fields.insert(\"id\".to_string(), value.to_string());\n\n }\n\n\n\n pub fn field(&self, field: &str) -> Option<String> {\n\n match self.fields.get(field) {\n\n None => None,\n\n Some(err) => return Some(err.to_string()),\n\n }\n\n }\n\n\n\n pub fn add_field(&mut self, key: &str, value: String) {\n\n self.fields.insert(key.to_string(), value);\n\n }\n\n\n\n pub fn 
set_fields(&mut self, fields: IndexMap<String, String>) {\n\n self.fields = fields;\n", "file_path": "quake_core/src/entry/entry_file.rs", "rank": 97, "score": 39640.41627001833 }, { "content": " }\n\n\n\n let front_matter = split_data.join(\"\\n\");\n\n others.push(\"\");\n\n let content = others.join(\"\\n\");\n\n (front_matter, content)\n\n }\n\n\n\n pub fn header_column(self, index: usize) -> (Vec<String>, Vec<String>) {\n\n let mut header: Vec<String> = vec![];\n\n let mut column: Vec<String> = vec![];\n\n column.push(index.to_string());\n\n\n\n for (key, value) in self.fields {\n\n if !key.eq(\"content\") {\n\n header.push(key);\n\n column.push(value);\n\n }\n\n }\n\n\n", "file_path": "quake_core/src/entry/entry_file.rs", "rank": 98, "score": 39640.36652653621 }, { "content": "use std::path::PathBuf;\n\n\n\nuse walkdir::{DirEntry, WalkDir};\n\n\n", "file_path": "quake_core/src/helper/file_filter.rs", "rank": 99, "score": 39636.67126118141 } ]
Rust
api/src/lib.rs
fdeantoni/ph-quakes
75d888276a091335436c9435f22d9c81f3870ad3
use serde_derive::*; pub use chrono::prelude::*; pub mod time { pub use ::chrono::Duration; } pub use geojson::{FeatureCollection, Feature, GeoJson, Geometry, Value}; use serde_json::{Map, to_value}; #[derive(Debug, Clone, Serialize, Deserialize)] pub struct Quake { datetime: DateTime<Utc>, longitude: f64, latitude: f64, magnitude: f64, depth: u16, location: String, province: String, url: String, source: String, } fn format_f64(coord: &f64) -> String { format!("{:.6}", coord) } impl PartialEq for Quake { fn eq(&self, other: &Self) -> bool { let lng = format_f64(&self.longitude); let lat = format_f64(&self.latitude); let mag = format_f64(&self.magnitude); self.datetime.eq(&other.datetime) && lng.eq(&format_f64(&other.longitude)) && lat.eq(&format_f64(&other.latitude)) && mag.eq(&format_f64(&other.magnitude)) && self.depth.eq(&other.depth) } } impl Eq for Quake {} impl Hash for Quake { fn hash<H: Hasher>(&self, state: &mut H) { let lng = format_f64(&self.longitude); let lat = format_f64(&self.latitude); let mag = format_f64(&self.magnitude); self.datetime.hash(state); lng.hash(state); lat.hash(state); mag.hash(state); self.depth.hash(state); } } impl Ord for Quake { fn cmp(&self, other: &Self) -> Ordering { self.datetime.cmp(&other.datetime) } } impl PartialOrd for Quake { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } } impl Quake { pub fn get_datetime(&self) -> DateTime<Utc> { self.datetime.clone() } pub fn get_longitude(&self) -> f64 { self.longitude } pub fn get_latitude(&self) -> f64 { self.latitude } pub fn get_magnitude(&self) -> f64 { self.magnitude } pub fn get_depth(&self) -> u16 { self.depth } pub fn get_location(&self) -> String { self.location.clone() } pub fn get_province(&self) -> String { self.province.clone() } pub fn get_url(&self) -> String { self.url.clone() } pub fn get_source(&self) -> String { self.source.clone() } pub fn new(datetime: DateTime<Utc>, longitude: f64, latitude: f64, magnitude: f64, depth: u16, 
location: String, province: String, url: String, source: String) -> Quake { Quake { datetime, longitude, latitude, magnitude, depth, location, province, url, source, } } pub fn to_geojson_feature(&self) -> Feature { let geometry = Geometry::new( Value::Point(vec![self.longitude, self.latitude]) ); let mut properties = Map::new(); properties.insert( String::from("datetime"), to_value(format!("{:?}", self.datetime)).unwrap(), ); properties.insert( String::from("start"), to_value(format!("{:?}", self.datetime)).unwrap(), ); properties.insert( String::from("end"), to_value(format!("{:?}", self.datetime + chrono::Duration::days(1))).unwrap(), ); properties.insert( String::from("longitude"), to_value(self.longitude).unwrap(), ); properties.insert( String::from("latitude"), to_value(self.latitude).unwrap(), ); properties.insert( String::from("magnitude"), to_value(self.magnitude).unwrap(), ); properties.insert( String::from("depth"), to_value(self.depth).unwrap(), ); properties.insert( String::from("location"), to_value(self.location.clone()).unwrap(), ); properties.insert( String::from("province"), to_value(self.province.clone()).unwrap(), ); properties.insert( String::from("url"), to_value(self.url.clone()).unwrap(), ); properties.insert( String::from("source"), to_value(self.source.clone()).unwrap(), ); Feature { bbox: None, geometry: Some(geometry), id: None, properties: Some(properties), foreign_members: None, } } pub fn find_province(text: String) -> (String, String) { match text.rfind("(") { Some(pos) => { let len = text.len(); let province = &text[pos + 1..len - 1]; let location = &text[0..pos - 1]; (location.to_string(), province.to_string()) } None => { let location = text.clone(); let mut province = ""; if let Some(pos) = text.rfind("of ") { province = &text[pos + 3..text.len()] } (location, province.to_string()) } } } } #[derive(Debug, Clone)] pub struct QuakeList(Box<[Quake]>); impl QuakeList { pub fn list(&self) -> Box<[Quake]> { self.0.clone() } pub fn 
new(vec: Vec<Quake>) -> QuakeList { QuakeList(vec.into_boxed_slice()) } pub fn to_geojson(&self) -> GeoJson { let bbox = None; let foreign_members = None; let features: Vec<Feature> = self.0.iter().map(|quake| quake.to_geojson_feature()).collect(); GeoJson::FeatureCollection(FeatureCollection { bbox, features, foreign_members, } ) } } use std::borrow::Cow; use std::hash::{Hash, Hasher}; use std::cmp::Ordering; #[derive(Clone, Debug, PartialEq, Eq)] pub struct QuakeError { description: Cow<'static, str>, } impl QuakeError { pub fn new<S>(description: S) -> Self where S: Into<Cow<'static, str>>, { QuakeError { description: description.into(), } } } impl std::error::Error for QuakeError { fn description(&self) -> &str { &self.description } } impl std::fmt::Display for QuakeError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.write_str("Quake error: ")?; f.write_str(&self.description) } } impl From<std::io::Error> for QuakeError { fn from(error: std::io::Error) -> Self { QuakeError::new(format!( "IO error occurred! 
{}", error.to_string() )) } } #[cfg(test)] mod tests { use super::*; use std::collections::HashSet; fn test_quake() -> Quake { let datetime = Utc::now(); let longitude: f64 = 1.0; let latitude: f64 = 0.0; let magnitude: f64 = 2.4; let depth: u16 = 134; let location = "Some location".to_string(); let province = "Some province".to_string(); let url = "http://example.com".to_string(); let source = "Some source".to_string(); Quake::new(datetime, longitude, latitude, magnitude, depth, location, province, url, source) } #[test] fn geojson_conversion() { let quake = test_quake(); let feature = quake.to_geojson_feature(); let geojson = GeoJson::Feature(feature); println!("{}", geojson.to_string()); } #[test] fn compare_quakes() { let one = test_quake(); let mut two = one.clone(); two.url = "https://some.other.url".to_string(); assert_eq!(one, two); } #[test] fn compare_quakes_set() { let one = test_quake(); let mut two = one.clone(); two.url = "https://some.other.url".to_string(); let mut three = test_quake(); three.depth = 100; let one_vec = vec![one.clone()]; let two_vec = vec![two.clone(), three.clone()]; let mut set: HashSet<Quake> = HashSet::new(); set.extend(one_vec); set.extend(two_vec); let mut sorted = set.clone().into_iter().collect::<Vec<Quake>>(); sorted.sort(); println!("{:?}", &set); assert_eq!(sorted[0], one); assert_eq!(sorted[1], three); } #[actix_rt::test] async fn retrieve_philvolcs_quakes() { let quake = test_quake(); let list = QuakeList::new(vec![quake]); let geojson = list.to_geojson(); println!("{}", geojson.to_string()); } }
use serde_derive::*; pub use chrono::prelude::*; pub mod time { pub use ::chrono::Duration; } pub use geojson::{FeatureCollection, Feature, GeoJson, Geometry, Value}; use serde_json::{Map, to_value}; #[derive(Debug, Clone, Serialize, Deserialize)] pub struct Quake { datetime: DateTime<Utc>, longitude: f64, latitude: f64, magnitude: f64, depth: u16, location: String, province: String, url: String, source: String, } fn format_f64(coord: &f64) -> String { format!("{:.6}", coord) } impl PartialEq for Quake { fn eq(&self, other: &Self) -> bool { let lng = format_f64(&self.longitude); let lat = format_f64(&self.latitude); let mag = format_f64(&self.magnitude); self.datetime.eq(&other.datetime) && lng.eq(&format_f64(&other.longitude)) && lat.eq(&format_f64(&other.latitude)) && mag.eq(&format_f64(&other.magnitude)) && self.depth.eq(&other.depth) } } impl Eq for Quake {} impl Hash for Quake { fn hash<H: Hasher>(&self, state: &mut H) { let lng = format_f64(&self.longitude); let lat = format_f64(&self.latitude); let mag = format_f64(&self.magnitude); self.datetime.hash(state); lng.hash(state); lat.hash(state); mag.hash(state); self.depth.hash(state); } } impl Ord for Quake { fn cmp(&self, other: &Self) -> Ordering { self.datetime.cmp(&other.datetime) } } impl PartialOrd for Quake { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } } impl Quake { pub fn get_datetime(&self) -> DateTime<Utc> { self.datetime.clone() } pub fn get_longitude(&self) -> f64 { self.longitude } pub fn get_latitude(&self) -> f64 { self.latitude } pub fn get_magnitude(&self) -> f64 { self.magnitude } pub fn get_depth(&self) -> u16 { self.depth } pub fn get_location(&self) -> String { self.location.clone() } pub fn get_province(&self) -> String { self.province.clone() } pub fn get_url(&self) -> String { self.url.clone() } pub fn get_source(&self) -> String { self.source.clone() } pub fn new(datetime: DateTime<Utc>, longitude: f64, latitude: f64, magnitude: f64, depth: u16, 
location: String, province: String, url: String, source: String) -> Quake { Quake { datetime, longitude, latitude, magnitude, depth, location, province, url, source, } } pub fn to_geojson_feature(&self) -> Feature { let geometry = Geometry::new( Value::Point(vec![self.longitude, self.latitude]) ); let mut properties = Map::new(); properties.insert( String::from("datetime"), to_value(format!("{:?}", self.datetime)).unwrap(), ); properties.insert( String::from("start"), to_value(format!("{:?}", self.datetime)).unwrap(), ); properties.insert( String::from("end"), to_value(format!("{:?}", self.datetime + chrono::Duration::days(1))).unwrap(), ); properties.insert( String::from("longitude"), to_value(self.longitude).unwrap(), ); properties.insert( String::from("latitude"), to_value(self.latitude).unwrap(), ); properties.insert( String::from("magnitude"), to_value(self.magnitude).unwrap(), ); properties.insert( String::from("depth"), to_value(self.depth).unwrap(), ); properties.insert( String::from("location"), to_value(self.location.clone()).unwrap(), ); properties.insert( String::from("province"), to_value(self.province.clone()).unwrap(), ); properties.insert( String::from("url"), to_value(self.url.clone()).unwrap(), ); properties.insert( String::from("source"), to_value(self.source.clone()).unwrap(), ); Feature { bbox: None, geometry: Some(geometry), id: None, properties: Some(properties), foreign_members: None, } } pub fn find_province(text: String) -> (String, String) { match text.rfind("(") { Some(pos) => { let len = text.len(); let province = &text[pos + 1..len - 1]; let location = &text[
} #[derive(Debug, Clone)] pub struct QuakeList(Box<[Quake]>); impl QuakeList { pub fn list(&self) -> Box<[Quake]> { self.0.clone() } pub fn new(vec: Vec<Quake>) -> QuakeList { QuakeList(vec.into_boxed_slice()) } pub fn to_geojson(&self) -> GeoJson { let bbox = None; let foreign_members = None; let features: Vec<Feature> = self.0.iter().map(|quake| quake.to_geojson_feature()).collect(); GeoJson::FeatureCollection(FeatureCollection { bbox, features, foreign_members, } ) } } use std::borrow::Cow; use std::hash::{Hash, Hasher}; use std::cmp::Ordering; #[derive(Clone, Debug, PartialEq, Eq)] pub struct QuakeError { description: Cow<'static, str>, } impl QuakeError { pub fn new<S>(description: S) -> Self where S: Into<Cow<'static, str>>, { QuakeError { description: description.into(), } } } impl std::error::Error for QuakeError { fn description(&self) -> &str { &self.description } } impl std::fmt::Display for QuakeError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.write_str("Quake error: ")?; f.write_str(&self.description) } } impl From<std::io::Error> for QuakeError { fn from(error: std::io::Error) -> Self { QuakeError::new(format!( "IO error occurred! 
{}", error.to_string() )) } } #[cfg(test)] mod tests { use super::*; use std::collections::HashSet; fn test_quake() -> Quake { let datetime = Utc::now(); let longitude: f64 = 1.0; let latitude: f64 = 0.0; let magnitude: f64 = 2.4; let depth: u16 = 134; let location = "Some location".to_string(); let province = "Some province".to_string(); let url = "http://example.com".to_string(); let source = "Some source".to_string(); Quake::new(datetime, longitude, latitude, magnitude, depth, location, province, url, source) } #[test] fn geojson_conversion() { let quake = test_quake(); let feature = quake.to_geojson_feature(); let geojson = GeoJson::Feature(feature); println!("{}", geojson.to_string()); } #[test] fn compare_quakes() { let one = test_quake(); let mut two = one.clone(); two.url = "https://some.other.url".to_string(); assert_eq!(one, two); } #[test] fn compare_quakes_set() { let one = test_quake(); let mut two = one.clone(); two.url = "https://some.other.url".to_string(); let mut three = test_quake(); three.depth = 100; let one_vec = vec![one.clone()]; let two_vec = vec![two.clone(), three.clone()]; let mut set: HashSet<Quake> = HashSet::new(); set.extend(one_vec); set.extend(two_vec); let mut sorted = set.clone().into_iter().collect::<Vec<Quake>>(); sorted.sort(); println!("{:?}", &set); assert_eq!(sorted[0], one); assert_eq!(sorted[1], three); } #[actix_rt::test] async fn retrieve_philvolcs_quakes() { let quake = test_quake(); let list = QuakeList::new(vec![quake]); let geojson = list.to_geojson(); println!("{}", geojson.to_string()); } }
0..pos - 1]; (location.to_string(), province.to_string()) } None => { let location = text.clone(); let mut province = ""; if let Some(pos) = text.rfind("of ") { province = &text[pos + 3..text.len()] } (location, province.to_string()) } } }
function_block-function_prefixed
[ { "content": "type Row = HashMap<String, String>;\n\n\n\npub struct HtmlParser(Vec<Row>, String);\n\n\n\nimpl HtmlParser {\n\n\n\n pub async fn parse(html: String, source_url: String) -> HtmlParser {\n\n let mut collection: Vec<Row> = Vec::new();\n\n let expected_headers: HashSet<String> = [\n\n String::from(\"Date - Time\"),\n\n String::from(\"Latitude\"),\n\n String::from(\"Longitude\"),\n\n String::from(\"Depth\"),\n\n String::from(\"Mag\"),\n\n String::from(\"Location\")\n\n ].iter().cloned().collect();\n\n\n\n let document = Html::parse_document(&html);\n\n let table_selector = Selector::parse(\"table\").unwrap();\n\n let th_selector = Selector::parse(\"th p\").unwrap();\n", "file_path": "scraper/src/parser.rs", "rank": 1, "score": 74831.70008699964 }, { "content": "fn dummy_quakes() -> Vec<Quake> {\n\n vec![\n\n Quake::new(Utc::now(), 125.71, 9.15, 4.0, 1, \"TEST\".to_string(), \"TEST\".to_string(), \"https://example.com\".to_string(), \"https://twitter.com/phivolcs_dost/status/1226620233625948160\".to_string()),\n\n ]\n\n}\n\n\n\n#[actix_rt::main]\n\nasync fn main() -> std::io::Result<()> {\n\n\n\n dotenv().ok();\n\n\n\n if std::env::var(\"RUST_LOG\").is_err() {\n\n std::env::set_var(\"RUST_LOG\", \"actix_server=info,actix_web=info,quakes_server=info\");\n\n }\n\n env_logger::init();\n\n\n\n let is_test = std::env::var(\"TEST\").is_ok();\n\n if is_test {\n\n info!(\"Using dummy quakes for updates!\");\n\n } else {\n", "file_path": "server/src/main.rs", "rank": 2, "score": 69301.86833200928 }, { "content": "let geojson = {\n\n features: [],\n\n type: \"FeatureCollection\"\n", "file_path": "server/client/app/js/main.js", "rank": 3, "score": 67674.5736758512 }, { "content": "const source = require('vinyl-source-stream');\n", "file_path": "server/client/gulpfile.js", "rank": 4, "score": 49870.06623319667 }, { "content": "fn main() {\n\n resource_dir(\"./static\").build().unwrap();\n\n}\n", "file_path": "server/build.rs", "rank": 5, "score": 46473.299760126545 
}, { "content": "#[derive(Template)]\n\n#[template(path = \"index.html\")]\n\nstruct Index;\n\n\n\nasync fn index() -> impl Responder {\n\n let body = Index.render().unwrap();\n\n HttpResponse::Ok().content_type(\"text/html\").body(body)\n\n}\n\n\n\nasync fn quakes_json(cache: web::Data<Addr<cache::CacheActor>>) -> impl Responder {\n\n match cache.send(cache::GetQuakes).await {\n\n Ok(response) => web::Json(response.0.to_geojson()),\n\n Err(error) => {\n\n error!(\"Failed to retrieve latest quakes: {:?}\", error);\n\n let quakes = QuakeList::new(Vec::new());\n\n web::Json(quakes.to_geojson())\n\n }\n\n }\n\n}\n\n\n\nasync fn get_quakes() -> Vec<Quake> {\n\n quakes_scraper::get_phivolcs_quakes().await.unwrap()\n\n}\n\n\n", "file_path": "server/src/main.rs", "rank": 6, "score": 45326.24893544182 }, { "content": "#[derive(Debug,Deserialize)]\n\nstruct TokenResponse {\n\n token_type: String,\n\n access_token: String\n\n}\n\n\n\n#[derive(Debug,Clone,Deserialize)]\n\npub(crate) struct Url {\n\n expanded_url: String\n\n}\n\n\n\n#[derive(Debug,Clone,Deserialize)]\n\npub(crate) struct Entities {\n\n urls: Vec<Url>,\n\n}\n\n\n\n#[derive(Debug,Clone,Deserialize)]\n\npub(crate) struct User {\n\n screen_name: String\n\n}\n\n\n", "file_path": "twitter/src/client.rs", "rank": 7, "score": 44264.60586124433 }, { "content": "struct WsActor {\n\n cache: Addr<cache::CacheActor>,\n\n hb: Instant,\n\n}\n\n\n\nimpl Actor for WsActor {\n\n type Context = ws::WebsocketContext<Self>;\n\n\n\n /// Method is called on actor start. 
We start the heartbeat process here.\n\n fn started(&mut self, ctx: &mut Self::Context) {\n\n debug!(\"Websocket actor started...\");\n\n self.hb(ctx);\n\n let addr = ctx.address().recipient();\n\n self.cache.do_send(cache::Connect {\n\n addr,\n\n });\n\n }\n\n\n\n fn stopped(&mut self, ctx: &mut Self::Context) {\n\n debug!(\"Websocket has stopped.\");\n", "file_path": "server/src/websocket.rs", "rank": 8, "score": 44261.27876852046 }, { "content": " static quakeMarker(latlng, magnitude, depth) {\n\n return new L.circleMarker(latlng, {\n\n className: \"marker-fade-in\",\n\n radius: QuakeMap.radius(magnitude, depth),\n\n fillColor: \"#ff3b33\",\n\n color: \"#ff3b33\",\n\n weight: 1,\n\n fillOpacity: 0.6\n\n });\n", "file_path": "server/client/app/js/common.js", "rank": 9, "score": 32511.94349517898 }, { "content": " static quakeListItemHtml(props) {\n\n return '<div class=\"quake-container\">' +\n\n '<span class=\"quake-magnitude\">' + props.magnitude + '</span>' +\n\n '<h1 class=\"quake-location\">' + props.province + '</h1>' +\n\n '<h2 class=\"quake-timestamp\">' + moment(props.start).tz('Asia/Manila').format() + '</h2>' +\n\n '<aside class=\"quake-aside\">' + props.depth + ' km</aside>' +\n\n '</div>';\n", "file_path": "server/client/app/js/common.js", "rank": 10, "score": 31585.401953844917 }, { "content": "!function i(a,u,s){function c(t,e){if(!u[t]){if(!a[t]){var n=\"function\"==typeof require&&require;if(!e&&n)return n(t,!0);if(l)return l(t,!0);var r=new Error(\"Cannot find module '\"+t+\"'\");throw r.code=\"MODULE_NOT_FOUND\",r}var o=u[t]={exports:{}};a[t][0].call(o.exports,function(e){return c(a[t][1][e]||e)},o,o.exports,i,a,u,s)}return u[t].exports}for(var l=\"function\"==typeof require&&require,e=0;e<s.length;e++)c(s[e]);return c}({1:[function(e,t,n){\"use strict\";function o(e,t){for(var n=0;n<t.length;n++){var r=t[n];r.enumerable=r.enumerable||!1,r.configurable=!0,\"value\"in 
r&&(r.writable=!0),Object.defineProperty(e,r.key,r)}}Object.defineProperty(n,\"__esModule\",{value:!0}),n.default=void 0;var r=function(){function r(e,t){!function(e,t){if(!(e instanceof t))throw new TypeError(\"Cannot call a class as a function\")}(this,r),this.map=L.map(e,{center:[15.5,120.91],zoom:7,maxZoom:18,zoomControl:!1});L.tileLayer(\"https://api.mapbox.com/styles/v1/{id}/tiles/{z}/{x}/{y}?access_token={accessToken}\",{attribution:'Map data &copy; <a href=\"https://www.openstreetmap.org/\">OpenStreetMap</a> contributors, <a href=\"https://creativecommons.org/licenses/by-sa/2.0/\">CC-BY-SA</a>, Imagery © <a href=\"https://www.mapbox.com/\">Mapbox</a>',id:\"mapbox/outdoors-v11\",accessToken:\"pk.eyJ1IjoiZmRlYW50b25pIiwiYSI6ImNrNWhhOHlueTAxcHAzZHA3Nnd1MDhveWkifQ.kTW32UkDDmHFl9MGhnNrbw\",tileSize:512,zoomOffset:-1}).addTo(this.map),L.control.sidebar(t).addTo(this.map),L.control.zoom({position:\"topright\"}).addTo(this.map)}var e,t,n;return e=r,n=[{key:\"radius\",value:function(e,t){var n=Math.ceil(Math.exp(e)/t);return n<5&&(n=5),t<2&&(8<e?n=140:7<e?n=120:6<e?n=100:5<e&&(n=80)),n}},{key:\"quakeMarker\",value:function(e,t,n){return new L.circleMarker(e,{className:\"marker-fade-in\",radius:r.radius(t,n),fillColor:\"#ff3b33\",color:\"#ff3b33\",weight:1,fillOpacity:.6})}},{key:\"markerPopup\",value:function(e,t){if(e.properties){var n=e.properties,r=\"<h3>\"+n.province+\" \"+n.longitude+\", \"+n.latitude+\"</h3>\",o='<ul style=\"list-style-type:none;padding-left: 0;\"><li><b>Magnitude: </b>'+n.magnitude+\"</li><li><b>Depth: </b>\"+n.depth+\"</li><li><b>Location: </b>\"+n.location+\"</li><li><b>Timestamp: </b>\"+n.datetime+'</li><li><b>Source: </b><a href=\"'+n.url+'\" target=\"_blank\">phivolcs</li></ul>';t.bindPopup(r+o)}}},{key:\"quakeListItemHtml\",value:function(e){return'<div class=\"quake-container\"><span class=\"quake-magnitude\">'+e.magnitude+'</span><h1 class=\"quake-location\">'+e.province+'</h1><h2 
class=\"quake-timestamp\">'+moment(e.start).tz(\"Asia/Manila\").format()+'</h2><aside class=\"quake-aside\">'+e.depth+\" km</aside></div>\"}}],(t=[{key:\"leafletMap\",get:function(){return this.map}}])&&o(e.prototype,t),n&&o(e,n),r}();n.default=r},{}],2:[function(e,t,n){\"use strict\";Object.defineProperty(n,\"__esModule\",{value:!0}),n.default=void 0;var r,i=(r=e(\"./common.js\"))&&r.__esModule?r:{default:r};function u(e){return(u=\"function\"==typeof Symbol&&\"symbol\"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&\"function\"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?\"symbol\":typeof e})(e)}function a(e,t){for(var n=0;n<t.length;n++){var r=t[n];r.enumerable=r.enumerable||!1,r.configurable=!0,\"value\"in r&&(r.writable=!0),Object.defineProperty(e,r.key,r)}}function s(e,t){return(s=Object.setPrototypeOf||function(e,t){return e.__proto__=t,e})(e,t)}function c(i){var a=function(){if(\"undefined\"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if(\"function\"==typeof Proxy)return!0;try{return Date.prototype.toString.call(Reflect.construct(Date,[],function(){})),!0}catch(e){return!1}}();return function(){var e,t,n,r=l(i);if(a){var o=l(this).constructor;e=Reflect.construct(r,arguments,o)}else e=r.apply(this,arguments);return t=this,!(n=e)||\"object\"!==u(n)&&\"function\"!=typeof n?function(e){if(void 0!==e)return e;throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\")}(t):n}}function l(e){return(l=Object.setPrototypeOf?Object.getPrototypeOf:function(e){return e.__proto__||Object.getPrototypeOf(e)})(e)}var o=function(){!function(e,t){if(\"function\"!=typeof t&&null!==t)throw new TypeError(\"Super expression must either be null or a function\");e.prototype=Object.create(t&&t.prototype,{constructor:{value:e,writable:!0,configurable:!0}}),t&&s(e,t)}(u,i[\"default\"]);var e,t,n,o=c(u);function u(e,t,n){var r;return function(e,t){if(!(e instanceof t))throw new 
TypeError(\"Cannot call a class as a function\")}(this,u),(r=o.call(this,e,t)).list=document.getElementById(n),r.map.spin(!0),r.initialized=!1,r}return e=u,n=[{key:\"currentMarkers\",value:function(e){return L.geoJSON(e,{pointToLayer:function(e,t){if(e.properties)return u.quakeMarker(t,e.properties.magnitude,e.properties.depth)},onEachFeature:u.markerPopup})}},{key:\"filterOld\",value:function(e){var t=moment().utc().subtract(24,\"hours\");return{type:\"FeatureCollection\",features:e.features.filter(function(e){return moment.utc(e.properties.datetime).isAfter(t)})}}},{key:\"updateList\",value:function(o,i,a){var e=o.getLayers().sort(function(e,t){return moment(e.feature.properties.datetime)-moment(t.feature.properties.datetime)});if(e.forEach(function(t){var e=o.getLayerId(t),n=t.feature.properties,r=document.createElement(\"li\");r.setAttribute(\"data-layer-id\",e),r.innerHTML=u.quakeListItemHtml(n),r.onclick=function(e){i.flyTo(t.getLatLng(),10),i.once(\"moveend\",function(){t.openPopup()})},a.prepend(r),setTimeout(function(){r.className=r.className+\"quake-show\"},50)}),0<e.length){var t=e[e.length-1];i.flyTo(t.getLatLng(),7)}}},{key:\"clusterIcon\",value:function(e,t){var n='<div class=\"quakes-marker-icon\" style=\"'+(\"width: \"+e+\"px; height: \"+e+\"px; line-height: \"+e+\"px;\")+'\"><b>'+t+\"</b></div>\";return L.divIcon({html:n,className:\"quakes-cluster\",iconSize:L.point(e,e)})}}],(t=[{key:\"add\",value:function(e){var t=u.filterOld(e),n=u.currentMarkers(t);if(this.initialized)this.layer.addLayers(n),this.layer.refreshClusters(n),u.updateList(n,this.map,this.list);else{var r=L.markerClusterGroup({maxClusterRadius:function(e){return e<=6?80:1},iconCreateFunction:function(e){var t=40*Math.log(e.getChildCount());return 
u.clusterIcon(t,e.getChildCount())}});r.addLayer(n),this.map.addLayer(r),u.updateList(r,this.map,this.list),this.layer=r,this.map.spin(!1),this.initialized=!0}}},{key:\"clear\",value:function(){this.layer.clearLayers(),this.list.innerHTML=\"\"}}])&&a(e.prototype,t),n&&a(e,n),u}();n.default=o},{\"./common.js\":1}],3:[function(e,t,n){\"use strict\";Object.defineProperty(n,\"__esModule\",{value:!0}),n.default=void 0;var r,i=(r=e(\"./common.js\"))&&r.__esModule?r:{default:r};function u(e){return(u=\"function\"==typeof Symbol&&\"symbol\"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&\"function\"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?\"symbol\":typeof e})(e)}function a(e,t){for(var n=0;n<t.length;n++){var r=t[n];r.enumerable=r.enumerable||!1,r.configurable=!0,\"value\"in r&&(r.writable=!0),Object.defineProperty(e,r.key,r)}}function s(e,t){return(s=Object.setPrototypeOf||function(e,t){return e.__proto__=t,e})(e,t)}function c(i){var a=function(){if(\"undefined\"==typeof Reflect||!Reflect.construct)return!1;if(Reflect.construct.sham)return!1;if(\"function\"==typeof Proxy)return!0;try{return Date.prototype.toString.call(Reflect.construct(Date,[],function(){})),!0}catch(e){return!1}}();return function(){var e,t,n,r=l(i);if(a){var o=l(this).constructor;e=Reflect.construct(r,arguments,o)}else e=r.apply(this,arguments);return t=this,!(n=e)||\"object\"!==u(n)&&\"function\"!=typeof n?function(e){if(void 0!==e)return e;throw new ReferenceError(\"this hasn't been initialised - super() hasn't been called\")}(t):n}}function l(e){return(l=Object.setPrototypeOf?Object.getPrototypeOf:function(e){return e.__proto__||Object.getPrototypeOf(e)})(e)}var o=function(){!function(e,t){if(\"function\"!=typeof t&&null!==t)throw new TypeError(\"Super expression must either be null or a function\");e.prototype=Object.create(t&&t.prototype,{constructor:{value:e,writable:!0,configurable:!0}}),t&&s(e,t)}(u,i[\"default\"]);var e,t,n,o=c(u);function 
u(e,t,n){var r;return function(e,t){if(!(e instanceof t))throw new TypeError(\"Cannot call a class as a function\")}(this,u),(r=o.call(this,e,t)).list=document.getElementById(n),r.map.spin(!0),r.initialized=!1,r}return e=u,n=[{key:\"radius\",value:function(e,t){var n=Math.ceil(Math.exp(e)/t);return n<5&&(n=5),n}},{key:\"historyMarkers\",value:function(e){return L.timeline(e,{pointToLayer:function(e,t){if(e.properties&&e.properties.magnitude&&e.properties.depth)return u.quakeMarker(t,e.properties.magnitude,e.properties.depth)},onEachFeature:u.markerPopup})}},{key:\"updateList\",value:function(o,i,a){var e=o.getLayers().sort(function(e,t){return moment(e.feature.properties.datetime)-moment(t.feature.properties.datetime)});a.innerHTML=\"\",e.forEach(function(t){var e=o.getLayerId(t),n=t.feature.properties;if(i.getBounds().contains({lat:n.latitude,lng:n.longitude})){var r=document.createElement(\"li\");r.className=\"quake-show\",r.setAttribute(\"data-layer-id\",e),r.innerHTML=u.quakeListItemHtml(n),r.onclick=function(e){i.flyTo(t.getLatLng(),14),i.once(\"moveend\",function(){t.openPopup()})},a.prepend(r)}})}}],(t=[{key:\"load\",value:function(e){var t=L.timelineSliderControl({formatOutput:function(e){return moment(e).format(\"YYYY-MM-DD HH:MM:SS\")},steps:4e3,duration:8e4,position:\"bottomright\"}),n=u.historyMarkers(e),r=this.map,o=this.list;n.on(\"change\",function(e){u.updateList(e.target,r,o)}),r.on(\"moveend\",function(e){console.log(\"Event: \",e),u.updateList(n,r,o)}),t.addTo(r),t.addTimelines(n),n.addTo(r),this.layer=n,this.control=t,this.map.spin(!1),this.initialized=!0}},{key:\"clear\",value:function(){this.initialized&&(this.control.removeTimelines(),this.map.removeControl(this.control),this.control={},this.layer.clearLayers(),this.list.innerHTML=\"\")}}])&&a(e.prototype,t),n&&a(e,n),u}();n.default=o},{\"./common.js\":1}],4:[function(e,t,n){\"use strict\";var r=i(e(\"./current\")),o=i(e(\"./history\"));function i(e){return e&&e.__esModule?e:{default:e}}var 
a=new r.default(\"current-map\",\"sidebar\",\"current-list\"),u=new o.default(\"history-map\",\"sidebar\",\"history-list\"),s=!1,c={features:[],type:\"FeatureCollection\"},l=document.getElementsByClassName(\"quake-alert\")[0],f=document.getElementById(\"quake-tweet\");function p(e){console.log(e);var t=e.properties.source;t.startsWith(\"https://twitter.com/\")&&function(e){f.innerHTML=\"\";var t={conversation:l.style.display=\"none\"};window.innerWidth<800&&(t.cards=\"hidden\"),twttr.widgets.createTweet(e,f,t),setTimeout(function(){l.style.display=\"inline\",l.classList.add(\"quake-alert-show\"),setTimeout(function(){l.classList.remove(\"quake-alert-show\"),setTimeout(function(){l.style.display=\"none\"},1e3)},4e3)},1e3)}(t.split(\"/\").pop())}function d(e,t){document.getElementById(e+\"-map\").classList.remove(\"hide\"),document.getElementById(e+\"-list\").classList.remove(\"hide\"),document.getElementById(t+\"-map\").classList.add(\"hide\"),document.getElementById(t+\"-list\").classList.add(\"hide\");var n=document.getElementById(\"list-header\");\"history\"===e?(n.innerHTML=n.innerHTML.replace(\"24h\",\"History\"),u.clear(),u.leafletMap._onResize(),u.load(c)):(n.innerHTML=n.innerHTML.replace(\"History\",\"24h\"),a.leafletMap._onResize())}document.getElementById(\"history-toggle\").onclick=function(){this.classList.add(\"hide\"),d(\"history\",\"current\"),document.getElementById(\"current-toggle\").classList.remove(\"hide\")},document.getElementById(\"current-toggle\").onclick=function(){this.classList.add(\"hide\"),d(\"current\",\"history\"),document.getElementById(\"history-toggle\").classList.remove(\"hide\")},function t(){var e=(\"https:\"===window.location.protocol?\"wss://\":\"ws://\")+window.location.host+\"/ws/\",n=new WebSocket(e);n.onopen=function(){},n.onmessage=function(e){var t,n=JSON.parse(e.data);console.log(\"New quakes \",n),s&&a.clear(),t=n,c.features=c.features.concat(t.features).sort(function(e,t){return 
moment(e.properties.datetime)-moment(t.properties.datetime)}),p(c.features.slice(-1)[0]),a.add(t),s=!1},n.onclose=function(e){console.log(\"Socket is closed. Reconnect will be attempted in 10 seconds.\",e.reason),setTimeout(function(){s=!0,t()},1e4)},n.onerror=function(e){console.error(\"Socket encountered error: \",e.message,\"Closing socket\"),n.close()}}()},{\"./current\":2,\"./history\":3}]},{},[4]);", "file_path": "server/static/quakes/quakes.js", "rank": 11, "score": 31141.656740030146 }, { "content": "class QuakeMap {\n\n\n\n constructor(mapId, sidebarId) {\n\n this.map = L.map(mapId, {\n\n center: [15.5, 120.91],\n\n zoom: 7,\n\n maxZoom: 18,\n\n zoomControl: false\n\n });\n\n\n\n const mapboxUrl = 'https://api.mapbox.com/styles/v1/{id}/tiles/{z}/{x}/{y}?access_token={accessToken}';\n\n\n\n const mapboxConfig = {\n\n attribution: 'Map data &copy; <a href=\"https://www.openstreetmap.org/\">OpenStreetMap</a> contributors, <a href=\"https://creativecommons.org/licenses/by-sa/2.0/\">CC-BY-SA</a>, Imagery © <a href=\"https://www.mapbox.com/\">Mapbox</a>',\n\n id: 'mapbox/outdoors-v11',\n\n accessToken: 'pk.eyJ1IjoiZmRlYW50b25pIiwiYSI6ImNrNWhhOHlueTAxcHAzZHA3Nnd1MDhveWkifQ.kTW32UkDDmHFl9MGhnNrbw',\n\n tileSize: 512,\n\n zoomOffset: -1\n\n };\n\n\n\n L.tileLayer(mapboxUrl, mapboxConfig).addTo(this.map);\n\n\n\n L.control.sidebar(sidebarId).addTo(this.map);\n\n\n\n L.control.zoom({\n\n position: 'topright'\n\n }).addTo(this.map);\n\n }\n\n\n\n static radius(magnitude, depth) {\n\n let size = Math.ceil(Math.exp(magnitude) / depth);\n\n if(size < 5) {\n\n size = 5;\n\n }\n\n // TODO: Find a better way to do this!\n\n if (depth < 2) {\n\n if (magnitude > 8) {\n\n size = 140;\n\n } else if (magnitude > 7) {\n\n size = 120;\n\n } else if (magnitude > 6) {\n\n size = 100;\n\n } else if (magnitude > 5) {\n\n size = 80;\n\n }\n\n }\n\n return size\n\n }\n\n\n\n static quakeMarker(latlng, magnitude, depth) {\n\n return new L.circleMarker(latlng, {\n\n className: 
\"marker-fade-in\",\n\n radius: QuakeMap.radius(magnitude, depth),\n\n fillColor: \"#ff3b33\",\n\n color: \"#ff3b33\",\n\n weight: 1,\n\n fillOpacity: 0.6\n\n });\n\n }\n\n\n\n static markerPopup(feature, layer) {\n\n if (feature.properties) {\n\n const props = feature.properties;\n\n const header = '<h3>' + props.province + ' ' + props.longitude + ', ' + props.latitude + '</h3>';\n\n const details = '<ul style=\"list-style-type:none;padding-left: 0;\">' +\n\n '<li><b>Magnitude: </b>' + props.magnitude + '</li>' +\n\n '<li><b>Depth: </b>' + props.depth + '</li>' +\n\n '<li><b>Location: </b>' + props.location + '</li>' +\n\n '<li><b>Timestamp: </b>' + props.datetime + '</li>' +\n\n '<li><b>Source: </b><a href=\"'+ props.url +'\" target=\"_blank\">phivolcs</li>' +\n\n '</ul>';\n\n layer.bindPopup(header + details);\n\n }\n\n }\n\n\n\n static quakeListItemHtml(props) {\n\n return '<div class=\"quake-container\">' +\n\n '<span class=\"quake-magnitude\">' + props.magnitude + '</span>' +\n\n '<h1 class=\"quake-location\">' + props.province + '</h1>' +\n\n '<h2 class=\"quake-timestamp\">' + moment(props.start).tz('Asia/Manila').format() + '</h2>' +\n\n '<aside class=\"quake-aside\">' + props.depth + ' km</aside>' +\n\n '</div>';\n\n }\n\n\n\n get leafletMap() {\n\n return this.map;\n\n }\n", "file_path": "server/client/app/js/common.js", "rank": 12, "score": 29517.222165608706 }, { "content": " constructor(mapId, sidebarId) {\n\n this.map = L.map(mapId, {\n\n center: [15.5, 120.91],\n\n zoom: 7,\n\n maxZoom: 18,\n\n zoomControl: false\n\n });\n\n\n\n const mapboxUrl = 'https://api.mapbox.com/styles/v1/{id}/tiles/{z}/{x}/{y}?access_token={accessToken}';\n\n\n\n const mapboxConfig = {\n\n attribution: 'Map data &copy; <a href=\"https://www.openstreetmap.org/\">OpenStreetMap</a> contributors, <a href=\"https://creativecommons.org/licenses/by-sa/2.0/\">CC-BY-SA</a>, Imagery © <a href=\"https://www.mapbox.com/\">Mapbox</a>',\n\n id: 'mapbox/outdoors-v11',\n\n accessToken: 
'pk.eyJ1IjoiZmRlYW50b25pIiwiYSI6ImNrNWhhOHlueTAxcHAzZHA3Nnd1MDhveWkifQ.kTW32UkDDmHFl9MGhnNrbw',\n\n tileSize: 512,\n\n zoomOffset: -1\n\n };\n\n\n\n L.tileLayer(mapboxUrl, mapboxConfig).addTo(this.map);\n\n\n\n L.control.sidebar(sidebarId).addTo(this.map);\n\n\n\n L.control.zoom({\n\n position: 'topright'\n\n }).addTo(this.map);\n", "file_path": "server/client/app/js/common.js", "rank": 13, "score": 28939.18129813039 }, { "content": " static radius(magnitude, depth) {\n\n let size = Math.ceil(Math.exp(magnitude) / depth);\n\n if(size < 5) {\n\n size = 5;\n\n }\n\n // TODO: Find a better way to do this!\n\n if (depth < 2) {\n\n if (magnitude > 8) {\n\n size = 140;\n\n } else if (magnitude > 7) {\n\n size = 120;\n\n } else if (magnitude > 6) {\n\n size = 100;\n\n } else if (magnitude > 5) {\n\n size = 80;\n\n }\n\n }\n\n return size\n", "file_path": "server/client/app/js/common.js", "rank": 14, "score": 28939.18129813039 }, { "content": " get leafletMap() {\n\n return this.map;\n", "file_path": "server/client/app/js/common.js", "rank": 15, "score": 28383.345336727 }, { "content": " static markerPopup(feature, layer) {\n\n if (feature.properties) {\n\n const props = feature.properties;\n\n const header = '<h3>' + props.province + ' ' + props.longitude + ', ' + props.latitude + '</h3>';\n\n const details = '<ul style=\"list-style-type:none;padding-left: 0;\">' +\n\n '<li><b>Magnitude: </b>' + props.magnitude + '</li>' +\n\n '<li><b>Depth: </b>' + props.depth + '</li>' +\n\n '<li><b>Location: </b>' + props.location + '</li>' +\n\n '<li><b>Timestamp: </b>' + props.datetime + '</li>' +\n\n '<li><b>Source: </b><a href=\"'+ props.url +'\" target=\"_blank\">phivolcs</li>' +\n\n '</ul>';\n\n layer.bindPopup(header + details);\n\n }\n", "file_path": "server/client/app/js/common.js", "rank": 16, "score": 28383.345336727 }, { "content": "const imagemin = require('gulp-imagemin');\n", "file_path": "server/client/gulpfile.js", "rank": 17, "score": 23873.799762358874 }, { 
"content": "const browserify = require('browserify');\n", "file_path": "server/client/gulpfile.js", "rank": 18, "score": 23873.799762358874 }, { "content": "const plumber = require('gulp-plumber');\n", "file_path": "server/client/gulpfile.js", "rank": 19, "score": 23873.799762358874 }, { "content": "const uglify = require(\"gulp-uglify\");\n", "file_path": "server/client/gulpfile.js", "rank": 20, "score": 23873.799762358874 }, { "content": "const gulp = require('gulp');\n", "file_path": "server/client/gulpfile.js", "rank": 21, "score": 23873.799762358874 }, { "content": "const cssnano = require(\"cssnano\");\n", "file_path": "server/client/gulpfile.js", "rank": 22, "score": 23873.799762358874 }, { "content": "const del = require('del');\n", "file_path": "server/client/gulpfile.js", "rank": 23, "score": 23873.799762358874 }, { "content": "const config = {\n\n entries: [\n\n './app/js/main.js'\n\n ]\n", "file_path": "server/client/gulpfile.js", "rank": 24, "score": 23873.799762358874 }, { "content": "const log = require('gulplog');\n", "file_path": "server/client/gulpfile.js", "rank": 25, "score": 23873.799762358874 }, { "content": "const postcss = require(\"gulp-postcss\");\n", "file_path": "server/client/gulpfile.js", "rank": 26, "score": 23873.799762358874 }, { "content": "const buffer = require(\"vinyl-buffer\");\n", "file_path": "server/client/gulpfile.js", "rank": 27, "score": 23873.799762358874 }, { "content": "const qa = document.getElementsByClassName(\"quake-alert\")[0];\n", "file_path": "server/client/app/js/main.js", "rank": 28, "score": 22424.67895644949 }, { "content": "let reconnected = false;\n", "file_path": "server/client/app/js/main.js", "rank": 29, "score": 22424.67895644949 }, { "content": "const history = new HistoryMap(\"history-map\", \"sidebar\", \"history-list\");\n", "file_path": "server/client/app/js/main.js", "rank": 30, "score": 22424.67895644949 }, { "content": "const tweet = document.getElementById(\"quake-tweet\");\n", "file_path": 
"server/client/app/js/main.js", "rank": 31, "score": 22424.67895644949 }, { "content": "const current = new CurrentMap(\"current-map\", \"sidebar\", \"current-list\");\n", "file_path": "server/client/app/js/main.js", "rank": 32, "score": 22424.67895644949 }, { "content": "class HistoryMap extends QuakeMap {\n\n\n\n constructor(mapId, sidebarId, listId) {\n\n super(mapId, sidebarId);\n\n\n\n this.list = document.getElementById(listId);\n\n\n\n this.map.spin(true);\n\n\n\n this.initialized = false;\n\n }\n\n\n\n static radius(magnitude, depth) {\n\n let size = Math.ceil(Math.exp(magnitude) / depth);\n\n if(size < 5) size = 5;\n\n return size\n\n }\n\n\n\n static historyMarkers(json) {\n\n return L.timeline(json, {\n\n pointToLayer: function(feature, latlng) {\n\n if (feature.properties && feature.properties.magnitude && feature.properties.depth) {\n\n return HistoryMap.quakeMarker(latlng, feature.properties.magnitude, feature.properties.depth);\n\n }\n\n },\n\n onEachFeature: HistoryMap.markerPopup\n\n });\n\n };\n\n\n\n static updateList(layer, map, list) {\n\n\n\n const displayed = layer.getLayers().sort(function(a,b) {\n\n const first = moment(a.feature.properties.datetime);\n\n const second = moment(b.feature.properties.datetime);\n\n return first - second;\n\n });\n\n list.innerHTML = \"\";\n\n\n\n displayed.forEach(function(quake){\n\n const layerId = layer.getLayerId(quake);\n\n const props = quake.feature.properties;\n\n const inBounds = map.getBounds().contains({lat: props.latitude, lng: props.longitude});\n\n if( inBounds ) {\n\n const newItem = document.createElement('li');\n\n newItem.className = \"quake-show\";\n\n newItem.setAttribute(\"data-layer-id\", layerId);\n\n newItem.innerHTML = HistoryMap.quakeListItemHtml(props);\n\n\n\n newItem.onclick = function(e) {\n\n map.flyTo(quake.getLatLng(), 14);\n\n map.once('moveend', function() {\n\n quake.openPopup();\n\n });\n\n };\n\n\n\n list.prepend(newItem);\n\n }\n\n });\n\n }\n\n\n\n load(json) {\n\n\n\n 
let timelineControl = L.timelineSliderControl({\n\n formatOutput: function(date){\n\n return moment(date).format(\"YYYY-MM-DD HH:MM:SS\");\n\n },\n\n steps: 4000,\n\n duration: 80000,\n\n position: \"bottomright\"\n\n });\n\n\n\n let timeline = HistoryMap.historyMarkers(json);\n\n\n\n const map = this.map;\n\n const list = this.list;\n\n timeline.on('change', function(e){\n\n HistoryMap.updateList(e.target, map, list);\n\n });\n\n\n\n map.on(\"moveend\", function (e) {\n\n console.log(\"Event: \", e);\n\n HistoryMap.updateList(timeline, map, list);\n\n });\n\n\n\n timelineControl.addTo(map);\n\n timelineControl.addTimelines(timeline);\n\n\n\n timeline.addTo(map);\n\n\n\n this.layer = timeline;\n\n this.control = timelineControl;\n\n\n\n this.map.spin(false);\n\n this.initialized = true;\n\n }\n\n\n\n clear() {\n\n if(this.initialized) {\n\n this.control.removeTimelines();\n\n this.map.removeControl(this.control);\n\n this.control = {};\n\n this.layer.clearLayers();\n\n this.list.innerHTML = \"\";\n\n }\n\n }\n", "file_path": "server/client/app/js/history.js", "rank": 33, "score": 21764.14572124115 }, { "content": "class CurrentMap extends QuakeMap {\n\n\n\n constructor(mapId, sidebarId, listId) {\n\n super(mapId, sidebarId);\n\n\n\n this.list = document.getElementById(listId);\n\n\n\n this.map.spin(true);\n\n this.initialized = false;\n\n }\n\n\n\n static currentMarkers(json) {\n\n return L.geoJSON(json, {\n\n pointToLayer: function(feature, latlng) {\n\n if (feature.properties) {\n\n return CurrentMap.quakeMarker(latlng, feature.properties.magnitude, feature.properties.depth);\n\n }\n\n },\n\n onEachFeature: CurrentMap.markerPopup\n\n });\n\n }\n\n\n\n static filterOld(json) {\n\n const horizon = moment().utc().subtract(24, 'hours');\n\n const filtered = json.features.filter(function(item) {\n\n return moment.utc(item.properties.datetime).isAfter(horizon);\n\n });\n\n return {\n\n type: \"FeatureCollection\",\n\n features: filtered\n\n };\n\n }\n\n\n\n static 
updateList(layer, map, list) {\n\n\n\n const displayed = layer.getLayers().sort(function(a,b) {\n\n const first = moment(a.feature.properties.datetime);\n\n const second = moment(b.feature.properties.datetime);\n\n return first - second;\n\n });\n\n displayed.forEach(function(quake){\n\n const layerId = layer.getLayerId(quake);\n\n const props = quake.feature.properties;\n\n const newItem = document.createElement('li');\n\n newItem.setAttribute(\"data-layer-id\", layerId);\n\n newItem.innerHTML = CurrentMap.quakeListItemHtml(props);\n\n\n\n newItem.onclick = function(e) {\n\n map.flyTo(quake.getLatLng(), 10);\n\n map.once('moveend', function() {\n\n quake.openPopup();\n\n });\n\n };\n\n\n\n list.prepend(newItem);\n\n\n\n setTimeout(function() {\n\n newItem.className = newItem.className + \"quake-show\";\n\n }, 50);\n\n\n\n });\n\n\n\n if(displayed.length > 0) {\n\n const latest = displayed[displayed.length - 1];\n\n map.flyTo(latest.getLatLng(), 7);\n\n }\n\n }\n\n\n\n static clusterIcon(size, text) {\n\n const style = \"width: \" + size + \"px; height: \" + size + \"px; line-height: \" + size + \"px;\";\n\n const html = '<div class=\"quakes-marker-icon\" style=\"'+ style +'\"><b>' + text + '</b></div>';\n\n return L.divIcon({ html: html, className: 'quakes-cluster', iconSize: L.point(size, size) });\n\n }\n\n\n\n add(json) {\n\n let latest = CurrentMap.filterOld(json);\n\n let markers = CurrentMap.currentMarkers(latest);\n\n\n\n if(!this.initialized) {\n\n let cluster = L.markerClusterGroup({\n\n maxClusterRadius: function (zoom) {\n\n return (zoom <= 6) ? 
80 : 1; // radius in pixels\n\n },\n\n iconCreateFunction: function(cluster) {\n\n const size = Math.log(cluster.getChildCount())*40;\n\n return CurrentMap.clusterIcon(size, cluster.getChildCount());\n\n }\n\n });\n\n cluster.addLayer(markers);\n\n this.map.addLayer(cluster);\n\n\n\n CurrentMap.updateList(cluster, this.map, this.list);\n\n\n\n this.layer = cluster;\n\n this.map.spin(false);\n\n this.initialized = true;\n\n } else {\n\n this.layer.addLayers(markers);\n\n this.layer.refreshClusters(markers);\n\n\n\n CurrentMap.updateList(markers, this.map, this.list);\n\n }\n\n }\n\n\n\n clear() {\n\n this.layer.clearLayers();\n\n this.list.innerHTML = \"\";\n\n }\n", "file_path": "server/client/app/js/current.js", "rank": 34, "score": 21764.14572124115 }, { "content": " clear() {\n\n if(this.initialized) {\n\n this.control.removeTimelines();\n\n this.map.removeControl(this.control);\n\n this.control = {};\n\n this.layer.clearLayers();\n\n this.list.innerHTML = \"\";\n\n }\n", "file_path": "server/client/app/js/history.js", "rank": 35, "score": 21141.411935411274 }, { "content": " add(json) {\n\n let latest = CurrentMap.filterOld(json);\n\n let markers = CurrentMap.currentMarkers(latest);\n\n\n\n if(!this.initialized) {\n\n let cluster = L.markerClusterGroup({\n\n maxClusterRadius: function (zoom) {\n\n return (zoom <= 6) ? 
80 : 1; // radius in pixels\n\n },\n\n iconCreateFunction: function(cluster) {\n\n const size = Math.log(cluster.getChildCount())*40;\n\n return CurrentMap.clusterIcon(size, cluster.getChildCount());\n\n }\n\n });\n\n cluster.addLayer(markers);\n\n this.map.addLayer(cluster);\n\n\n\n CurrentMap.updateList(cluster, this.map, this.list);\n\n\n\n this.layer = cluster;\n\n this.map.spin(false);\n\n this.initialized = true;\n\n } else {\n\n this.layer.addLayers(markers);\n\n this.layer.refreshClusters(markers);\n\n\n\n CurrentMap.updateList(markers, this.map, this.list);\n\n }\n", "file_path": "server/client/app/js/current.js", "rank": 36, "score": 21141.411935411274 }, { "content": " constructor(mapId, sidebarId, listId) {\n\n super(mapId, sidebarId);\n\n\n\n this.list = document.getElementById(listId);\n\n\n\n this.map.spin(true);\n\n\n\n this.initialized = false;\n", "file_path": "server/client/app/js/history.js", "rank": 37, "score": 21141.411935411274 }, { "content": " clear() {\n\n this.layer.clearLayers();\n\n this.list.innerHTML = \"\";\n", "file_path": "server/client/app/js/current.js", "rank": 38, "score": 21141.411935411274 }, { "content": " load(json) {\n\n\n\n let timelineControl = L.timelineSliderControl({\n\n formatOutput: function(date){\n\n return moment(date).format(\"YYYY-MM-DD HH:MM:SS\");\n\n },\n\n steps: 4000,\n\n duration: 80000,\n\n position: \"bottomright\"\n\n });\n\n\n\n let timeline = HistoryMap.historyMarkers(json);\n\n\n\n const map = this.map;\n\n const list = this.list;\n\n timeline.on('change', function(e){\n\n HistoryMap.updateList(e.target, map, list);\n\n });\n\n\n\n map.on(\"moveend\", function (e) {\n\n console.log(\"Event: \", e);\n\n HistoryMap.updateList(timeline, map, list);\n\n });\n\n\n\n timelineControl.addTo(map);\n\n timelineControl.addTimelines(timeline);\n\n\n\n timeline.addTo(map);\n\n\n\n this.layer = timeline;\n\n this.control = timelineControl;\n\n\n\n this.map.spin(false);\n\n this.initialized = true;\n", 
"file_path": "server/client/app/js/history.js", "rank": 39, "score": 21141.411935411274 }, { "content": " static radius(magnitude, depth) {\n\n let size = Math.ceil(Math.exp(magnitude) / depth);\n\n if(size < 5) size = 5;\n\n return size\n", "file_path": "server/client/app/js/history.js", "rank": 40, "score": 21141.411935411274 }, { "content": " constructor(mapId, sidebarId, listId) {\n\n super(mapId, sidebarId);\n\n\n\n this.list = document.getElementById(listId);\n\n\n\n this.map.spin(true);\n\n this.initialized = false;\n", "file_path": "server/client/app/js/current.js", "rank": 41, "score": 21141.411935411274 }, { "content": "[![Build Status](https://travis-ci.com/fdeantoni/ph-quakes.svg?branch=master)](https://travis-ci.com/fdeantoni/ph-quakes)\n\n[![Dependency Status](https://deps.rs/repo/github/fdeantoni/ph-quakes/status.svg)](https://deps.rs/repo/github/fdeantoni/ph-quakes)\n\n\n\n# *PH Quakes* #\n\n\n\nThe Philippines is in the [Ring of Fire](https://en.wikipedia.org/wiki/Ring_of_Fire) so earthquakes and\n\nvolcano eruptions are a frequent occurrence. [PHIVOLCS](https://en.wikipedia.org/wiki/Philippine_Institute_of_Volcanology_and_Seismology) keeps\n\ntrack of all these and publishes earthquake data on its [website](https://www.phivolcs.dost.gov.ph/index.php/earthquake/earthquake-information3), \n\nand [twitter account](https://twitter.com/phivolcs_dost).\n\n\n\nThis application makes use of both sources to gather quake data and display it on a [leaflet](https://leafletjs.com/) \n\nmap. It emulates the functionality the [USGS Earthquake Map](https://earthquake.usgs.gov/earthquakes/map//). \n\n\n\n## Demo ##\n\n\n\nA running instance can be found here: [https://ph-quakes.herokuapp.com](https://ph-quakes.herokuapp.com/).\n\n\n\n## Get it and Run it ##\n\n\n\nTo run the application you will need [Rust](https://www.rust-lang.org/). Install that first. 
After that, run\n\nthe following:\n\n\n\n $ git clone https://github.com/fdeantoni/ph-quakes\n\n $ cd ph-quakes/server\n\n $ cargo run \n\n\n\n## Development ##\n\n\n\nThe Javascript client uses `gulp` to minify and deploy the sources. Run it as follows:\n\n\n\n $ cd ph-quakes/server/client\n\n $ gulp build\n\n \n\nOnce the updated javascript resources have been built and deployed, you can run the server as follows:\n\n\n\n $ cd .. # back to ph-quakes/server\n\n $ RUST_LOG=actix_server=info,actix_web=info,quakes_server=debug cargo run \n\n\n\nThe `quakes-server` should now be running at [http://localhost:8080].\n\n\n\nTo run the server with dummy data so as not to make any API calls from `quakes-scraper` and `quakes-twitter`,\n\npass the `TEST` env variable, i.e.:\n\n\n\n $ TEST=true cargo run\n\n \n\nThis will generate a dummy quake every 10 seconds. \n\n\n", "file_path": "README.md", "rank": 42, "score": 20559.537066404515 }, { "content": "## Leaflet Plugins ##\n\n\n\nBesides Leaflet, this project also makes use of the following Leaflet plugins:\n\n * [Leaflet.Spin](https://github.com/makinacorpus/Leaflet.Spin)\n\n * [sidebar-v2](https://github.com/Turbo87/sidebar-v2)\n\n * [Leaflet.timeline](https://github.com/skeate/Leaflet.timeline)\n\n * [Leaflet.markercluster](https://github.com/Leaflet/Leaflet.markercluster)\n\n\n\n## License ##\n\n\n\n`ph-quakes` is distributed under the terms of the Apache License (Version 2.0).\n\n\n\nSee [LICENSE](LICENSE) for details.\n\n\n\nCopyright 2020 Ferdinand de Antoni\n\n\n\n## Disclaimer ##\n\n\n", "file_path": "README.md", "rank": 43, "score": 20556.385955616995 }, { "content": " static updateList(layer, map, list) {\n\n\n\n const displayed = layer.getLayers().sort(function(a,b) {\n\n const first = moment(a.feature.properties.datetime);\n\n const second = moment(b.feature.properties.datetime);\n\n return first - second;\n\n });\n\n list.innerHTML = \"\";\n\n\n\n displayed.forEach(function(quake){\n\n const layerId = 
layer.getLayerId(quake);\n\n const props = quake.feature.properties;\n\n const inBounds = map.getBounds().contains({lat: props.latitude, lng: props.longitude});\n\n if( inBounds ) {\n\n const newItem = document.createElement('li');\n\n newItem.className = \"quake-show\";\n\n newItem.setAttribute(\"data-layer-id\", layerId);\n\n newItem.innerHTML = HistoryMap.quakeListItemHtml(props);\n\n\n\n newItem.onclick = function(e) {\n\n map.flyTo(quake.getLatLng(), 14);\n\n map.once('moveend', function() {\n\n quake.openPopup();\n\n });\n\n };\n\n\n\n list.prepend(newItem);\n\n }\n\n });\n", "file_path": "server/client/app/js/history.js", "rank": 44, "score": 20553.323207450354 }, { "content": " static historyMarkers(json) {\n\n return L.timeline(json, {\n\n pointToLayer: function(feature, latlng) {\n\n if (feature.properties && feature.properties.magnitude && feature.properties.depth) {\n\n return HistoryMap.quakeMarker(latlng, feature.properties.magnitude, feature.properties.depth);\n\n }\n\n },\n\n onEachFeature: HistoryMap.markerPopup\n\n });\n", "file_path": "server/client/app/js/history.js", "rank": 45, "score": 20553.323207450354 }, { "content": " static clusterIcon(size, text) {\n\n const style = \"width: \" + size + \"px; height: \" + size + \"px; line-height: \" + size + \"px;\";\n\n const html = '<div class=\"quakes-marker-icon\" style=\"'+ style +'\"><b>' + text + '</b></div>';\n\n return L.divIcon({ html: html, className: 'quakes-cluster', iconSize: L.point(size, size) });\n", "file_path": "server/client/app/js/current.js", "rank": 46, "score": 20553.323207450354 }, { "content": " static updateList(layer, map, list) {\n\n\n\n const displayed = layer.getLayers().sort(function(a,b) {\n\n const first = moment(a.feature.properties.datetime);\n\n const second = moment(b.feature.properties.datetime);\n\n return first - second;\n\n });\n\n displayed.forEach(function(quake){\n\n const layerId = layer.getLayerId(quake);\n\n const props = quake.feature.properties;\n\n 
const newItem = document.createElement('li');\n\n newItem.setAttribute(\"data-layer-id\", layerId);\n\n newItem.innerHTML = CurrentMap.quakeListItemHtml(props);\n\n\n\n newItem.onclick = function(e) {\n\n map.flyTo(quake.getLatLng(), 10);\n\n map.once('moveend', function() {\n\n quake.openPopup();\n\n });\n\n };\n\n\n\n list.prepend(newItem);\n\n\n\n setTimeout(function() {\n\n newItem.className = newItem.className + \"quake-show\";\n\n }, 50);\n\n\n\n });\n\n\n\n if(displayed.length > 0) {\n\n const latest = displayed[displayed.length - 1];\n\n map.flyTo(latest.getLatLng(), 7);\n\n }\n", "file_path": "server/client/app/js/current.js", "rank": 47, "score": 20553.323207450354 }, { "content": " static filterOld(json) {\n\n const horizon = moment().utc().subtract(24, 'hours');\n\n const filtered = json.features.filter(function(item) {\n\n return moment.utc(item.properties.datetime).isAfter(horizon);\n\n });\n\n return {\n\n type: \"FeatureCollection\",\n\n features: filtered\n\n };\n", "file_path": "server/client/app/js/current.js", "rank": 48, "score": 20553.323207450354 }, { "content": " static currentMarkers(json) {\n\n return L.geoJSON(json, {\n\n pointToLayer: function(feature, latlng) {\n\n if (feature.properties) {\n\n return CurrentMap.quakeMarker(latlng, feature.properties.magnitude, feature.properties.depth);\n\n }\n\n },\n\n onEachFeature: CurrentMap.markerPopup\n\n });\n", "file_path": "server/client/app/js/current.js", "rank": 49, "score": 20553.323207450354 }, { "content": "const browserify = require('browserify');\n\nconst gulp = require('gulp');\n\nconst log = require('gulplog');\n\nconst plumber = require('gulp-plumber');\n\nconst source = require('vinyl-source-stream');\n\nconst buffer = require(\"vinyl-buffer\");\n\nconst uglify = require(\"gulp-uglify\");\n\nconst postcss = require(\"gulp-postcss\");\n\nconst cssnano = require(\"cssnano\");\n\nconst imagemin = require('gulp-imagemin');\n\nconst del = require('del');\n\n\n\nconst config = {\n\n 
entries: [\n\n './app/js/main.js'\n\n ]\n\n};\n\n\n\nfunction javascript(cb) {\n\n browserify(config)\n\n .transform('babelify', { presets: [\"@babel/preset-env\"] } )\n\n .bundle()\n\n .on('error', log.error)\n\n .pipe(source('quakes.js'))\n\n .pipe(plumber())\n\n .pipe(buffer())\n\n .pipe(uglify())\n\n .pipe(gulp.dest('../static/quakes'));\n\n\n\n cb();\n\n}\n\n\n\nexports.javascript = javascript;\n\n\n\nfunction css(cb) {\n\n gulp.src(\"app/css/*\")\n\n .pipe(plumber())\n\n .pipe( postcss([cssnano()] ) )\n\n .pipe(gulp.dest(\"../static/quakes\"))\n\n .on('error', log.error);\n\n\n\n cb();\n\n}\n\n\n\nexports.css = css;\n\n\n\nfunction image(cb) {\n\n gulp.src('app/images/*')\n\n .pipe(plumber())\n\n .pipe(imagemin())\n\n .pipe(gulp.dest('../static/quakes'))\n\n .on('error', log.error);\n\n\n\n cb();\n\n}\n\n\n\nexports.build = gulp.parallel(javascript, css, image);\n\n\n\nexports.watch = function() {\n\n gulp.watch('app/css/*.css', css);\n\n gulp.watch('app/images/*', image);\n\n gulp.watch('app/js/*.js', gulp.series(clean, javascript));\n\n};\n\n\n\nfunction clean(cb) {\n\n del.sync('../static/quakes/*');\n\n cb();\n\n}\n\n\n", "file_path": "server/client/gulpfile.js", "rank": 50, "score": 19470.125780658997 }, { "content": "import QuakeMap from \"./common.js\";\n\n\n\nclass HistoryMap extends QuakeMap {\n\n\n\n constructor(mapId, sidebarId, listId) {\n\n super(mapId, sidebarId);\n\n\n\n this.list = document.getElementById(listId);\n\n\n\n this.map.spin(true);\n\n\n\n this.initialized = false;\n\n }\n\n\n\n static radius(magnitude, depth) {\n\n let size = Math.ceil(Math.exp(magnitude) / depth);\n\n if(size < 5) size = 5;\n\n return size\n\n }\n\n\n\n static historyMarkers(json) {\n\n return L.timeline(json, {\n\n pointToLayer: function(feature, latlng) {\n\n if (feature.properties && feature.properties.magnitude && feature.properties.depth) {\n\n return HistoryMap.quakeMarker(latlng, feature.properties.magnitude, feature.properties.depth);\n\n }\n\n },\n\n 
onEachFeature: HistoryMap.markerPopup\n\n });\n\n };\n\n\n\n static updateList(layer, map, list) {\n\n\n\n const displayed = layer.getLayers().sort(function(a,b) {\n\n const first = moment(a.feature.properties.datetime);\n\n const second = moment(b.feature.properties.datetime);\n\n return first - second;\n\n });\n\n list.innerHTML = \"\";\n\n\n\n displayed.forEach(function(quake){\n\n const layerId = layer.getLayerId(quake);\n\n const props = quake.feature.properties;\n\n const inBounds = map.getBounds().contains({lat: props.latitude, lng: props.longitude});\n\n if( inBounds ) {\n\n const newItem = document.createElement('li');\n\n newItem.className = \"quake-show\";\n\n newItem.setAttribute(\"data-layer-id\", layerId);\n\n newItem.innerHTML = HistoryMap.quakeListItemHtml(props);\n\n\n\n newItem.onclick = function(e) {\n\n map.flyTo(quake.getLatLng(), 14);\n\n map.once('moveend', function() {\n\n quake.openPopup();\n\n });\n\n };\n\n\n\n list.prepend(newItem);\n\n }\n\n });\n\n }\n\n\n\n load(json) {\n\n\n\n let timelineControl = L.timelineSliderControl({\n\n formatOutput: function(date){\n\n return moment(date).format(\"YYYY-MM-DD HH:MM:SS\");\n\n },\n\n steps: 4000,\n\n duration: 80000,\n\n position: \"bottomright\"\n\n });\n\n\n\n let timeline = HistoryMap.historyMarkers(json);\n\n\n\n const map = this.map;\n\n const list = this.list;\n\n timeline.on('change', function(e){\n\n HistoryMap.updateList(e.target, map, list);\n\n });\n\n\n\n map.on(\"moveend\", function (e) {\n\n console.log(\"Event: \", e);\n\n HistoryMap.updateList(timeline, map, list);\n\n });\n\n\n\n timelineControl.addTo(map);\n\n timelineControl.addTimelines(timeline);\n\n\n\n timeline.addTo(map);\n\n\n\n this.layer = timeline;\n\n this.control = timelineControl;\n\n\n\n this.map.spin(false);\n\n this.initialized = true;\n\n }\n\n\n\n clear() {\n\n if(this.initialized) {\n\n this.control.removeTimelines();\n\n this.map.removeControl(this.control);\n\n this.control = {};\n\n 
this.layer.clearLayers();\n\n this.list.innerHTML = \"\";\n\n }\n\n }\n\n}\n\n\n\nexport default HistoryMap;\n", "file_path": "server/client/app/js/history.js", "rank": 51, "score": 18495.385409901923 }, { "content": "!function(t,e){if(\"object\"==typeof exports&&\"object\"==typeof module)module.exports=e();else if(\"function\"==typeof define&&define.amd)define([],e);else{var n=e();for(var i in n)(\"object\"==typeof exports?exports:t)[i]=n[i]}}(this,function(){return function(t){function e(i){if(n[i])return n[i].exports;var r=n[i]={i:i,l:!1,exports:{}};return t[i].call(r.exports,r,r.exports,e),r.l=!0,r.exports}var n={};return e.m=t,e.c=n,e.d=function(t,n,i){e.o(t,n)||Object.defineProperty(t,n,{configurable:!1,enumerable:!0,get:i})},e.n=function(t){var n=t&&t.__esModule?function(){return t.default}:function(){return t};return e.d(n,\"a\",n),n},e.o=function(t,e){return Object.prototype.hasOwnProperty.call(t,e)},e.p=\"\",e(e.s=0)}([function(t,e,n){\"use strict\";L.TimelineVersion=\"1.0.0-beta\",n(1),n(3),n(4)},function(t,e,n){\"use strict\";var i=function(t){return t&&t.__esModule?t:{default:t}}(n(2));L.Timeline=L.GeoJSON.extend({times:null,ranges:null,initialize:function(t){var e=this,n=arguments.length>1&&void 0!==arguments[1]?arguments[1]:{};this.times=[],this.ranges=new i.default;L.GeoJSON.prototype.initialize.call(this,null,n),L.Util.setOptions(this,{drawOnSetTime:!0}),L.Util.setOptions(this,n),this.options.getInterval&&(this._getInterval=function(){var t;return(t=e.options).getInterval.apply(t,arguments)}),t&&this._process(t)},_getInterval:function(t){var e=\"start\"in t.properties,n=\"end\"in t.properties;return!(!e||!n)&&{start:new Date(t.properties.start).getTime(),end:new Date(t.properties.end).getTime()}},_process:function(t){var e=this,n=1/0,i=-1/0;t.features.forEach(function(t){var 
r=e._getInterval(t);r&&(e.ranges.insert(r.start,r.end,t),e.times.push(r.start),e.times.push(r.end),n=Math.min(n,r.start),i=Math.max(i,r.end))}),this.start=this.options.start||n,this.end=this.options.end||i,this.time=this.start,0!==this.times.length&&(this.times.sort(function(t,e){return t-e}),this.times=this.times.reduce(function(t,e,n){return 0===n?t:(t[t.length-1]!==e&&t.push(e),t)},[this.times[0]]))},setTime:function(t){this.time=\"number\"==typeof t?t:new Date(t).getTime(),this.options.drawOnSetTime&&this.updateDisplayedLayers(),this.fire(\"change\")},updateDisplayedLayers:function(){for(var t=this,e=this.ranges.lookup(this.time),n=0;n<this.getLayers().length;n++){for(var i=!1,r=this.getLayers()[n],o=0;o<e.length;o++)if(r.feature===e[o]){i=!0,e.splice(o,1);break}if(!i){var s=this.getLayers()[n--];this.removeLayer(s)}}e.forEach(function(e){return t.addData(e)})}}),L.timeline=function(t,e){return new L.Timeline(t,e)}},function(t,e,n){\"use strict\";function i(t){if(Array.isArray(t)){for(var e=0,n=Array(t.length);e<t.length;e++)n[e]=t[e];return n}return Array.from(t)}function r(t,e){if(!(t instanceof e))throw new TypeError(\"Cannot call a class as a function\")}Object.defineProperty(e,\"__esModule\",{value:!0});var o=function(){function t(t,e){for(var n=0;n<e.length;n++){var i=e[n];i.enumerable=i.enumerable||!1,i.configurable=!0,\"value\"in i&&(i.writable=!0),Object.defineProperty(t,i.key,i)}}return function(e,n,i){return n&&t(e.prototype,n),i&&t(e,i),e}}(),s=function t(e,n,i,o){r(this,t),this.low=e,this.high=n,this.min=e,this.max=n,this.data=i,this.left=null,this.right=null,this.parent=o},a=function(){function t(){r(this,t),this._root=null,this.size=0}return o(t,[{key:\"_insert\",value:function(t,e,n,i,r,o){var a=void 0;if(null===i)a=new s(t,e,n,r),null===r?this._root=a:r[o]=a;else{var l=t<i.low||t===i.low&&e<i.high?\"left\":\"right\";a=this._insert(t,e,n,i[l],i,l),i.max=Math.max(i.max,a.max),i.min=Math.min(i.min,a.min)}return 
a}},{key:\"insert\",value:function(t,e,n){this._insert(t,e,n,this._root,this._root),this.size++}},{key:\"lookup\",value:function(t){var e=[],n=this._root;return 2===arguments.length&&(n=arguments[1]),null===n||n.max<t?e:(e.push.apply(e,i(this.lookup(t,n.left))),n.low<=t&&(n.high>=t&&e.push(n.data),e.push.apply(e,i(this.lookup(t,n.right)))),e)}},{key:\"overlap\",value:function(t,e){var n=[],r=this._root;return 3===arguments.length&&(r=arguments[2]),t>r.high||r.low>e||n.push(r.data),r.left&&r.left.max>=t&&n.push.apply(n,i(this.overlap(t,e,r.left))),r.right&&r.right.min<=e&&n.push.apply(n,i(this.overlap(t,e,r.right))),n}}]),t}();e.default=a},function(t,e,n){\"use strict\";function i(t){if(Array.isArray(t)){for(var e=0,n=Array(t.length);e<t.length;e++)n[e]=t[e];return n}return Array.from(t)}L.TimelineSliderControl=L.Control.extend({initialize:function(){var t=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};this.timelines=[],L.Util.setOptions(this,{duration:1e4,enableKeyboardControls:!1,enablePlayback:!0,formatOutput:function(t){return\"\"+(t||\"\")},showTicks:!0,waitToUpdateMap:!1,position:\"bottomleft\",steps:1e3,autoPlay:!1}),L.Util.setOptions(this,t),void 0!==t.start&&(this.start=t.start),void 0!==t.end&&(this.end=t.end)},_getTimes:function(){var t=this,e=[];if(this.timelines.forEach(function(n){var r=n.times.filter(function(e){return e>=t.start&&e<=t.end});e.push.apply(e,i(r))}),e.length){e.sort(function(t,e){return t-e});var n=[e[0]];return e.reduce(function(t,e){return t!==e&&n.push(e),e}),n}return e},_recalculate:function(){var t=void 0!==this.options.start,e=void 
0!==this.options.end,n=this.options.duration,i=1/0,r=-1/0;this.timelines.forEach(function(t){t.start<i&&(i=t.start),t.end>r&&(r=t.end)}),t||(this.start=i,this._timeSlider.min=i===1/0?0:i,this._timeSlider.value=this._timeSlider.min),e||(this.end=r,this._timeSlider.max=r===-1/0?0:r),this._stepSize=Math.max(1,(this.end-this.start)/this.options.steps),this._stepDuration=Math.max(1,n/this.options.steps)},_nearestEventTime:function(t){for(var e=arguments.length>1&&void 0!==arguments[1]?arguments[1]:0,n=this._getTimes(),i=!1,r=n[0],o=1;o<n.length;o++){var s=n[o];if(i)return s;if(s>=t){if(-1===e)return r;if(s!==t)return s;i=!0}r=s}return r},_createDOM:function(){var t=L.DomUtil.create(\"div\",[\"leaflet-control-layers\",\"leaflet-control-layers-expanded\",\"leaflet-timeline-control\"].join(\" \"));if(this.container=t,this.options.enablePlayback){var e=L.DomUtil.create(\"div\",\"sldr-ctrl-container\",t),n=L.DomUtil.create(\"div\",\"button-container\",e);this._makeButtons(n),this.options.enableKeyboardControls&&this._addKeyListeners(),this._makeOutput(e)}this._makeSlider(t),this.options.showTicks&&this._buildDataList(t),this.options.autoPlay&&this._autoPlay()},_autoPlay:function(){var t=this;\"loading\"==document.readyState?window.addEventListener(\"load\",function(e){return t._autoPlay()}):this.play()},_addKeyListeners:function(){var t=this;this._listener=function(){return t._onKeydown.apply(t,arguments)},document.addEventListener(\"keydown\",this._listener)},_removeKeyListeners:function(){document.removeEventListener(\"keydown\",this._listener)},_buildDataList:function(t){this._datalist=L.DomUtil.create(\"datalist\",\"\",t);var e=Math.floor(1e6*Math.random());this._datalist.id=\"timeline-datalist-\"+e,this._timeSlider.setAttribute(\"list\",this._datalist.id),this._rebuildDataList()},_rebuildDataList:function(){for(var t=this._datalist;t.firstChild;)t.removeChild(t.firstChild);var 
e=L.DomUtil.create(\"select\",\"\",this._datalist);this._getTimes().forEach(function(t){L.DomUtil.create(\"option\",\"\",e).value=t})},_makeButton:function(t,e){var n=this,i=L.DomUtil.create(\"button\",e,t);i.addEventListener(\"click\",function(){return n[e]()}),L.DomEvent.disableClickPropagation(i)},_makeButtons:function(t){this._makeButton(t,\"prev\"),this._makeButton(t,\"play\"),this._makeButton(t,\"pause\"),this._makeButton(t,\"next\")},_disableMapDragging:function(){this.map.dragging.disable()},_enableMapDragging:function(){this.map.dragging.enable()},_makeSlider:function(t){var e=L.DomUtil.create(\"input\",\"time-slider\",t);e.type=\"range\",e.min=this.start||0,e.max=this.end||0,e.value=this.start||0,this._timeSlider=e,L.DomEvent.on(this._timeSlider,\"change input\",this._sliderChanged,this),L.DomEvent.on(this._timeSlider,\"pointerdown mousedown touchstart\",this._disableMapDragging,this),L.DomEvent.on(document,\"pointerup mouseup touchend\",this._enableMapDragging,this)},_makeOutput:function(t){this._output=L.DomUtil.create(\"output\",\"time-text\",t),this._output.innerHTML=this.options.formatOutput(this.start)},_onKeydown:function(t){switch(t.keyCode||t.which){case 37:this.prev();break;case 39:this.next();break;case 32:this.toggle();break;default:return}t.preventDefault()},_sliderChanged:function(t){var e=parseFloat(+t.target.value,10);this.time=e,this.options.waitToUpdateMap&&\"change\"!==t.type||this.timelines.forEach(function(t){return t.setTime(e)}),this._output&&(this._output.innerHTML=this.options.formatOutput(e))},_resetIfTimelinesChanged:function(t){this.timelines.length!==t&&(this._recalculate(),this.options.showTicks&&this._rebuildDataList(),this.setTime(this.start))},addTimelines:function(){var t=this;this.pause();for(var e=this.timelines.length,n=arguments.length,i=Array(n),r=0;r<n;r++)i[r]=arguments[r];i.forEach(function(e){-1===t.timelines.indexOf(e)&&t.timelines.push(e)}),this._resetIfTimelinesChanged(e)},removeTimelines:function(){var 
t=this;this.pause();for(var e=this.timelines.length,n=arguments.length,i=Array(n),r=0;r<n;r++)i[r]=arguments[r];i.forEach(function(e){var n=t.timelines.indexOf(e);-1!==n&&t.timelines.splice(n,1)}),this._resetIfTimelinesChanged(e)},toggle:function(){this._playing?this.pause():this.play()},prev:function(){this.pause();var t=this._nearestEventTime(this.time,-1);this._timeSlider.value=t,this.setTime(t)},pause:function(t){clearTimeout(this._timer),this._playing=!1,this.container.classList.remove(\"playing\"),this.syncedControl&&!t&&this.syncedControl.map(function(t){t.pause(!0)})},play:function(t){var e=this;clearTimeout(this._timer),parseFloat(this._timeSlider.value,10)===this.end&&(this._timeSlider.value=this.start),this._timeSlider.value=parseFloat(this._timeSlider.value,10)+this._stepSize,this.setTime(this._timeSlider.value),parseFloat(this._timeSlider.value,10)===this.end?(this._playing=!1,this.container.classList.remove(\"playing\")):(this._playing=!0,this.container.classList.add(\"playing\"),this._timer=setTimeout(function(){return e.play(!0)},this._stepDuration)),this.syncedControl&&!t&&this.syncedControl.map(function(t){t.play(!0)})},next:function(){this.pause();var t=this._nearestEventTime(this.time,1);this._timeSlider.value=t,this.setTime(t)},setTime:function(t){this._timeSlider&&(this._timeSlider.value=+t),this._sliderChanged({type:\"change\",target:{value:t}})},onAdd:function(t){return this.map=t,this._createDOM(),this.setTime(this.start),this.container},onRemove:function(){this.options.enableKeyboardControls&&this._removeKeyListeners(),L.DomEvent.off(this._timeSlider,\"change input\",this._sliderChanged,this),L.DomEvent.off(this._timeSlider,\"pointerdown mousedown touchstart\",this._disableMapDragging,this),L.DomEvent.off(document,\"pointerup mouseup 
touchend\",this._enableMapDragging,this),this._enableMapDragging()},syncControl:function(t){this.syncedControl||(this.syncedControl=[]),this.syncedControl.push(syncedControl)}}),L.timelineSliderControl=function(t,e,n,i){return new L.TimelineSliderControl(t,e,n,i)}},function(t,e,n){var i=n(5);\"string\"==typeof i&&(i=[[t.i,i,\"\"]]);var r={hmr:!0};r.transform=void 0;n(7)(i,r);i.locals&&(t.exports=i.locals)},function(t,e,n){(t.exports=n(6)(void 0)).push([t.i,'.leaflet-control.leaflet-timeline-control{width:96%;box-sizing:border-box;margin:2%;margin-bottom:20px;text-align:center}.leaflet-control.leaflet-timeline-control *{vertical-align:middle}.leaflet-control.leaflet-timeline-control input[type=range]{width:80%}.leaflet-control.leaflet-timeline-control .sldr-ctrl-container{float:left;width:15%;box-sizing:border-box}.leaflet-control.leaflet-timeline-control .button-container button{position:relative;width:20%;height:20px}.leaflet-control.leaflet-timeline-control .button-container button:after,.leaflet-control.leaflet-timeline-control .button-container button:before{content:\"\";position:absolute}.leaflet-control.leaflet-timeline-control .button-container button.play:before{border:7px solid transparent;border-width:7px 0 7px 10px;border-left-color:#000;margin-top:-7px;background:transparent;margin-left:-5px}.leaflet-control.leaflet-timeline-control .button-container button.pause{display:none}.leaflet-control.leaflet-timeline-control .button-container button.pause:before{width:4px;height:14px;border:4px solid #000;border-width:0 4px;margin-top:-7px;margin-left:-6px;background:transparent}.leaflet-control.leaflet-timeline-control .button-container button.prev:after,.leaflet-control.leaflet-timeline-control .button-container button.prev:before{margin:-8px 0 0;background:#000}.leaflet-control.leaflet-timeline-control .button-container button.prev:before{width:2px;height:14px;margin-top:-7px;margin-left:-7px}.leaflet-control.leaflet-timeline-control .button-container 
button.prev:after{border:7px solid transparent;border-width:7px 10px 7px 0;border-right-color:#000;margin-top:-7px;margin-left:-5px;background:transparent}.leaflet-control.leaflet-timeline-control .button-container button.next:after,.leaflet-control.leaflet-timeline-control .button-container button.next:before{margin:-8px 0 0;background:#000}.leaflet-control.leaflet-timeline-control .button-container button.next:before{width:2px;height:14px;margin-top:-7px;margin-left:5px}.leaflet-control.leaflet-timeline-control .button-container button.next:after{border:7px solid transparent;border-width:7px 0 7px 10px;border-left-color:#000;margin-top:-7px;margin-left:-5px;background:transparent}.leaflet-control.leaflet-timeline-control.playing button.pause{display:inline-block}.leaflet-control.leaflet-timeline-control.playing button.play{display:none}',\"\"])},function(t,e){function n(t,e){var n=t[1]||\"\",r=t[3];if(!r)return n;if(e&&\"function\"==typeof btoa){var o=i(r),s=r.sources.map(function(t){return\"/*# sourceURL=\"+r.sourceRoot+t+\" */\"});return[n].concat(s).concat([o]).join(\"\\n\")}return[n].join(\"\\n\")}function i(t){return\"/*# \"+(\"sourceMappingURL=data:application/json;charset=utf-8;base64,\"+btoa(unescape(encodeURIComponent(JSON.stringify(t)))))+\" */\"}t.exports=function(t){var e=[];return e.toString=function(){return this.map(function(e){var i=n(e,t);return e[2]?\"@media \"+e[2]+\"{\"+i+\"}\":i}).join(\"\")},e.i=function(t,n){\"string\"==typeof t&&(t=[[null,t,\"\"]]);for(var i={},r=0;r<this.length;r++){var o=this[r][0];\"number\"==typeof o&&(i[o]=!0)}for(r=0;r<t.length;r++){var s=t[r];\"number\"==typeof s[0]&&i[s[0]]||(n&&!s[2]?s[2]=n:n&&(s[2]=\"(\"+s[2]+\") and (\"+n+\")\"),e.push(s))}},e}},function(t,e,n){function i(t,e){for(var n=0;n<t.length;n++){var i=t[n],r=d[i.id];if(r){r.refs++;for(s=0;s<r.parts.length;s++)r.parts[s](i.parts[s]);for(;s<i.parts.length;s++)r.parts.push(c(i.parts[s],e))}else{for(var 
o=[],s=0;s<i.parts.length;s++)o.push(c(i.parts[s],e));d[i.id]={id:i.id,refs:1,parts:o}}}}function r(t,e){for(var n=[],i={},r=0;r<t.length;r++){var o=t[r],s=e.base?o[0]+e.base:o[0],a={css:o[1],media:o[2],sourceMap:o[3]};i[s]?i[s].parts.push(a):n.push(i[s]={id:s,parts:[a]})}return n}function o(t,e){var n=v(t.insertInto);if(!n)throw new Error(\"Couldn't find a style target. This probably means that the value for the 'insertInto' parameter is invalid.\");var i=y[y.length-1];if(\"top\"===t.insertAt)i?i.nextSibling?n.insertBefore(e,i.nextSibling):n.appendChild(e):n.insertBefore(e,n.firstChild),y.push(e);else if(\"bottom\"===t.insertAt)n.appendChild(e);else{if(\"object\"!=typeof t.insertAt||!t.insertAt.before)throw new Error(\"[Style Loader]\\n\\n Invalid value for parameter 'insertAt' ('options.insertAt') found.\\n Must be 'top', 'bottom', or Object.\\n (https://github.com/webpack-contrib/style-loader#insertat)\\n\");var r=v(t.insertInto+\" \"+t.insertAt.before);n.insertBefore(e,r)}}function s(t){if(null===t.parentNode)return!1;t.parentNode.removeChild(t);var e=y.indexOf(t);e>=0&&y.splice(e,1)}function a(t){var e=document.createElement(\"style\");return t.attrs.type=\"text/css\",u(e,t.attrs),o(t,e),e}function l(t){var e=document.createElement(\"link\");return t.attrs.type=\"text/css\",t.attrs.rel=\"stylesheet\",u(e,t.attrs),o(t,e),e}function u(t,e){Object.keys(e).forEach(function(n){t.setAttribute(n,e[n])})}function c(t,e){var n,i,r,o;if(e.transform&&t.css){if(!(o=e.transform(t.css)))return function(){};t.css=o}if(e.singleton){var u=b++;n=g||(g=a(e)),i=h.bind(null,n,u,!1),r=h.bind(null,n,u,!0)}else t.sourceMap&&\"function\"==typeof URL&&\"function\"==typeof URL.createObjectURL&&\"function\"==typeof URL.revokeObjectURL&&\"function\"==typeof Blob&&\"function\"==typeof btoa?(n=l(e),i=p.bind(null,n,e),r=function(){s(n),n.href&&URL.revokeObjectURL(n.href)}):(n=a(e),i=f.bind(null,n),r=function(){s(n)});return 
i(t),function(e){if(e){if(e.css===t.css&&e.media===t.media&&e.sourceMap===t.sourceMap)return;i(t=e)}else r()}}function h(t,e,n,i){var r=n?\"\":i.css;if(t.styleSheet)t.styleSheet.cssText=x(e,r);else{var o=document.createTextNode(r),s=t.childNodes;s[e]&&t.removeChild(s[e]),s.length?t.insertBefore(o,s[e]):t.appendChild(o)}}function f(t,e){var n=e.css,i=e.media;if(i&&t.setAttribute(\"media\",i),t.styleSheet)t.styleSheet.cssText=n;else{for(;t.firstChild;)t.removeChild(t.firstChild);t.appendChild(document.createTextNode(n))}}function p(t,e,n){var i=n.css,r=n.sourceMap,o=void 0===e.convertToAbsoluteUrls&&r;(e.convertToAbsoluteUrls||o)&&(i=_(i)),r&&(i+=\"\\n/*# sourceMappingURL=data:application/json;base64,\"+btoa(unescape(encodeURIComponent(JSON.stringify(r))))+\" */\");var s=new Blob([i],{type:\"text/css\"}),a=t.href;t.href=URL.createObjectURL(s),a&&URL.revokeObjectURL(a)}var d={},m=function(t){var e;return function(){return void 0===e&&(e=t.apply(this,arguments)),e}}(function(){return window&&document&&document.all&&!window.atob}),v=function(t){var e={};return function(n){if(void 0===e[n]){var i=t.call(this,n);if(i instanceof window.HTMLIFrameElement)try{i=i.contentDocument.head}catch(t){i=null}e[n]=i}return e[n]}}(function(t){return document.querySelector(t)}),g=null,b=0,y=[],_=n(8);t.exports=function(t,e){if(\"undefined\"!=typeof DEBUG&&DEBUG&&\"object\"!=typeof document)throw new Error(\"The style-loader cannot be used in a non-browser environment\");(e=e||{}).attrs=\"object\"==typeof e.attrs?e.attrs:{},e.singleton||(e.singleton=m()),e.insertInto||(e.insertInto=\"head\"),e.insertAt||(e.insertAt=\"bottom\");var n=r(t,e);return i(n,e),function(t){for(var o=[],s=0;s<n.length;s++){var a=n[s];(l=d[a.id]).refs--,o.push(l)}t&&i(r(t,e),e);for(s=0;s<o.length;s++){var l=o[s];if(0===l.refs){for(var u=0;u<l.parts.length;u++)l.parts[u]();delete d[l.id]}}}};var x=function(){var t=[];return function(e,n){return 
t[e]=n,t.filter(Boolean).join(\"\\n\")}}()},function(t,e){t.exports=function(t){var e=\"undefined\"!=typeof window&&window.location;if(!e)throw new Error(\"fixUrls requires window.location\");if(!t||\"string\"!=typeof t)return t;var n=e.protocol+\"//\"+e.host,i=n+e.pathname.replace(/\\/[^\\/]*$/,\"/\");return t.replace(/url\\s*\\(((?:[^)(]|\\((?:[^)(]+|\\([^)(]*\\))*\\))*)\\)/gi,function(t,e){var r=e.trim().replace(/^\"(.*)\"$/,function(t,e){return e}).replace(/^'(.*)'$/,function(t,e){return e});if(/^(#|data:|http:\\/\\/|https:\\/\\/|file:\\/\\/\\/)/i.test(r))return t;var o;return o=0===r.indexOf(\"//\")?r:0===r.indexOf(\"/\")?n+r:i+r.replace(/^\\.\\//,\"\"),\"url(\"+JSON.stringify(o)+\")\"})}}])});", "file_path": "server/static/timeline/leaflet.timeline.js", "rank": 52, "score": 18495.385409901923 }, { "content": "class QuakeMap {\n\n\n\n constructor(mapId, sidebarId) {\n\n this.map = L.map(mapId, {\n\n center: [15.5, 120.91],\n\n zoom: 7,\n\n maxZoom: 18,\n\n zoomControl: false\n\n });\n\n\n\n const mapboxUrl = 'https://api.mapbox.com/styles/v1/{id}/tiles/{z}/{x}/{y}?access_token={accessToken}';\n\n\n\n const mapboxConfig = {\n\n attribution: 'Map data &copy; <a href=\"https://www.openstreetmap.org/\">OpenStreetMap</a> contributors, <a href=\"https://creativecommons.org/licenses/by-sa/2.0/\">CC-BY-SA</a>, Imagery © <a href=\"https://www.mapbox.com/\">Mapbox</a>',\n\n id: 'mapbox/outdoors-v11',\n\n accessToken: 'pk.eyJ1IjoiZmRlYW50b25pIiwiYSI6ImNrNWhhOHlueTAxcHAzZHA3Nnd1MDhveWkifQ.kTW32UkDDmHFl9MGhnNrbw',\n\n tileSize: 512,\n\n zoomOffset: -1\n\n };\n\n\n\n L.tileLayer(mapboxUrl, mapboxConfig).addTo(this.map);\n\n\n\n L.control.sidebar(sidebarId).addTo(this.map);\n\n\n\n L.control.zoom({\n\n position: 'topright'\n\n }).addTo(this.map);\n\n }\n\n\n\n static radius(magnitude, depth) {\n\n let size = Math.ceil(Math.exp(magnitude) / depth);\n\n if(size < 5) {\n\n size = 5;\n\n }\n\n // TODO: Find a better way to do this!\n\n if (depth < 2) {\n\n if (magnitude > 8) 
{\n\n size = 140;\n\n } else if (magnitude > 7) {\n\n size = 120;\n\n } else if (magnitude > 6) {\n\n size = 100;\n\n } else if (magnitude > 5) {\n\n size = 80;\n\n }\n\n }\n\n return size\n\n }\n\n\n\n static quakeMarker(latlng, magnitude, depth) {\n\n return new L.circleMarker(latlng, {\n\n className: \"marker-fade-in\",\n\n radius: QuakeMap.radius(magnitude, depth),\n\n fillColor: \"#ff3b33\",\n\n color: \"#ff3b33\",\n\n weight: 1,\n\n fillOpacity: 0.6\n\n });\n\n }\n\n\n\n static markerPopup(feature, layer) {\n\n if (feature.properties) {\n\n const props = feature.properties;\n\n const header = '<h3>' + props.province + ' ' + props.longitude + ', ' + props.latitude + '</h3>';\n\n const details = '<ul style=\"list-style-type:none;padding-left: 0;\">' +\n\n '<li><b>Magnitude: </b>' + props.magnitude + '</li>' +\n\n '<li><b>Depth: </b>' + props.depth + '</li>' +\n\n '<li><b>Location: </b>' + props.location + '</li>' +\n\n '<li><b>Timestamp: </b>' + props.datetime + '</li>' +\n\n '<li><b>Source: </b><a href=\"'+ props.url +'\" target=\"_blank\">phivolcs</li>' +\n\n '</ul>';\n\n layer.bindPopup(header + details);\n\n }\n\n }\n\n\n\n static quakeListItemHtml(props) {\n\n return '<div class=\"quake-container\">' +\n\n '<span class=\"quake-magnitude\">' + props.magnitude + '</span>' +\n\n '<h1 class=\"quake-location\">' + props.province + '</h1>' +\n\n '<h2 class=\"quake-timestamp\">' + moment(props.start).tz('Asia/Manila').format() + '</h2>' +\n\n '<aside class=\"quake-aside\">' + props.depth + ' km</aside>' +\n\n '</div>';\n\n }\n\n\n\n get leafletMap() {\n\n return this.map;\n\n }\n\n}\n\n\n", "file_path": "server/client/app/js/common.js", "rank": 53, "score": 18495.385409901923 }, { "content": "\"use strict\";\n\n\n\nimport QuakeMap from \"./common.js\";\n\n\n\nclass CurrentMap extends QuakeMap {\n\n\n\n constructor(mapId, sidebarId, listId) {\n\n super(mapId, sidebarId);\n\n\n\n this.list = document.getElementById(listId);\n\n\n\n this.map.spin(true);\n\n 
this.initialized = false;\n\n }\n\n\n\n static currentMarkers(json) {\n\n return L.geoJSON(json, {\n\n pointToLayer: function(feature, latlng) {\n\n if (feature.properties) {\n\n return CurrentMap.quakeMarker(latlng, feature.properties.magnitude, feature.properties.depth);\n\n }\n\n },\n\n onEachFeature: CurrentMap.markerPopup\n\n });\n\n }\n\n\n\n static filterOld(json) {\n\n const horizon = moment().utc().subtract(24, 'hours');\n\n const filtered = json.features.filter(function(item) {\n\n return moment.utc(item.properties.datetime).isAfter(horizon);\n\n });\n\n return {\n\n type: \"FeatureCollection\",\n\n features: filtered\n\n };\n\n }\n\n\n\n static updateList(layer, map, list) {\n\n\n\n const displayed = layer.getLayers().sort(function(a,b) {\n\n const first = moment(a.feature.properties.datetime);\n\n const second = moment(b.feature.properties.datetime);\n\n return first - second;\n\n });\n\n displayed.forEach(function(quake){\n\n const layerId = layer.getLayerId(quake);\n\n const props = quake.feature.properties;\n\n const newItem = document.createElement('li');\n\n newItem.setAttribute(\"data-layer-id\", layerId);\n\n newItem.innerHTML = CurrentMap.quakeListItemHtml(props);\n\n\n\n newItem.onclick = function(e) {\n\n map.flyTo(quake.getLatLng(), 10);\n\n map.once('moveend', function() {\n\n quake.openPopup();\n\n });\n\n };\n\n\n\n list.prepend(newItem);\n\n\n\n setTimeout(function() {\n\n newItem.className = newItem.className + \"quake-show\";\n\n }, 50);\n\n\n\n });\n\n\n\n if(displayed.length > 0) {\n\n const latest = displayed[displayed.length - 1];\n\n map.flyTo(latest.getLatLng(), 7);\n\n }\n\n }\n\n\n\n static clusterIcon(size, text) {\n\n const style = \"width: \" + size + \"px; height: \" + size + \"px; line-height: \" + size + \"px;\";\n\n const html = '<div class=\"quakes-marker-icon\" style=\"'+ style +'\"><b>' + text + '</b></div>';\n\n return L.divIcon({ html: html, className: 'quakes-cluster', iconSize: L.point(size, size) });\n\n }\n\n\n\n 
add(json) {\n\n let latest = CurrentMap.filterOld(json);\n\n let markers = CurrentMap.currentMarkers(latest);\n\n\n\n if(!this.initialized) {\n\n let cluster = L.markerClusterGroup({\n\n maxClusterRadius: function (zoom) {\n\n return (zoom <= 6) ? 80 : 1; // radius in pixels\n\n },\n\n iconCreateFunction: function(cluster) {\n\n const size = Math.log(cluster.getChildCount())*40;\n\n return CurrentMap.clusterIcon(size, cluster.getChildCount());\n\n }\n\n });\n\n cluster.addLayer(markers);\n\n this.map.addLayer(cluster);\n\n\n\n CurrentMap.updateList(cluster, this.map, this.list);\n\n\n\n this.layer = cluster;\n\n this.map.spin(false);\n\n this.initialized = true;\n\n } else {\n\n this.layer.addLayers(markers);\n\n this.layer.refreshClusters(markers);\n\n\n\n CurrentMap.updateList(markers, this.map, this.list);\n\n }\n\n }\n\n\n\n clear() {\n\n this.layer.clearLayers();\n\n this.list.innerHTML = \"\";\n\n }\n\n}\n\n\n\nexport default CurrentMap;\n\n\n\n\n", "file_path": "server/client/app/js/current.js", "rank": 54, "score": 18495.385409901923 }, { "content": "import CurrentMap from \"./current\";\n\nimport HistoryMap from \"./history\"\n\n\n\nconst current = new CurrentMap(\"current-map\", \"sidebar\", \"current-list\");\n\nconst history = new HistoryMap(\"history-map\", \"sidebar\", \"history-list\");\n\n\n\nlet reconnected = false;\n\nlet geojson = {\n\n features: [],\n\n type: \"FeatureCollection\"\n\n};\n\n\n\nconst qa = document.getElementsByClassName(\"quake-alert\")[0];\n\nconst tweet = document.getElementById(\"quake-tweet\");\n\nfunction showTweet(id) {\n\n tweet.innerHTML = \"\";\n\n qa.style.display = 'none';\n\n var options = { conversation: 'none' };\n\n if(window.innerWidth < 800) {\n\n options['cards'] = 'hidden';\n\n }\n\n twttr.widgets.createTweet(id, tweet, options );\n\n setTimeout(function() {\n\n qa.style.display = 'inline';\n\n qa.classList.add(\"quake-alert-show\");\n\n setTimeout(function() {\n\n qa.classList.remove(\"quake-alert-show\");\n\n 
setTimeout(function() {\n\n qa.style.display = 'none';\n\n },1000);\n\n }, 4000);\n\n }, 1000);\n\n}\n\n\n\nfunction showLastQuake(feature) {\n\n console.log(feature);\n\n const source = feature.properties.source;\n\n if(source.startsWith(\"https://twitter.com/\")) {\n\n const id = source.split(\"/\").pop();\n\n showTweet(id);\n\n }\n\n}\n\n\n\nfunction add(json) {\n\n geojson.features = geojson.features.concat(json.features).sort(function(a,b) {\n\n const first = moment(a.properties.datetime);\n\n const second = moment(b.properties.datetime);\n\n return first - second;\n\n });\n\n showLastQuake(geojson.features.slice(-1)[0]);\n\n current.add(json);\n\n}\n\n\n\nfunction display(show, hide) {\n\n document.getElementById(show + \"-map\").classList.remove(\"hide\");\n\n document.getElementById(show + \"-list\").classList.remove(\"hide\");\n\n document.getElementById(hide + \"-map\").classList.add(\"hide\");\n\n document.getElementById(hide + \"-list\").classList.add(\"hide\");\n\n const header = document.getElementById(\"list-header\");\n\n if(show === \"history\") {\n\n header.innerHTML = header.innerHTML.replace(\"24h\", \"History\");\n\n history.clear();\n\n history.leafletMap._onResize();\n\n history.load(geojson);\n\n } else {\n\n header.innerHTML = header.innerHTML.replace(\"History\", \"24h\");\n\n current.leafletMap._onResize();\n\n }\n\n}\n\n\n\ndocument.getElementById(\"history-toggle\").onclick = function() {\n\n this.classList.add(\"hide\");\n\n display(\"history\", \"current\");\n\n document.getElementById(\"current-toggle\").classList.remove(\"hide\");\n\n};\n\n\n\ndocument.getElementById(\"current-toggle\").onclick = function() {\n\n this.classList.add(\"hide\");\n\n display(\"current\", \"history\");\n\n document.getElementById(\"history-toggle\").classList.remove(\"hide\");\n\n};\n\n\n\nfunction connect() {\n\n const wsUri = (window.location.protocol==='https:'&&'wss://'||'ws://') + window.location.host + '/ws/';\n\n let ws = new 
WebSocket(wsUri);\n\n\n\n ws.onopen = function() {\n\n // TODO: set config here.\n\n };\n\n\n\n ws.onmessage = function(event) {\n\n const json = JSON.parse(event.data);\n\n console.log('New quakes ', json);\n\n\n\n if(reconnected) current.clear();\n\n add(json);\n\n reconnected = false;\n\n };\n\n\n\n ws.onclose = function(e) {\n\n console.log('Socket is closed. Reconnect will be attempted in 10 seconds.', e.reason);\n\n setTimeout(function() {\n\n reconnected = true;\n\n connect();\n\n }, 10000);\n\n };\n\n\n\n ws.onerror = function(err) {\n\n console.error('Socket encountered error: ', err.message, 'Closing socket');\n\n ws.close();\n\n };\n\n}\n\n\n\nconnect();\n\n\n\n\n", "file_path": "server/client/app/js/main.js", "rank": 55, "score": 18495.385409901923 }, { "content": "\n\n fn get_source(screen_name: String, id: u64) -> String {\n\n format!(\"https://twitter.com/{}/status/{}\", screen_name, id)\n\n }\n\n\n\n pub(crate) async fn get_quakes(&self) -> Result<Vec<Quake>, TwitterError> {\n\n let mut quakes: Vec<Quake> = Vec::new();\n\n\n\n for tweet in self.0.clone() {\n\n\n\n if tweet.get_text().contains(\"Earthquake Information\") {\n\n let row = Self::parse_text(tweet.get_text());\n\n\n\n let datetime = Self::get_datetime(&row)?;\n\n let longitude = Self::get_longitude(&row)?;\n\n let latitude = Self::get_latitude(&row)?;\n\n let magnitude = Self::get_magnitude(&row)?;\n\n let depth = Self::get_depth(&row)?;\n\n let location = Self::get_location(&row)?;\n\n let province = Self::get_province(&row)?;\n", "file_path": "twitter/src/parser.rs", "rank": 63, "score": 33.91188772436668 }, { "content": " fn get_url(row: &HashMap<String, String>) -> Result<String, ScraperError> {\n\n let text = row.get(\"url\");\n\n if text.is_some() {\n\n Ok(text.unwrap().clone())\n\n } else {\n\n Err(ScraperError::new(\"URL not found in row!\"))\n\n }\n\n }\n\n\n\n fn process_row(&self, row: &Row) -> Result<Quake, ScraperError> {\n\n let datetime = Self::get_datetime(&row)?;\n\n let 
longitude = Self::get_longitude(&row)?;\n\n let latitude = Self::get_latitude(&row)?;\n\n let magnitude = Self::get_magnitude(&row)?;\n\n let depth = Self::get_depth(&row)?;\n\n let location = Self::get_location(&row)?;\n\n let province = Self::get_province(&row)?;\n\n let url = Self::get_url(&row)?;\n\n let source = self.1.clone();\n\n\n", "file_path": "scraper/src/parser.rs", "rank": 64, "score": 33.86297335704621 }, { "content": " let quake = Quake::new(\n\n datetime,\n\n longitude,\n\n latitude,\n\n magnitude,\n\n depth,\n\n location,\n\n province,\n\n url,\n\n source\n\n );\n\n Ok(quake)\n\n }\n\n\n\n pub async fn get_quakes(&self) -> Result<Vec<Quake>, ScraperError> {\n\n let mut quakes = Vec::new();\n\n\n\n for row in self.0.clone() {\n\n\n\n match self.process_row(&row) {\n", "file_path": "scraper/src/parser.rs", "rank": 65, "score": 31.943849951711236 }, { "content": " fn parse_text(text: String) -> HashMap<String, String> {\n\n let strings: Vec<&str> = text.split(\"\\n\").collect();\n\n let mut map: HashMap<String, String> = HashMap::new();\n\n for string in strings {\n\n if let Some(datetime) = Self::capture(string.to_string(), \"Date and Time: \".to_string()) {\n\n map.insert(\"datetime\".to_string(), format!(\"{} +08\", datetime));\n\n };\n\n if let Some(magnitude) = Self::capture(string.to_string(), \"Magnitude = \".to_string()) {\n\n map.insert(\"magnitude\".to_string(), magnitude);\n\n };\n\n if let Some(depth) = Self::capture(string.to_string(), \"Depth = \".to_string()) {\n\n if let Some(pos) = depth.rfind(\" kilometers\") {\n\n let stripped = &depth[0..pos];\n\n map.insert(\"depth\".to_string(), stripped.to_string());\n\n }\n\n };\n\n if let Some(location) = Self::capture(string.to_string(), \"Location = \".to_string()) {\n\n let parts: Vec<&str> = location.split(\" - \").collect();\n\n if parts.len() == 2 {\n\n let (latitude, longitude) = Self::find_latlng(parts[0].to_string());\n", "file_path": "twitter/src/parser.rs", "rank": 67, "score": 
29.054811621904058 }, { "content": " let url = Self::get_url(tweet.get_url())?;\n\n let source = Self::get_source(tweet.get_screen_name(), tweet.get_tweet_id());\n\n\n\n let quake = Quake::new(\n\n datetime,\n\n longitude,\n\n latitude,\n\n magnitude,\n\n depth,\n\n location,\n\n province,\n\n url,\n\n source\n\n );\n\n quakes.push(quake);\n\n } else {\n\n debug!(\"Tweet {} missing #Earthquake tag, skipping:\\n{:#?}\", &tweet.get_tweet_id(), &tweet);\n\n }\n\n }\n\n\n", "file_path": "twitter/src/parser.rs", "rank": 68, "score": 28.67677287438159 }, { "content": " let url = td.select(&url_selector).last().unwrap().value().attr(\"href\").unwrap_or(\"\");\n\n let url = url.replace(\"\\\\\", \"/\");\n\n row.insert(\"url\".to_string(), format!(\"{}{}\", source_url, url).to_string());\n\n let text = Self::sanitize_text(td.text())\n\n .last().cloned().unwrap_or(\"error\");\n\n row.insert(\"Date - Time\".to_string(), format!(\"{} +08\", text));\n\n } else if header.eq(\"Location\") {\n\n let text: String = Self::sanitize_text(td.text()).join(\" \");\n\n let (location, province) = Self::find_province(text);\n\n row.insert(\"Location\".to_string(), location);\n\n row.insert(\"Province\".to_string(), province);\n\n } else if header.eq(\"Mag\") {\n\n let text = Self::sanitize_text(td.text())\n\n .last().cloned().unwrap_or(\"error\");\n\n row.insert(header, text.to_string());\n\n } else {\n\n row.insert(header, td.inner_html().trim().to_string());\n\n }\n\n }\n\n if !row.is_empty() {\n", "file_path": "scraper/src/parser.rs", "rank": 70, "score": 27.56632349488761 }, { "content": " let (location, province) = Self::find_province(parts[1].to_string());\n\n map.insert(\"longitude\".to_string(), longitude);\n\n map.insert(\"latitude\".to_string(), latitude);\n\n map.insert(\"location\".to_string(), location);\n\n map.insert(\"province\".to_string(), province);\n\n }\n\n };\n\n }\n\n map\n\n }\n\n\n\n fn get_datetime(row: &HashMap<String, String>) -> Result<DateTime<Utc>, 
TwitterError> {\n\n let text = row.get(\"datetime\");\n\n if text.is_some() {\n\n let value = text.unwrap();\n\n let datetime: DateTime<FixedOffset> = DateTime::parse_from_str(value, DATETIME_FORMAT)\n\n .map_err(|error|{\n\n TwitterError::new(format!(\"Trouble converting {} to timestamp: {}\", value, error.to_string()))\n\n })?;\n\n let utc: DateTime<Utc> = DateTime::from(datetime);\n", "file_path": "twitter/src/parser.rs", "rank": 71, "score": 27.491989592503916 }, { "content": " }\n\n }\n\n\n\n fn find_province(text: String) -> (String, String) {\n\n Quake::find_province(text)\n\n }\n\n\n\n fn find_latlng(text: String) -> (String, String) {\n\n let parts: Vec<&str> = text.split(\", \").collect();\n\n if parts.len() == 2 {\n\n let mut lat = parts[0].to_string();\n\n lat.truncate(lat.len() - 1);\n\n let mut lng = parts[1].to_string();\n\n lng.truncate(lng.len() - 1);\n\n (lat, lng)\n\n } else {\n\n (\"\".to_string(), \"\".to_string())\n\n }\n\n }\n\n\n", "file_path": "twitter/src/parser.rs", "rank": 73, "score": 23.852405369157424 }, { "content": " })\n\n } else {\n\n Err(ScraperError::new(\"Longitude not found in row!\"))\n\n }\n\n }\n\n\n\n fn get_latitude(row: &HashMap<String, String>) -> Result<f64, ScraperError> {\n\n let text = row.get(\"Latitude\");\n\n if text.is_some() {\n\n let value = text.unwrap();\n\n value.parse::<f64>().map_err(|error| {\n\n ScraperError::new(format!(\"Trouble converting {} to Latitude (f64): {}\", value, error.to_string()))\n\n })\n\n } else {\n\n Err(ScraperError::new(\"Latitude not found in row!\"))\n\n }\n\n }\n\n\n\n fn get_magnitude(row: &HashMap<String, String>) -> Result<f64, ScraperError> {\n\n let text = row.get(\"Mag\");\n", "file_path": "scraper/src/parser.rs", "rank": 74, "score": 22.516627848316354 }, { "content": " if text.is_some() {\n\n let value = text.unwrap();\n\n value.parse::<f64>().map_err(|error| {\n\n ScraperError::new(format!(\"Trouble converting {} to Magnitude (f64): {}\", value, error.to_string()))\n\n 
})\n\n } else {\n\n Err(ScraperError::new(\"Mag not found in row!\"))\n\n }\n\n }\n\n\n\n fn get_depth(row: &HashMap<String, String>) -> Result<u16, ScraperError> {\n\n let text = row.get(\"Depth\");\n\n if text.is_some() {\n\n let value = text.unwrap();\n\n value.parse::<u16>().map_err(|error| {\n\n ScraperError::new(format!(\"Trouble converting {} to Depth (i8): {}\", value, error.to_string()))\n\n })\n\n } else {\n\n Err(ScraperError::new(\"Depth not found in row!\"))\n\n }\n", "file_path": "scraper/src/parser.rs", "rank": 77, "score": 21.916264907512762 }, { "content": "#[derive(Debug,Clone,Deserialize)]\n\npub(crate) struct Tweet {\n\n created_at: String,\n\n id: u64,\n\n full_text: String,\n\n entities: Entities,\n\n user: User,\n\n}\n\n\n\nimpl Tweet {\n\n pub fn get_text(&self) -> String {\n\n self.full_text.clone()\n\n }\n\n pub fn get_url(&self) -> Option<String> {\n\n self.entities.urls.last().into_iter().map(|url| url.expanded_url.clone()).last()\n\n }\n\n pub fn get_tweet_id(&self) -> u64 {\n\n self.id\n\n }\n\n pub fn get_screen_name(&self) -> String {\n", "file_path": "twitter/src/client.rs", "rank": 78, "score": 21.82955768824133 }, { "content": " let text = row.get(\"Date - Time\");\n\n if text.is_some() {\n\n let value = text.unwrap();\n\n let datetime: DateTime<FixedOffset> = DateTime::parse_from_str(value, DATETIME_FORMAT)\n\n .map_err(|error|{\n\n ScraperError::new(format!(\"Trouble converting {} to timestamp: {}\", value, error.to_string()))\n\n })?;\n\n let utc: DateTime<Utc> = DateTime::from(datetime);\n\n Ok(utc)\n\n } else {\n\n Err(ScraperError::new(\"Date - Time not found in row!\"))\n\n }\n\n }\n\n\n\n fn get_longitude(row: &HashMap<String, String>) -> Result<f64, ScraperError> {\n\n let text = row.get(\"Longitude\");\n\n if text.is_some() {\n\n let value = text.unwrap();\n\n value.parse::<f64>().map_err(|error| {\n\n ScraperError::new(format!(\"Trouble converting {} to Longitude (f64): {}\", value, error.to_string()))\n", "file_path": 
"scraper/src/parser.rs", "rank": 79, "score": 21.417877747244233 }, { "content": " Ok(utc)\n\n } else {\n\n Err(TwitterError::new(\"Date - Time not found in tweet text!\"))\n\n }\n\n }\n\n\n\n fn get_longitude(row: &HashMap<String, String>) -> Result<f64, TwitterError> {\n\n let text = row.get(\"longitude\");\n\n if text.is_some() {\n\n let value = text.unwrap();\n\n value.parse::<f64>().map_err(|error| {\n\n TwitterError::new(format!(\"Trouble converting {} to f64: {}\", value, error.to_string()))\n\n })\n\n } else {\n\n Err(TwitterError::new(\"Longitude not found in tweet text!\"))\n\n }\n\n }\n\n\n\n fn get_latitude(row: &HashMap<String, String>) -> Result<f64, TwitterError> {\n\n let text = row.get(\"latitude\");\n", "file_path": "twitter/src/parser.rs", "rank": 80, "score": 21.185702251227895 }, { "content": " }\n\n\n\n fn get_depth(row: &HashMap<String, String>) -> Result<u16, TwitterError> {\n\n let text = row.get(\"depth\");\n\n if text.is_some() {\n\n let value = text.unwrap();\n\n value.parse::<u16>().map_err(|error| {\n\n TwitterError::new(format!(\"Trouble converting {} to i8: {}\", value, error.to_string()))\n\n })\n\n } else {\n\n Err(TwitterError::new(\"Depth not found in tweet text!\"))\n\n }\n\n }\n\n\n\n fn get_location(row: &HashMap<String, String>) -> Result<String, TwitterError> {\n\n let text = row.get(\"location\");\n\n if text.is_some() {\n\n Ok(text.unwrap().clone())\n\n } else {\n\n Err(TwitterError::new(\"Location not found in tweet text!\"))\n", "file_path": "twitter/src/parser.rs", "rank": 81, "score": 20.940772380713383 }, { "content": " \"#;\n\n\n\n #[actix_rt::test]\n\n async fn parse_html() {\n\n let parser = HtmlParser::parse(HTML.to_string(), \"http://www.example.com/\".to_string()).await;\n\n let quakes = parser.get_quakes().await.unwrap();\n\n println!(\"{:?}\", &quakes);\n\n assert_eq!(quakes[0].get_magnitude(), 4.0);\n\n }\n\n\n\n #[test]\n\n fn datetime_parsing() {\n\n let mut row: Row = Row::new();\n\n let header = \"Date - 
Time\".to_string();\n\n let dt_string = \"22 January 2020 - 12:40 AM +08\".to_string();\n\n row.insert(header, dt_string);\n\n let dt = HtmlParser::get_datetime(&row).unwrap();\n\n println!(\"{:?}\", dt);\n\n }\n\n\n\n #[test]\n\n fn parse_province() {\n\n let text = \"10km N 47° E of Cabanglasan (Bukidnon)\".to_string();\n\n let (location, province) = HtmlParser::find_province(text);\n\n println!(\"{:?}\", location);\n\n println!(\"{:?}\", province);\n\n }\n\n\n\n}", "file_path": "scraper/src/parser.rs", "rank": 83, "score": 20.882418561574266 }, { "content": " collection.push(row);\n\n }\n\n };\n\n }\n\n });\n\n HtmlParser(collection, source_url)\n\n }\n\n\n\n fn sanitize_text(text: Text<'_>) -> Vec<&str> {\n\n text\n\n .into_iter().map(|string| string.trim() )\n\n .filter(|string| !string.is_empty() )\n\n .collect()\n\n }\n\n\n\n fn find_province(text: String) -> (String, String) {\n\n Quake::find_province(text)\n\n }\n\n\n\n fn get_datetime(row: &HashMap<String, String>) -> Result<DateTime<Utc>, ScraperError> {\n", "file_path": "scraper/src/parser.rs", "rank": 84, "score": 20.6983897210626 }, { "content": "use log::*;\n\nuse crate::client::Tweet;\n\nuse quakes_api::*;\n\nuse std::collections::HashMap;\n\nuse crate::TwitterError;\n\n\n\nconst DATETIME_FORMAT: &str = \"%d %B %Y - %I:%M %p %#z\";\n\n\n\npub(crate) struct TweetParser(Vec<Tweet>);\n\n\n\nimpl TweetParser {\n\n\n\n fn capture(string: String, prefix: String) -> Option<String> {\n\n let string_len = string.len();\n\n let prefix_len = prefix.len();\n\n if string_len > prefix_len && string.starts_with(&prefix) {\n\n let stripped = &string[prefix.len()..string.len()];\n\n Some(stripped.trim().to_string())\n\n } else {\n\n None\n", "file_path": "twitter/src/parser.rs", "rank": 85, "score": 19.936826427195765 }, { "content": " Ok(quakes)\n\n }\n\n\n\n pub fn new(tweets: Vec<Tweet>) -> Self {\n\n TweetParser(tweets)\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n const TWEET_TEXT: 
&str = \"#EarthquakePH #EarthquakeSarangani\\nEarthquake Information No.1\\nDate and Time: 24 Jan 2020 - 07:21 AM\\nMagnitude = 2.3\\nDepth = 026 kilometers\\nLocation = 06.44N, 125.22E - 019 km N 11° W of Malungon (Sarangani)\\n\\nhttps://t.co/LzMZu5Gb5t\";\n\n\n\n #[test]\n\n fn parse_tweet_text() {\n\n let data = TweetParser::parse_text(TWEET_TEXT.to_string());\n\n println!(\"{:#?}\", data);\n\n }\n\n\n", "file_path": "twitter/src/parser.rs", "rank": 86, "score": 19.91492237786987 }, { "content": " println!(\"{:?}\", token);\n\n }\n\n\n\n #[actix_rt::test] #[ignore]\n\n async fn retrieve_timeline() {\n\n\n\n let mut client = init();\n\n let screen_name = \"phivolcs_dost\".to_string();\n\n let tweets = client.timeline(screen_name, None).await.unwrap();\n\n println!(\"{:#?}\", tweets);\n\n assert!(tweets.len() > 0);\n\n println!(\"Text: {:?}\", tweets[0].get_text());\n\n println!(\"Url: {:?}\", tweets[0].get_url());\n\n }\n\n\n\n const TWEET_TEXT: &str = \"#EarthquakePH #EarthquakeSarangani\\nEarthquake Information No.1\\nDate and Time: 24 Jan 2020 - 07:21 AM\\nMagnitude = 2.3\\nDepth = 026 kilometers\\nLocation = 06.44N, 125.22E - 019 km N 11° W of Malungon (Sarangani)\\n\\nhttps://t.co/LzMZu5Gb5t\";\n\n\n\n pub(crate) fn get_test_tweet() -> Tweet {\n\n Tweet {\n\n created_at: \"\".to_string(),\n", "file_path": "twitter/src/client.rs", "rank": 87, "score": 18.78670611029231 }, { "content": " if text.is_some() {\n\n let value = text.unwrap();\n\n value.parse::<f64>().map_err(|error| {\n\n TwitterError::new(format!(\"Trouble converting {} to f64: {}\", value, error.to_string()))\n\n })\n\n } else {\n\n Err(TwitterError::new(\"Latitude not found in tweet text!\"))\n\n }\n\n }\n\n\n\n fn get_magnitude(row: &HashMap<String, String>) -> Result<f64, TwitterError> {\n\n let text = row.get(\"magnitude\");\n\n if text.is_some() {\n\n let value = text.unwrap();\n\n value.parse::<f64>().map_err(|error| {\n\n TwitterError::new(format!(\"Trouble converting {} to f64: {}\", 
value, error.to_string()))\n\n })\n\n } else {\n\n Err(TwitterError::new(\"Magnitude not found in tweet text!\"))\n\n }\n", "file_path": "twitter/src/parser.rs", "rank": 88, "score": 18.708492008015718 }, { "content": "use awc;\n\nuse openssl::ssl::{SslConnector, SslMethod, SslVerifyMode};\n\nuse std::time::Duration;\n\nuse crate::ScraperError;\n\n\n\npub struct WebClient(awc::Client);\n\n\n\nimpl WebClient {\n\n\n\n fn ssl_connector() -> Result<SslConnector, std::io::Error> {\n\n let mut builder = SslConnector::builder(SslMethod::tls())?;\n\n builder.set_verify(SslVerifyMode::NONE);\n\n Ok(builder.build())\n\n }\n\n\n\n pub async fn retrieve(&self, url: String) -> Result<String, ScraperError> {\n\n let mut response = self.0.get(url)\n\n .header(\"User-Agent\", \"ph-quakes\")\n\n .send().await?;\n\n\n", "file_path": "scraper/src/client.rs", "rank": 89, "score": 17.575687582664358 }, { "content": "}\n\n\n\nimpl CacheActor {\n\n\n\n pub fn new(quakes: Vec<Quake>) -> CacheActor {\n\n let sessions = Vec::new();\n\n CacheActor { quakes, sessions }\n\n }\n\n}\n\n\n\n#[derive(Message)]\n\n#[rtype(result = \"()\")]\n\npub struct UpdateCache(pub Vec<Quake>);\n\n\n\nimpl Handler<UpdateCache> for CacheActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: UpdateCache, _: &mut Self::Context) -> Self::Result {\n\n let mut quakes = msg.0.clone();\n\n debug!(\"Current cache size: {}\", &self.quakes.len());\n", "file_path": "server/src/cache.rs", "rank": 90, "score": 17.343839055280174 }, { "content": " }\n\n\n\n fn get_location(row: &HashMap<String, String>) -> Result<String, ScraperError> {\n\n let text = row.get(\"Location\");\n\n if text.is_some() {\n\n Ok(text.unwrap().clone())\n\n } else {\n\n Err(ScraperError::new(\"Location not found in row!\"))\n\n }\n\n }\n\n\n\n fn get_province(row: &HashMap<String, String>) -> Result<String, ScraperError> {\n\n let text = row.get(\"Province\");\n\n if text.is_some() {\n\n Ok(text.unwrap().clone())\n\n } else {\n\n 
Err(ScraperError::new(\"province not found in row!\"))\n\n }\n\n }\n\n\n", "file_path": "scraper/src/parser.rs", "rank": 91, "score": 17.339096638659008 }, { "content": " async fn next(&mut self) -> Result<Vec<Quake>, TwitterError> {\n\n let screen_name = PHIVOLCS_SCREEN_NAME.to_string();\n\n let last_tweet_id = Some(self.last_tweet_id);\n\n let tweets = self.client.timeline(screen_name, last_tweet_id).await?;\n\n self.process(tweets).await\n\n }\n\n\n\n pub async fn get_tweets(&mut self) -> Result<Vec<Quake>, TwitterError> {\n\n if !self.has_started() {\n\n self.start().await\n\n } else {\n\n self.next().await\n\n }\n\n }\n\n\n\n pub fn new(key: String, secret: String) -> Self {\n\n let url = TWITTER_URL.to_string();\n\n let client = TwitterClient::new(url, key, secret);\n\n let last_tweet_id: u64 = 0;\n\n TwitterQuakes {\n", "file_path": "twitter/src/lib.rs", "rank": 92, "score": 16.786282654337562 }, { "content": "pub mod client;\n\npub mod parser;\n\n\n\nuse awc;\n\nuse std::borrow::Cow;\n\nuse std::str::Utf8Error;\n\nuse quakes_api::*;\n\nuse crate::client::{TwitterClient, Tweet};\n\nuse crate::parser::TweetParser;\n\n\n\nconst PHIVOLCS_SCREEN_NAME: &str = \"phivolcs_dost\";\n\nconst TWITTER_URL: &str = \"https://api.twitter.com\";\n\n\n\npub struct TwitterQuakes {\n\n client: TwitterClient,\n\n last_tweet_id: u64\n\n}\n\n\n\nimpl TwitterQuakes {\n\n\n", "file_path": "twitter/src/lib.rs", "rank": 93, "score": 16.759609066728473 }, { "content": "use log::*;\n\nuse awc;\n\nuse openssl::ssl::{SslConnector, SslMethod};\n\nuse std::time::Duration;\n\nuse serde_derive::*;\n\nuse crate::TwitterError;\n\n\n\n#[derive(Clone)]\n\npub(crate) struct TwitterClient {\n\n url: String,\n\n client: awc::Client,\n\n key: String,\n\n secret: String,\n\n token: String,\n\n}\n\n\n\n#[derive(Debug,Deserialize)]\n", "file_path": "twitter/src/client.rs", "rank": 94, "score": 16.405880844278443 }, { "content": "\n\n ctx.ping(b\"\");\n\n });\n\n 
}\n\n}\n\n\n\n#[derive(Message)]\n\n#[rtype(result = \"()\")]\n\npub struct CacheUpdates(pub QuakeList);\n\n\n\nimpl Handler<CacheUpdates> for WsActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: CacheUpdates, ctx: &mut Self::Context) -> Self::Result {\n\n debug!(\"Received new quakes to send to client...\");\n\n let quakes = msg.0.to_geojson();\n\n ctx.text(quakes.to_string());\n\n }\n\n}\n\n\n\npub(crate) async fn index(r: HttpRequest, stream: web::Payload, cache: web::Data<Addr<cache::CacheActor>>) -> Result<HttpResponse, Error> {\n\n debug!(\"{:?}\", r);\n\n let cache_addr= cache.get_ref().clone();\n\n let res = ws::start(WsActor::new(cache_addr), &r, stream);\n\n debug!(\"{:?}\", res);\n\n res\n\n}\n\n\n", "file_path": "server/src/websocket.rs", "rank": 95, "score": 16.39660980336551 }, { "content": " }\n\n }\n\n\n\n fn get_province(row: &HashMap<String, String>) -> Result<String, TwitterError> {\n\n let text = row.get(\"province\");\n\n if text.is_some() {\n\n Ok(text.unwrap().clone())\n\n } else {\n\n Err(TwitterError::new(\"province not found in tweet text!\"))\n\n }\n\n }\n\n\n\n fn get_url(text: Option<String>) -> Result<String, TwitterError> {\n\n if text.is_some() {\n\n let url = text.unwrap().replace(\"http://\", \"https://\");\n\n Ok(url)\n\n } else {\n\n Err(TwitterError::new(\"URL not found in tweet!\"))\n\n }\n\n }\n", "file_path": "twitter/src/parser.rs", "rank": 96, "score": 16.105800925915105 }, { "content": " set.extend(history);\n\n let mut quakes: Vec<Quake> = set.into_iter().collect();\n\n quakes.sort();\n\n Ok(quakes)\n\n}\n\n\n\n#[derive(Clone, Debug, PartialEq, Eq)]\n\npub struct ScraperError {\n\n description: Cow<'static, str>,\n\n}\n\n\n\nimpl ScraperError {\n\n pub fn new<S>(description: S) -> Self\n\n where\n\n S: Into<Cow<'static, str>>,\n\n {\n\n ScraperError {\n\n description: description.into(),\n\n }\n\n }\n", "file_path": "scraper/src/lib.rs", "rank": 97, "score": 16.02009561947573 }, { "content": "pub mod 
client;\n\npub mod parser;\n\n\n\nuse awc;\n\nuse std::borrow::Cow;\n\nuse std::str::Utf8Error;\n\nuse std::collections::HashSet;\n\n\n\nuse quakes_api::*;\n\nuse crate::client::WebClient;\n\nuse crate::parser::HtmlParser;\n\n\n\n\n\nstatic PHIVOLCS_URL: &str = \"https://earthquake.phivolcs.dost.gov.ph/\";\n\n\n\nasync fn retrieve_previous_month(client: &WebClient) -> Result<Vec<Quake>, ScraperError> {\n\n let current = Utc::today();\n\n let horizon = Utc::today() - time::Duration::weeks(4);\n\n if horizon.month() < current.month() {\n\n let url = format!(\"{}{}.html\", PHIVOLCS_URL, horizon.format(\"%Y_%B\"));\n", "file_path": "scraper/src/lib.rs", "rank": 98, "score": 15.408591598825263 }, { "content": " type Result = GetQuakesResponse;\n\n\n\n fn handle(&mut self, _: GetQuakes, _: &mut Self::Context) -> Self::Result {\n\n let quakes = QuakeList::new(self.quakes.clone());\n\n GetQuakesResponse(quakes)\n\n }\n\n}\n\n\n\n\n\n#[derive(Message)]\n\n#[rtype(result = \"()\")]\n\npub struct Connect {\n\n pub addr: Recipient<websocket::CacheUpdates>,\n\n}\n\n\n\nimpl Handler<Connect> for CacheActor {\n\n type Result = ();\n\n\n\n fn handle(&mut self, msg: Connect, _: &mut Self::Context) -> Self::Result {\n\n debug!(\"New websocket connection to cache!\");\n", "file_path": "server/src/cache.rs", "rank": 99, "score": 15.248578657287707 } ]
Rust
src/voxel_tools/mesh_builder.rs
TanTanDev/first_voxel_engine
1cb19a85fdba285e478eb97819dc762753c6c5e9
use crate::rendering::gpu_resources::GpuResources; use super::{ chunk, chunks::{adjacent_voxels, Chunks}, }; use super::{ direction::Direction, quad::Quad, rendering::voxel_vertex::VoxelVertex, voxel::Voxel, }; use wgpu::util::DeviceExt; pub fn build_chunk_mesh( chunks: &mut Chunks, device: &wgpu::Device, gpu_resources: &mut GpuResources, chunk_pos: &cgmath::Vector3<i32>, chunk_world_pos: &cgmath::Vector3<f32>, ) -> bool { let chunk_size = chunk::SIZE as i32; let mut quads = Vec::<Quad>::new(); for x in 0..chunk_size { for y in 0..chunk_size { for z in 0..chunk_size { let voxel_pos_local = cgmath::Vector3::<f32>::new(x as f32, y as f32, z as f32); let voxel_pos_world = chunk_world_pos + voxel_pos_local; if let Ok((voxel, back, left, down)) = adjacent_voxels(chunks, (x, y, z), chunk_pos) { process_voxel(&voxel, voxel_pos_world, &left, &down, &back, &mut quads); } } } } if quads.is_empty() { } let mut voxel_vertices = Vec::<VoxelVertex>::new(); let mut indices = Vec::<u32>::new(); let mut vert_index = 0; for quad in quads { let normal = quad.direction.get_normal(); (0..4).for_each(|index| { voxel_vertices.push(VoxelVertex { position: quad.corners[index].into(), normal: normal.into(), color_diffuse: quad.color.into(), }); }); indices.push(vert_index); indices.push(vert_index + 1); indices.push(vert_index + 2); indices.push(vert_index); indices.push(vert_index + 2); indices.push(vert_index + 3); vert_index += 4; } if let Some(chunk_mesh) = chunks.get_chunk_mesh_mut(chunk_pos) { let num_indices = indices.len() as u32; let num_vertices = voxel_vertices.len() as u32; let (v_buf, i_buf) = construct_buffers(device, voxel_vertices, indices); let v_buf = gpu_resources.buffer_arena.insert(v_buf); let i_buf = gpu_resources.buffer_arena.insert(i_buf); chunk_mesh.update_vertex_buffers(v_buf, i_buf, num_indices, num_vertices); return num_vertices != 0; } false } fn process_voxel( voxel: &Voxel, voxel_pos: cgmath::Vector3<f32>, left: &Voxel, down: &Voxel, back: &Voxel, quads: &mut 
Vec<Quad>, ) { match voxel.is_solid() { true => { if !left.is_solid() { quads.push(Quad::from_direction(Direction::Left, voxel_pos)); } if !down.is_solid() { quads.push(Quad::from_direction(Direction::Down, voxel_pos)); } if !back.is_solid() { quads.push(Quad::from_direction(Direction::Back, voxel_pos)); } } false => { if left.is_solid() { quads.push(Quad::from_direction(Direction::Right, voxel_pos)); } if down.is_solid() { quads.push(Quad::from_direction(Direction::Up, voxel_pos)); } if back.is_solid() { quads.push(Quad::from_direction(Direction::Forward, voxel_pos)); } } } } fn construct_buffers( device: &wgpu::Device, vertices: Vec<VoxelVertex>, indices: Vec<u32>, ) -> (wgpu::Buffer, wgpu::Buffer) { let vertex_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor { label: Some("voxel_chunk_vertices"), contents: bytemuck::cast_slice(&vertices), usage: wgpu::BufferUsage::VERTEX, }); let index_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor { label: Some("voxel_chunk_indices"), contents: bytemuck::cast_slice(&indices), usage: wgpu::BufferUsage::INDEX, }); (vertex_buffer, index_buffer) }
use crate::rendering::gpu_resources::GpuResources; use super::{ chunk, chunks::{adjacent_voxels, Chunks}, }; use super::{ direction::Direction, quad::Quad, rendering::voxel_vertex::VoxelVertex, voxel::Voxel, }; use wgpu::util::DeviceExt; pub fn build_chunk_mesh( chunks: &mut Chunks, device: &wgpu::Device, gpu_resources: &mut GpuResources, chunk_pos: &cgmath::Vector3<i32>, chunk_world_pos: &cgmath::Vector3<f32>, ) -> bool { let chunk_size = chunk::SIZE as i32; let mut quads = Vec::<Quad>::new(); for x in 0..chunk_size { for y in 0..chunk_size { for z in 0..chunk_size { let voxel_pos_local = cgmath::Vector3::<f32>::new(x as f32, y as f32, z as f32); let voxel_pos_world = chunk_world_pos + voxel_pos_local; if let Ok((voxel, back, left, down)) = adjacent_voxels(chunks, (x, y, z), chunk_pos) { process_voxel(&voxel, voxel_pos_world, &left, &down, &back, &mut quads); } } } } if quads.is_empty() { } let mut voxel_vertices = Vec::<VoxelVertex>::new(); let mut indices = Vec::<u32>::new(); let mut vert_index = 0; for quad in quads { let normal = quad.direction.get_normal(); (0..4).for_each(|index| { voxel_vertices.push(VoxelVertex { position: quad.corners[index].into(), normal: normal.into(), color_diffuse: quad.color.into(), }); }); indices.push(vert_index); indices.push(vert_index + 1); indices.push(vert_index + 2); indices.push(vert_index); indices.push(vert_index + 2); indices.push(vert_index + 3); vert_index += 4; } if let Some(chunk_mesh) = chunks.get_chunk_mesh_mut(chunk_pos) { let num_indices = indices.len() as u32; let num_vertices = voxel_vertices.len() as u32; let (v_buf, i_buf) = construct_buffers(device, voxel_vertices, indices); let v_buf = gpu_resources.buffer_arena.insert(v_buf); let i_buf = gpu_resources.buffer_arena.insert(i_buf); chunk_mesh.update_vertex_buffers(v_buf, i_buf, num_indices, num_vertices); return num_vertices != 0; } false }
fn construct_buffers( device: &wgpu::Device, vertices: Vec<VoxelVertex>, indices: Vec<u32>, ) -> (wgpu::Buffer, wgpu::Buffer) { let vertex_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor { label: Some("voxel_chunk_vertices"), contents: bytemuck::cast_slice(&vertices), usage: wgpu::BufferUsage::VERTEX, }); let index_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor { label: Some("voxel_chunk_indices"), contents: bytemuck::cast_slice(&indices), usage: wgpu::BufferUsage::INDEX, }); (vertex_buffer, index_buffer) }
fn process_voxel( voxel: &Voxel, voxel_pos: cgmath::Vector3<f32>, left: &Voxel, down: &Voxel, back: &Voxel, quads: &mut Vec<Quad>, ) { match voxel.is_solid() { true => { if !left.is_solid() { quads.push(Quad::from_direction(Direction::Left, voxel_pos)); } if !down.is_solid() { quads.push(Quad::from_direction(Direction::Down, voxel_pos)); } if !back.is_solid() { quads.push(Quad::from_direction(Direction::Back, voxel_pos)); } } false => { if left.is_solid() { quads.push(Quad::from_direction(Direction::Right, voxel_pos)); } if down.is_solid() { quads.push(Quad::from_direction(Direction::Up, voxel_pos)); } if back.is_solid() { quads.push(Quad::from_direction(Direction::Forward, voxel_pos)); } } } }
function_block-full_function
[ { "content": "pub fn adjacent_voxels<'a>(\n\n chunks: &'a mut Chunks,\n\n local_pos: (i32, i32, i32),\n\n chunk_pos: &cgmath::Vector3<i32>,\n\n) -> Result<(&'a Voxel, &'a Voxel, &'a Voxel, &'a Voxel)> {\n\n let (x, y, z) = (local_pos.0, local_pos.1, local_pos.2);\n\n let voxel = chunks\n\n .try_get_voxel(chunk_pos, &LocalCoordinate(x, y, z))\n\n .context(\"no voxel\")?;\n\n let back = chunks\n\n .try_get_voxel(chunk_pos, &LocalCoordinate(x, y, z - 1))\n\n .context(\"no back voxel\")?;\n\n let left = chunks\n\n .try_get_voxel(chunk_pos, &LocalCoordinate(x - 1, y, z))\n\n .context(\"no left voxel\")?;\n\n let down = chunks\n\n .try_get_voxel(chunk_pos, &LocalCoordinate(x, y - 1, z))\n\n .context(\"no down voxel\")?;\n\n Ok((voxel, back, left, down))\n\n}\n", "file_path": "src/voxel_tools/chunks.rs", "rank": 1, "score": 103332.2639879144 }, { "content": "pub fn draw_chunk<'a, 'b>(\n\n render_pass: &mut wgpu::RenderPass<'a>,\n\n num_indices: u32,\n\n camera_u: &'a wgpu::BindGroup,\n\n light_u: &'a wgpu::BindGroup,\n\n vertex_buffer: &'a wgpu::Buffer,\n\n index_buffer: &'a wgpu::Buffer,\n\n) -> Result<()> {\n\n render_pass.set_vertex_buffer(0, vertex_buffer.slice(..));\n\n render_pass.set_index_buffer(index_buffer.slice(..), wgpu::IndexFormat::Uint32);\n\n render_pass.set_bind_group(0, &camera_u, &[]);\n\n render_pass.set_bind_group(1, &light_u, &[]);\n\n render_pass.draw_indexed(0..num_indices, 0, 0..1);\n\n Ok(())\n\n}\n", "file_path": "src/voxel_tools/rendering/voxel_rendering.rs", "rank": 2, "score": 95453.57007586875 }, { "content": "pub fn create_light_bind_group_layout(device: &wgpu::Device) -> wgpu::BindGroupLayout {\n\n device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {\n\n label: Some(\"light_bind_group_layout\"),\n\n entries: &[wgpu::BindGroupLayoutEntry {\n\n binding: 0,\n\n visibility: wgpu::ShaderStage::VERTEX | wgpu::ShaderStage::FRAGMENT,\n\n ty: wgpu::BindingType::Buffer {\n\n ty: wgpu::BufferBindingType::Uniform,\n\n 
has_dynamic_offset: false,\n\n min_binding_size: None,\n\n },\n\n count: None,\n\n }],\n\n })\n\n}\n", "file_path": "src/light.rs", "rank": 3, "score": 87534.08631506619 }, { "content": "// less params exposed resulting in shorter code\n\npub fn create_pipeline_layout(\n\n device: &wgpu::Device,\n\n label: &str,\n\n bind_group_layouts: &[&BindGroupLayout],\n\n) -> wgpu::PipelineLayout {\n\n device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {\n\n label: Some(label),\n\n bind_group_layouts,\n\n push_constant_ranges: &[],\n\n })\n\n}\n", "file_path": "src/rendering/render_utils.rs", "rank": 4, "score": 79668.90028119663 }, { "content": "pub fn create_render_pipeline(\n\n device: &wgpu::Device,\n\n layout: &wgpu::PipelineLayout,\n\n color_format: wgpu::TextureFormat,\n\n depth_format: Option<wgpu::TextureFormat>,\n\n vertex_layouts: &[wgpu::VertexBufferLayout],\n\n shader_module: wgpu::ShaderModule,\n\n label: &str,\n\n) -> wgpu::RenderPipeline {\n\n let render_pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {\n\n label: Some(label),\n\n layout: Some(layout),\n\n vertex: wgpu::VertexState {\n\n module: &shader_module,\n\n entry_point: \"vs_main\",\n\n buffers: vertex_layouts,\n\n },\n\n fragment: Some(wgpu::FragmentState {\n\n module: &shader_module,\n\n entry_point: \"fs_main\",\n", "file_path": "src/rendering/render_utils.rs", "rank": 5, "score": 79668.90028119663 }, { "content": "pub fn create_shader_module(device: &wgpu::Device, shader_str: &str, label: &str) -> ShaderModule {\n\n device.create_shader_module(&wgpu::ShaderModuleDescriptor {\n\n label: Some(label),\n\n source: wgpu::ShaderSource::Wgsl(Cow::Borrowed(shader_str)),\n\n flags: wgpu::ShaderFlags::empty(),\n\n })\n\n}\n\n\n", "file_path": "src/rendering/render_utils.rs", "rank": 6, "score": 79506.91762196209 }, { "content": "// less params exposed resulting in shorter code\n\npub fn create_bind_group_layout(\n\n device: &wgpu::Device,\n\n label: &str,\n\n 
binding_location: u32,\n\n visibility: wgpu::ShaderStage,\n\n) -> BindGroupLayout {\n\n device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {\n\n label: Some(label),\n\n entries: &[wgpu::BindGroupLayoutEntry {\n\n binding: binding_location,\n\n visibility,\n\n ty: wgpu::BindingType::Buffer {\n\n ty: wgpu::BufferBindingType::Uniform,\n\n has_dynamic_offset: false,\n\n min_binding_size: None,\n\n },\n\n count: None,\n\n }],\n\n })\n\n}\n\n\n", "file_path": "src/rendering/render_utils.rs", "rank": 7, "score": 78036.06256828328 }, { "content": "pub fn create_voxel_pipeline(\n\n device: &wgpu::Device,\n\n texture_format: wgpu::TextureFormat,\n\n light_bind_group_layout: &wgpu::BindGroupLayout,\n\n) -> wgpu::RenderPipeline {\n\n let visibility = wgpu::ShaderStage::VERTEX | wgpu::ShaderStage::FRAGMENT;\n\n let camera_bind_group_layout =\n\n render_utils::create_bind_group_layout(&device, \"camera_bind_layout\", 0, visibility);\n\n\n\n let shader_module = render_utils::create_shader_module(\n\n &device,\n\n include_str!(\"voxel.wgsl\"),\n\n \"voxel_shader_module\",\n\n );\n\n\n\n let bind_group_layouts = &[&camera_bind_group_layout, &light_bind_group_layout];\n\n let pipeline_layout =\n\n render_utils::create_pipeline_layout(&device, \"voxel_pipeline\", bind_group_layouts);\n\n\n\n println!(\"creating pipeline\");\n", "file_path": "src/voxel_tools/rendering/voxel_pipeline.rs", "rank": 8, "score": 76504.42611060069 }, { "content": "fn main() {\n\n env_logger::init();\n\n let event_loop = EventLoop::new();\n\n let window = WindowBuilder::new().build(&event_loop).unwrap();\n\n let mut state = block_on(State::new(&window));\n\n\n\n let mut last_render_time = std::time::Instant::now();\n\n event_loop.run(move |event, _, control_flow| match event {\n\n Event::RedrawRequested(_window_id) => {\n\n let now = std::time::Instant::now();\n\n let dt = now - last_render_time;\n\n last_render_time = now;\n\n state.update(dt);\n\n match state.render() {\n\n Ok(_) => {}\n\n // 
recreate swap_chain if lost\n\n Err(wgpu::SwapChainError::Lost) => state.resize(state.size),\n\n Err(wgpu::SwapChainError::OutOfMemory) => *control_flow = ControlFlow::Exit,\n\n // all other errors (Outdated, Timeout) should be resolved by the next frame\n\n Err(e) => println!(\"{:?}\", e),\n", "file_path": "src/main.rs", "rank": 9, "score": 47179.900553264924 }, { "content": "fn main() -> Result<()> {\n\n copy_res()?;\n\n Ok(())\n\n}\n", "file_path": "build.rs", "rank": 10, "score": 44695.50951713908 }, { "content": "// copy resource folder to OUT_DIR\n\nfn copy_res() -> Result<()> {\n\n // This tells cargo to rerun this script if something in /res/ changes.\n\n println!(\"cargo:rerun-if-changed=res/*\");\n\n\n\n let out_dir = env::var(\"OUT_DIR\")?;\n\n let out_dir = PathBuf::from(out_dir);\n\n let out_dir = out_dir\n\n .parent()\n\n .unwrap()\n\n .parent()\n\n .unwrap()\n\n .parent()\n\n .unwrap();\n\n println!(\"out dir: {:?}\", out_dir);\n\n let mut copy_options = CopyOptions::new();\n\n copy_options.overwrite = true;\n\n let mut paths_to_copy = Vec::new();\n\n paths_to_copy.push(\"res/\");\n\n copy_items(&paths_to_copy, out_dir, &copy_options)?;\n\n Ok(())\n\n}\n\n\n", "file_path": "build.rs", "rank": 11, "score": 43533.06095739597 }, { "content": "pub trait VertexDesc {\n\n fn desc<'a>() -> wgpu::VertexBufferLayout<'a>;\n\n}\n", "file_path": "src/rendering/vertex_desc.rs", "rank": 14, "score": 40891.006365080524 }, { "content": "pub trait DrawModel<'a, 'b>\n\nwhere\n\n 'b: 'a,\n\n{\n\n fn draw_mesh(\n\n &mut self,\n\n mesh: &'b Mesh,\n\n material: &'b Material,\n\n uniform: &'b wgpu::BindGroup,\n\n light: &'b wgpu::BindGroup,\n\n );\n\n fn draw_mesh_instanced(\n\n &mut self,\n\n mesh: &'b Mesh,\n\n material: &'b Material,\n\n instances: Range<u32>,\n\n uniform: &'b wgpu::BindGroup,\n\n light: &'b wgpu::BindGroup,\n\n );\n\n fn draw_model(\n", "file_path": "src/model.rs", "rank": 15, "score": 39507.364421300765 }, { "content": "pub trait DrawLight<'a, 
'b>\n\nwhere\n\n 'b: 'a,\n\n{\n\n fn draw_light_mesh(\n\n &mut self,\n\n mesh: &'b Mesh,\n\n uniforms: &'b wgpu::BindGroup,\n\n light: &'b wgpu::BindGroup,\n\n );\n\n fn draw_light_mesh_instanced(\n\n &mut self,\n\n mesh: &'b Mesh,\n\n instances: Range<u32>,\n\n uniforms: &'b wgpu::BindGroup,\n\n light: &'b wgpu::BindGroup,\n\n ) where\n\n 'b: 'a;\n\n\n\n fn draw_light_model(\n", "file_path": "src/model.rs", "rank": 16, "score": 39507.364421300765 }, { "content": "use super::direction::Direction;\n\nuse crate::color::Color;\n\nuse cgmath::Vector3;\n\nuse rand::Rng;\n\n\n\npub struct Quad {\n\n pub color: Color,\n\n pub direction: Direction,\n\n // in world position\n\n pub corners: [Vector3<f32>; 4],\n\n}\n\n\n\nconst HALF_SIZE: f32 = 0.5f32;\n\n\n\nimpl Quad {\n\n pub fn from_direction(direction: Direction, pos: Vector3<f32>) -> Self {\n\n let corners = match direction {\n\n Direction::Left => [\n\n Vector3::new(pos.x - HALF_SIZE, pos.y - HALF_SIZE, pos.z - HALF_SIZE),\n\n Vector3::new(pos.x - HALF_SIZE, pos.y - HALF_SIZE, pos.z + HALF_SIZE),\n", "file_path": "src/voxel_tools/quad.rs", "rank": 17, "score": 29802.452281940907 }, { "content": " //color: Color::new(0.7f32, (1.-green_range) + rand::thread_rng().gen_range(0f32..green_range), 0.3f32, 1.),\n\n color: Color::new(\n\n rand::thread_rng().gen_range(0f32..0.1f32),\n\n (1. 
- green_range) + rand::thread_rng().gen_range(0f32..green_range),\n\n rand::thread_rng().gen_range(0f32..0.1f32),\n\n 1.,\n\n ),\n\n direction,\n\n }\n\n }\n\n}\n", "file_path": "src/voxel_tools/quad.rs", "rank": 18, "score": 29787.704678458533 }, { "content": " Vector3::new(pos.x - HALF_SIZE, pos.y - HALF_SIZE, pos.z - HALF_SIZE),\n\n ],\n\n Direction::Back => [\n\n Vector3::new(pos.x - HALF_SIZE, pos.y - HALF_SIZE, pos.z - HALF_SIZE),\n\n Vector3::new(pos.x - HALF_SIZE, pos.y + HALF_SIZE, pos.z - HALF_SIZE),\n\n Vector3::new(pos.x + HALF_SIZE, pos.y + HALF_SIZE, pos.z - HALF_SIZE),\n\n Vector3::new(pos.x + HALF_SIZE, pos.y - HALF_SIZE, pos.z - HALF_SIZE),\n\n ],\n\n Direction::Forward => [\n\n Vector3::new(pos.x + HALF_SIZE, pos.y - HALF_SIZE, pos.z - HALF_SIZE),\n\n Vector3::new(pos.x + HALF_SIZE, pos.y + HALF_SIZE, pos.z - HALF_SIZE),\n\n Vector3::new(pos.x - HALF_SIZE, pos.y + HALF_SIZE, pos.z - HALF_SIZE),\n\n Vector3::new(pos.x - HALF_SIZE, pos.y - HALF_SIZE, pos.z - HALF_SIZE),\n\n ],\n\n };\n\n\n\n let green_range = 0.4f32;\n\n Self {\n\n corners,\n\n //color: color::colors::GREEN,\n", "file_path": "src/voxel_tools/quad.rs", "rank": 19, "score": 29786.585335718773 }, { "content": " Vector3::new(pos.x - HALF_SIZE, pos.y + HALF_SIZE, pos.z + HALF_SIZE),\n\n Vector3::new(pos.x - HALF_SIZE, pos.y + HALF_SIZE, pos.z - HALF_SIZE),\n\n ],\n\n Direction::Right => [\n\n Vector3::new(pos.x - HALF_SIZE, pos.y + HALF_SIZE, pos.z - HALF_SIZE),\n\n Vector3::new(pos.x - HALF_SIZE, pos.y + HALF_SIZE, pos.z + HALF_SIZE),\n\n Vector3::new(pos.x - HALF_SIZE, pos.y - HALF_SIZE, pos.z + HALF_SIZE),\n\n Vector3::new(pos.x - HALF_SIZE, pos.y - HALF_SIZE, pos.z - HALF_SIZE),\n\n ],\n\n // assuming it's correct this is under i believe\n\n Direction::Down => [\n\n Vector3::new(pos.x - HALF_SIZE, pos.y - HALF_SIZE, pos.z - HALF_SIZE),\n\n Vector3::new(pos.x + HALF_SIZE, pos.y - HALF_SIZE, pos.z - HALF_SIZE),\n\n Vector3::new(pos.x + HALF_SIZE, pos.y - HALF_SIZE, pos.z + 
HALF_SIZE),\n\n Vector3::new(pos.x - HALF_SIZE, pos.y - HALF_SIZE, pos.z + HALF_SIZE),\n\n ],\n\n Direction::Up => [\n\n Vector3::new(pos.x - HALF_SIZE, pos.y - HALF_SIZE, pos.z + HALF_SIZE),\n\n Vector3::new(pos.x + HALF_SIZE, pos.y - HALF_SIZE, pos.z + HALF_SIZE),\n\n Vector3::new(pos.x + HALF_SIZE, pos.y - HALF_SIZE, pos.z - HALF_SIZE),\n", "file_path": "src/voxel_tools/quad.rs", "rank": 20, "score": 29783.65591587841 }, { "content": " if in_range && !adj_chunk_data_bad {\n\n self.chunk_mesh_load_queue.push_back(chunk_pos);\n\n if self.chunk_mesh_load_queue.len() >= MAX_MESH_QUEUE {\n\n return;\n\n }\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n pub fn update_unload_data_queue(&mut self) {\n\n let current_chunk_pos = cgmath::Vector3::<i32>::new(\n\n (self.position.x / SIZE as f32) as i32,\n\n (self.position.y / SIZE as f32) as i32,\n\n (self.position.z / SIZE as f32) as i32,\n\n );\n\n // find currently loaded meshes positions not contained in range\n\n // BOX BOUND CHECK IS FAST\n\n let outside = self\n", "file_path": "src/voxel_tools/chunks.rs", "rank": 21, "score": 28582.801879236085 }, { "content": " pub fn chunk_to_world(chunk_pos: cgmath::Vector3<i32>) -> cgmath::Vector3<f32> {\n\n cgmath::Vector3::<f32>::new(\n\n chunk_pos.x as f32 * SIZE as f32,\n\n chunk_pos.y as f32 * SIZE as f32,\n\n chunk_pos.z as f32 * SIZE as f32,\n\n )\n\n }\n\n\n\n pub fn in_range(&self, chunk_pos: cgmath::Vector3<i32>) -> bool {\n\n // convert from i32 postion to world f32 pos\n\n let chunk_real_pos = Self::chunk_to_world(chunk_pos);\n\n let delta = self.position - chunk_real_pos;\n\n let distance_sq: f32 = delta.magnitude2().into();\n\n let render_dist = (self.render_distance as f32) * SIZE as f32;\n\n let render_distance_sq = render_dist * render_dist;\n\n distance_sq < render_distance_sq\n\n }\n\n\n\n // based on current position load all meshes\n\n pub fn update_load_mesh_queue(&mut self) {\n", "file_path": "src/voxel_tools/chunks.rs", "rank": 22, "score": 28581.95067570911 }, { 
"content": " if self.chunk_data_unload_queue.len() >= MAX_DATA_UNLOAD_QUEUE {\n\n return;\n\n }\n\n }\n\n }\n\n\n\n // based on current position load all meshes\n\n pub fn update_unload_mesh_queue(&mut self) {\n\n let current_chunk_pos = cgmath::Vector3::<i32>::new(\n\n (self.position.x / SIZE as f32) as i32,\n\n (self.position.y / SIZE as f32) as i32,\n\n (self.position.z / SIZE as f32) as i32,\n\n );\n\n // find currently loaded meshes positions not contained in range\n\n // BOX BOUND CHECK IS FAST\n\n let outside = self\n\n .chunk_mesh_map\n\n .iter()\n\n .filter(|(p, _m)| {\n\n p.x < current_chunk_pos.x - self.render_distance\n", "file_path": "src/voxel_tools/chunks.rs", "rank": 23, "score": 28580.459278615563 }, { "content": "use super::voxel::Voxel;\n\n\n\n// argument-flavor struct\n\n#[derive(Debug, Clone, Copy)]\n\npub struct LocalCoordinate(pub i32, pub i32, pub i32);\n\n\n\n// dimension size\n\npub const SIZE: usize = 16;\n\n// chunk size in bits (5 = 32) (4 = 16)\n\n//pub const BIT_SIZE: i32 = 4;\n\nuse lazy_static::*;\n\nlazy_static! 
{\n\n // when SIZE 16, BIT_SIZE is 4\n\n // by shifting 16 << 4 we get 1\n\n // we with this get indexes from the collapsed array\n\n pub static ref BIT_SIZE: i32 = (SIZE as f32).log2() as i32;\n\n}\n\n\n\npub struct ChunkMesh {\n\n pub vertex_buffer: Option<generational_arena::Index>,\n", "file_path": "src/voxel_tools/chunk.rs", "rank": 24, "score": 28579.817443635086 }, { "content": " }\n\n\n\n pub fn get_voxel_from_index(&self, index: usize) -> Option<&Voxel> {\n\n //self.voxels.get(index).map_or(Voxel::new_empty(), |v| *v)\n\n self.voxels.get(index)\n\n }\n\n\n\n pub fn new() -> Self {\n\n let chunk = Self {\n\n voxels: [Voxel::new_empty(); SIZE * SIZE * SIZE],\n\n };\n\n chunk\n\n }\n\n\n\n pub fn build_voxel_data(&mut self, chunk_world_pos: &cgmath::Vector3<f32>) {\n\n use noise::{NoiseFn, Perlin, Seedable};\n\n let perlin = Perlin::new();\n\n perlin.set_seed(484);\n\n for (index, voxel) in self.voxels.iter_mut().enumerate() {\n\n let local_coord = Self::get_local_coordinate(index as i32);\n", "file_path": "src/voxel_tools/chunk.rs", "rank": 25, "score": 28579.10956501081 }, { "content": " return;\n\n }\n\n for y in -self.render_distance..self.render_distance {\n\n //for y in 0..1 {\n\n for z in -self.render_distance..self.render_distance {\n\n for x in -self.render_distance..self.render_distance {\n\n let current_chunk_pos = cgmath::Vector3::<i32>::new(\n\n (self.position.x / SIZE as f32) as i32,\n\n (self.position.y / SIZE as f32) as i32,\n\n (self.position.z / SIZE as f32) as i32,\n\n );\n\n let chunk_pos = current_chunk_pos + cgmath::Vector3::<i32>::new(x, y, z);\n\n\n\n // chunk is already being loaded, or is loaded\n\n let is_chunk_proccessing = self.is_chunk_processing(&chunk_pos);\n\n if is_chunk_proccessing {\n\n continue;\n\n }\n\n\n\n let in_range = self.in_range(current_chunk_pos);\n", "file_path": "src/voxel_tools/chunks.rs", "rank": 26, "score": 28577.07961524863 }, { "content": " pub index_buffer: Option<generational_arena::Index>,\n\n pub 
num_indices: u32,\n\n // debug info\n\n pub num_vertices: u32,\n\n}\n\n\n\nimpl ChunkMesh {\n\n pub fn new() -> Self {\n\n Self {\n\n vertex_buffer: None,\n\n index_buffer: None,\n\n num_indices: 0,\n\n num_vertices: 0,\n\n }\n\n }\n\n\n\n pub fn update_vertex_buffers(\n\n &mut self,\n\n vertex_buffer: generational_arena::Index,\n\n index_buffer: generational_arena::Index,\n", "file_path": "src/voxel_tools/chunk.rs", "rank": 27, "score": 28576.71023678165 }, { "content": " if self.chunk_mesh_map.len() >= DEFAULT_MAX_MESH_DATAS\n\n || self.chunk_mesh_load_queue.len() >= MAX_MESH_QUEUE\n\n {\n\n return;\n\n }\n\n for y in -self.render_distance..self.render_distance {\n\n //for y in 0..1 {\n\n for z in -self.render_distance..self.render_distance {\n\n for x in -self.render_distance..self.render_distance {\n\n let current_chunk_pos = cgmath::Vector3::<i32>::new(\n\n (self.position.x / SIZE as f32) as i32,\n\n (self.position.y / SIZE as f32) as i32,\n\n (self.position.z / SIZE as f32) as i32,\n\n );\n\n let chunk_pos = current_chunk_pos + cgmath::Vector3::<i32>::new(x, y, z);\n\n\n\n // chunk is already being loaded, or is loaded\n\n let is_mesh_proccessing = self.is_mesh_processing(&chunk_pos);\n\n if is_mesh_proccessing {\n\n continue;\n", "file_path": "src/voxel_tools/chunks.rs", "rank": 28, "score": 28576.466978193097 }, { "content": " pub fn get_index(coordinate: LocalCoordinate) -> usize {\n\n (coordinate.2 | (coordinate.1 << *BIT_SIZE) | (coordinate.0 << (*BIT_SIZE * 2))) as usize\n\n }\n\n\n\n pub fn get_local_coordinate(index: i32) -> LocalCoordinate {\n\n LocalCoordinate(\n\n (index as f32 / (SIZE * SIZE) as f32) as i32,\n\n ((index as f32 / SIZE as f32) % SIZE as f32) as i32,\n\n (index as f32 % SIZE as f32) as i32,\n\n )\n\n }\n\n\n\n pub fn get_voxel(&self, coordinate: LocalCoordinate) -> Option<&Voxel> {\n\n let index = Self::get_index(coordinate);\n\n self.get_voxel_from_index(index)\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub fn 
get_voxel_from_index_mut(&mut self, index: usize) -> Option<&mut Voxel> {\n\n self.voxels.get_mut(index).map_or(None, |v| Some(v))\n", "file_path": "src/voxel_tools/chunk.rs", "rank": 29, "score": 28576.120710855448 }, { "content": " let chunk = self.chunk_data_map.get(&chunk_pos).context(\"\")?;\n\n chunk.get_voxel(local_pos).context(\"\")\n\n }\n\n\n\n pub fn get_chunk_mesh_mut(\n\n &mut self,\n\n chunk_pos: &cgmath::Vector3<i32>,\n\n ) -> Option<&mut ChunkMesh> {\n\n self.chunk_mesh_map.get_mut(chunk_pos)\n\n }\n\n\n\n pub fn build_chunk_data(\n\n &mut self,\n\n chunk_pos: cgmath::Vector3<i32>,\n\n ) {\n\n let mut chunk = self.chunk_pool.detached();\n\n let chunk_world_pos = Self::chunk_to_world(chunk_pos);\n\n\n\n chunk.build_voxel_data(&chunk_world_pos);\n\n println!(\"loaded chunk data at world pos: {:?}\", chunk_world_pos);\n", "file_path": "src/voxel_tools/chunks.rs", "rank": 30, "score": 28575.74477328133 }, { "content": " gpu_resources,\n\n &chunk_pos,\n\n &chunk_world_pos,\n\n ) {\n\n // successfully built return for now, 'only one per frame'\n\n return;\n\n }\n\n }\n\n }\n\n\n\n pub fn is_chunk_processing(&self, chunk_pos: &cgmath::Vector3<i32>) -> bool {\n\n self.chunk_data_map.contains_key(chunk_pos)\n\n || self.chunk_data_load_queue.contains(chunk_pos)\n\n }\n\n\n\n pub fn is_mesh_processing(&self, chunk_pos: &cgmath::Vector3<i32>) -> bool {\n\n self.chunk_mesh_map.contains_key(chunk_pos)\n\n || self.chunk_mesh_load_queue.contains(chunk_pos)\n\n }\n\n\n", "file_path": "src/voxel_tools/chunks.rs", "rank": 31, "score": 28575.473861103757 }, { "content": "\n\n // chunk data are put in queue due to heavy data processing\n\n chunk_data_load_queue: VecDeque<cgmath::Vector3<i32>>,\n\n chunk_mesh_load_queue: VecDeque<cgmath::Vector3<i32>>,\n\n\n\n chunk_data_unload_queue: VecDeque<cgmath::Vector3<i32>>,\n\n chunk_mesh_unload_queue: VecDeque<cgmath::Vector3<i32>>,\n\n\n\n pub position: cgmath::Vector3<f32>,\n\n\n\n render_distance: i32,\n\n}\n\n\n\nimpl 
Chunks {\n\n pub fn new() -> Self {\n\n let chunks = Self {\n\n chunk_data_map: HashMap::with_capacity(DEFAULT_MAX_CHUNK_DATAS),\n\n chunk_mesh_map: HashMap::with_capacity(DEFAULT_MAX_MESH_DATAS),\n\n chunk_pool: pool().with(StartingSize(DEFAULT_MAX_CHUNK_DATAS)).build(),\n\n chunk_mesh_pool: pool().with(StartingSize(DEFAULT_MAX_MESH_DATAS)).build(),\n", "file_path": "src/voxel_tools/chunks.rs", "rank": 32, "score": 28575.44376873744 }, { "content": " num_indices: u32,\n\n num_vertices: u32,\n\n ) {\n\n self.vertex_buffer = Some(vertex_buffer);\n\n self.index_buffer = Some(index_buffer);\n\n self.num_indices = num_indices;\n\n self.num_vertices = num_vertices;\n\n }\n\n}\n\n\n\nimpl lifeguard::Recycleable for ChunkMesh {\n\n fn new() -> Self {\n\n ChunkMesh::new()\n\n }\n\n\n\n fn reset(&mut self) {\n\n self.vertex_buffer = None;\n\n self.index_buffer = None;\n\n self.num_indices = 0u32;\n\n }\n", "file_path": "src/voxel_tools/chunk.rs", "rank": 33, "score": 28575.375136218034 }, { "content": " // position of chunks to load in\n\n chunk_data_load_queue: VecDeque::with_capacity(MAX_DATA_QUEUE),\n\n chunk_mesh_load_queue: VecDeque::with_capacity(MAX_MESH_QUEUE),\n\n chunk_data_unload_queue: VecDeque::with_capacity(MAX_DATA_QUEUE),\n\n chunk_mesh_unload_queue: VecDeque::with_capacity(MAX_MESH_QUEUE),\n\n position: cgmath::Vector3::<f32>::new(0., 0., 0.),\n\n render_distance: RENDER_DIST_RADIUS,\n\n };\n\n chunks\n\n }\n\n\n\n pub fn build_chunk_data_in_queue(\n\n &mut self,\n\n ) {\n\n while let Some(chunk_pos) = self.chunk_data_load_queue.pop_front() {\n\n self.build_chunk_data(chunk_pos);\n\n }\n\n }\n\n\n\n pub fn make_coords_valid(\n", "file_path": "src/voxel_tools/chunks.rs", "rank": 34, "score": 28575.30493958967 }, { "content": "}\n\n\n\npub struct Chunk {\n\n pub voxels: [Voxel; SIZE * SIZE * SIZE],\n\n}\n\n\n\nimpl lifeguard::Recycleable for Chunk {\n\n fn new() -> Self {\n\n Chunk::new()\n\n }\n\n\n\n fn reset(&mut self) {\n\n for voxel in 
self.voxels.iter_mut() {\n\n voxel.set_density_fraciton(0f32);\n\n }\n\n }\n\n}\n\n\n\nimpl Chunk {\n\n // convert 3d coordinate to array index\n", "file_path": "src/voxel_tools/chunk.rs", "rank": 35, "score": 28575.214397055894 }, { "content": "// max amount of per-chunk data we can load\n\npub const DEFAULT_MAX_CHUNK_DATAS: usize = 10000;\n\n// max amount of per-chunk meshes we can load\n\npub const DEFAULT_MAX_MESH_DATAS: usize = 10000;\n\npub const RENDER_DIST_RADIUS: i32 = 8;\n\n\n\npub const MAX_DATA_QUEUE: usize = 16;\n\npub const MAX_MESH_QUEUE: usize = 16;\n\n\n\npub const MAX_DATA_UNLOAD_QUEUE: usize = 16;\n\npub const MAX_MESH_UNLOAD_QUEUE: usize = 16;\n\n\n\npub struct Chunks {\n\n // chunk_map owns the current chunks, but when unloaded puts them back to chunk_pool\n\n chunk_data_map: HashMap<cgmath::Vector3<i32>, Chunk>,\n\n chunk_mesh_map: HashMap<cgmath::Vector3<i32>, ChunkMesh>,\n\n\n\n // chunk data is recycled from these pools\n\n chunk_pool: Pool<Chunk>,\n\n chunk_mesh_pool: Pool<ChunkMesh>,\n", "file_path": "src/voxel_tools/chunks.rs", "rank": 36, "score": 28574.927158673952 }, { "content": " }\n\n\n\n let in_range = self.in_range(current_chunk_pos);\n\n // check if adjacent chunks are loaded\n\n\n\n use cgmath::Vector3 as vec;\n\n // check if all adjacent chunks data are loaded\n\n let adj_chunk_data_bad = [\n\n -vec::<i32>::unit_x(),\n\n vec::<i32>::unit_x(),\n\n -vec::<i32>::unit_y(),\n\n vec::<i32>::unit_y(),\n\n -vec::<i32>::unit_z(),\n\n vec::<i32>::unit_z(),\n\n ]\n\n .iter_mut()\n\n .map(|v| *v + chunk_pos)\n\n .any(|v| !self.chunk_data_map.contains_key(&v));\n\n\n\n // queue chunk for mesh creation\n", "file_path": "src/voxel_tools/chunks.rs", "rank": 37, "score": 28574.802969024706 }, { "content": " self.chunk_data_map.insert(chunk_pos, chunk);\n\n }\n\n\n\n pub fn build_chunk_meshes_in_queue(\n\n &mut self,\n\n device: &wgpu::Device,\n\n gpu_resources: &mut GpuResources,\n\n ) {\n\n while let Some(chunk_pos) = 
self.chunk_mesh_load_queue.pop_front() {\n\n if self.chunk_mesh_map.len() >= DEFAULT_MAX_CHUNK_DATAS {\n\n return;\n\n }\n\n let chunk_mesh = self.chunk_mesh_pool.detached();\n\n self.chunk_mesh_map.insert(chunk_pos, chunk_mesh);\n\n\n\n println!(\"building chunk mesh at: {:?}\", chunk_pos);\n\n let chunk_world_pos = Self::chunk_to_world(chunk_pos);\n\n if mesh_builder::build_chunk_mesh(\n\n self,\n\n device,\n", "file_path": "src/voxel_tools/chunks.rs", "rank": 38, "score": 28574.388571185627 }, { "content": "use anyhow::Context;\n\nuse anyhow::*;\n\nuse cgmath::InnerSpace;\n\nuse lifeguard::*;\n\nuse std::{\n\n collections::{HashMap, VecDeque},\n\n};\n\n\n\nuse crate::rendering::gpu_resources::GpuResources;\n\n\n\nuse super::mesh_builder;\n\nuse super::{\n\n chunk::Chunk,\n\n rendering::voxel_rendering::{self},\n\n};\n\nuse super::{\n\n chunk::{ChunkMesh, LocalCoordinate, SIZE},\n\n voxel::Voxel,\n\n};\n\n\n", "file_path": "src/voxel_tools/chunks.rs", "rank": 39, "score": 28574.12664023222 }, { "content": " while local_pos.2 < 0 {\n\n local_pos.2 += chunk_size;\n\n chunk_pos.z -= 1;\n\n }\n\n while local_pos.2 > chunk_size {\n\n local_pos.2 -= chunk_size;\n\n chunk_pos.z += 1;\n\n }\n\n }\n\n\n\n // if the local coordinate goes outside bounds, the adjacent chunk will be checked instead\n\n pub fn try_get_voxel(\n\n &self,\n\n chunk_pos: &cgmath::Vector3<i32>,\n\n local_pos: &LocalCoordinate,\n\n ) -> Result<&Voxel> {\n\n let mut chunk_pos = *chunk_pos;\n\n let mut local_pos = *local_pos;\n\n Self::make_coords_valid(&mut chunk_pos, &mut local_pos);\n\n\n", "file_path": "src/voxel_tools/chunks.rs", "rank": 40, "score": 28574.125457730257 }, { "content": " if in_range {\n\n // load chunk\n\n self.chunk_data_load_queue.push_back(chunk_pos);\n\n }\n\n // check if we don't wan to load any more\n\n if self.chunk_data_load_queue.len() >= MAX_DATA_QUEUE {\n\n println!(\"done\");\n\n return;\n\n }\n\n }\n\n }\n\n }\n\n }\n\n\n\n pub fn draw<'a, '_b>(\n\n &mut self,\n\n 
render_pass: &mut wgpu::RenderPass<'a>,\n\n camera_bind_group: &'a wgpu::BindGroup,\n\n light_bind_group: &'a wgpu::BindGroup,\n\n gpu_resources: &'a GpuResources,\n", "file_path": "src/voxel_tools/chunks.rs", "rank": 41, "score": 28573.540543217783 }, { "content": " v_buffer.destroy();\n\n }\n\n gpu_resources.buffer_arena.remove(v_buf_key);\n\n }\n\n if let Some(i_buf_key) = chunk_mesh.index_buffer {\n\n if let Some(i_buffer) = gpu_resources.buffer_arena.get_mut(i_buf_key) {\n\n i_buffer.destroy();\n\n }\n\n gpu_resources.buffer_arena.remove(i_buf_key);\n\n }\n\n self.chunk_mesh_pool.attach(chunk_mesh);\n\n }\n\n }\n\n }\n\n\n\n // based on current position load all meshes\n\n pub fn update_load_data_queue(&mut self) {\n\n if self.chunk_data_map.len() >= DEFAULT_MAX_CHUNK_DATAS\n\n || self.chunk_data_load_queue.len() >= MAX_DATA_QUEUE\n\n {\n", "file_path": "src/voxel_tools/chunks.rs", "rank": 42, "score": 28572.8344904689 }, { "content": " chunk_pos: &mut cgmath::Vector3<i32>,\n\n local_pos: &mut LocalCoordinate,\n\n ) {\n\n let chunk_size = SIZE as i32;\n\n while local_pos.0 < 0 {\n\n local_pos.0 += chunk_size;\n\n chunk_pos.x -= 1;\n\n }\n\n while local_pos.0 > chunk_size {\n\n local_pos.0 -= chunk_size;\n\n chunk_pos.x += 1;\n\n }\n\n while local_pos.1 < 0 {\n\n local_pos.1 += chunk_size;\n\n chunk_pos.y -= 1;\n\n }\n\n while local_pos.1 > chunk_size {\n\n local_pos.1 -= chunk_size;\n\n chunk_pos.y += 1;\n\n }\n", "file_path": "src/voxel_tools/chunks.rs", "rank": 43, "score": 28572.832007115878 }, { "content": " }\n\n\n\n pub fn unload_data_queue(&mut self) {\n\n while let Some(chunk_pos) = self.chunk_data_unload_queue.pop_front() {\n\n // detach chunk data\n\n if let Some(chunk_data) = self.chunk_data_map.remove(&chunk_pos) {\n\n println!(\"unloading data at: {:?}\", chunk_pos);\n\n self.chunk_pool.attach(chunk_data);\n\n }\n\n }\n\n }\n\n\n\n // generate meshes queued up\n\n pub fn unload_mesh_queue(&mut self, gpu_resources: &mut GpuResources) {\n\n while 
let Some(chunk_pos) = self.chunk_mesh_unload_queue.pop_front() {\n\n // detach mesh data\n\n if let Some(chunk_mesh) = self.chunk_mesh_map.remove(&chunk_pos) {\n\n println!(\"unloading mesh at: {:?}\", chunk_pos);\n\n if let Some(v_buf_key) = chunk_mesh.vertex_buffer {\n\n if let Some(v_buffer) = gpu_resources.buffer_arena.get_mut(v_buf_key) {\n", "file_path": "src/voxel_tools/chunks.rs", "rank": 44, "score": 28572.35961705715 }, { "content": " );\n\n }\n\n Ok(())\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub fn get_vertex_count(&self) -> u32 {\n\n self.chunk_mesh_map\n\n .iter()\n\n .map(|(_i, m)| m.num_vertices)\n\n .sum()\n\n }\n\n}\n", "file_path": "src/voxel_tools/chunks.rs", "rank": 45, "score": 28571.412371380145 }, { "content": " ) -> anyhow::Result<()> {\n\n for (_pos, chunk_mesh) in self.chunk_mesh_map.iter() {\n\n let vertex_buffer_index = chunk_mesh.vertex_buffer.as_ref().context(\"no vertices\")?;\n\n let index_buffer_index = chunk_mesh.index_buffer.as_ref().context(\"no indices\")?;\n\n let num_indices = chunk_mesh.num_indices;\n\n let vertex_buffer = gpu_resources\n\n .buffer_arena\n\n .get(*vertex_buffer_index)\n\n .context(\"no vertex buf\")?;\n\n let index_buffer = gpu_resources\n\n .buffer_arena\n\n .get(*index_buffer_index)\n\n .context(\"no vertex buf\")?;\n\n let _ = voxel_rendering::draw_chunk(\n\n render_pass,\n\n num_indices,\n\n camera_bind_group,\n\n light_bind_group,\n\n vertex_buffer,\n\n index_buffer,\n", "file_path": "src/voxel_tools/chunks.rs", "rank": 46, "score": 28569.764264530004 }, { "content": " .chunk_mesh_map\n\n .iter()\n\n .filter(|(p, _m)| {\n\n p.x < current_chunk_pos.x - self.render_distance\n\n || p.x > current_chunk_pos.x + self.render_distance\n\n || p.y < current_chunk_pos.y - self.render_distance\n\n || p.y > current_chunk_pos.y + self.render_distance\n\n || p.z < current_chunk_pos.z - self.render_distance\n\n || p.z > current_chunk_pos.z + self.render_distance\n\n })\n\n .map(|(p, _m)| p)\n\n 
.collect::<Vec<_>>();\n\n\n\n for chunk_pos in outside {\n\n // already proccessing skip\n\n if self.chunk_data_unload_queue.contains(chunk_pos) {\n\n continue;\n\n }\n\n println!(\"queueing chunk for data unload: {:?}\", chunk_pos);\n\n self.chunk_data_unload_queue.push_back(*chunk_pos);\n", "file_path": "src/voxel_tools/chunks.rs", "rank": 47, "score": 28568.90738190338 }, { "content": " || p.x > current_chunk_pos.x + self.render_distance\n\n || p.y < current_chunk_pos.y - self.render_distance\n\n || p.y > current_chunk_pos.y + self.render_distance\n\n || p.z < current_chunk_pos.z - self.render_distance\n\n || p.z > current_chunk_pos.z + self.render_distance\n\n })\n\n .map(|(p, _m)| p)\n\n .collect::<Vec<_>>();\n\n\n\n for chunk_pos in outside {\n\n // already proccessing skip\n\n if self.chunk_mesh_unload_queue.contains(chunk_pos) {\n\n continue;\n\n }\n\n self.chunk_mesh_unload_queue.push_back(*chunk_pos);\n\n println!(\"queueing chunk for mesh unload: {:?}\", chunk_pos);\n\n if self.chunk_mesh_unload_queue.len() >= MAX_MESH_UNLOAD_QUEUE {\n\n return;\n\n }\n\n }\n", "file_path": "src/voxel_tools/chunks.rs", "rank": 48, "score": 28568.896251171755 }, { "content": " let (l_x, l_y, l_z) = (local_coord.0, local_coord.1, local_coord.2);\n\n\n\n // convert noise to world\n\n let down_scale = 0.027f64;\n\n let x = (chunk_world_pos.x as f64 + l_x as f64) * down_scale;\n\n let y = (chunk_world_pos.y as f64 + l_y as f64) * down_scale;\n\n let z = (chunk_world_pos.z as f64 + l_z as f64) * down_scale;\n\n let density = perlin.get([x, y, z]);\n\n if density > 0.3f64 {\n\n voxel.set_density_fraciton(1f32);\n\n }\n\n }\n\n }\n\n}\n", "file_path": "src/voxel_tools/chunk.rs", "rank": 49, "score": 28568.405193046554 }, { "content": " }\n\n\n\n pub fn process_scroll(&mut self, delta: &MouseScrollDelta) {\n\n self.scroll = -match delta {\n\n // I'm assuming a line is about 100 pixels\n\n MouseScrollDelta::LineDelta(_, scroll) => scroll * 100.0,\n\n 
MouseScrollDelta::PixelDelta(PhysicalPosition { y: scroll, .. }) => *scroll as f32,\n\n };\n\n }\n\n\n\n pub fn update_camera(&mut self, camera: &mut Camera, dt: Duration) {\n\n let dt = dt.as_secs_f32();\n\n\n\n // Move forward/backward and left/right\n\n let (yaw_sin, yaw_cos) = camera.yaw.0.sin_cos();\n\n let forward = Vector3::new(yaw_cos, 0.0, yaw_sin).normalize();\n\n let right = Vector3::new(-yaw_sin, 0.0, yaw_cos).normalize();\n\n camera.position += forward * (self.amount_forward - self.amount_backward) * self.speed * dt;\n\n camera.position += right * (self.amount_right - self.amount_left) * self.speed * dt;\n\n\n", "file_path": "src/camera_controller.rs", "rank": 52, "score": 15.37321753529801 }, { "content": "pub enum Direction {\n\n Left,\n\n Right,\n\n Down,\n\n Up,\n\n Back,\n\n Forward,\n\n}\n\n\n\nimpl Direction {\n\n pub fn get_normal(&self) -> cgmath::Vector3<f32> {\n\n match self {\n\n Direction::Left => -cgmath::Vector3::<f32>::unit_x(),\n\n Direction::Right => cgmath::Vector3::<f32>::unit_x(),\n\n Direction::Down => -cgmath::Vector3::<f32>::unit_y(),\n\n Direction::Up => cgmath::Vector3::<f32>::unit_y(),\n\n Direction::Back => -cgmath::Vector3::<f32>::unit_z(),\n\n Direction::Forward => cgmath::Vector3::<f32>::unit_z(),\n\n }\n\n }\n\n}\n", "file_path": "src/voxel_tools/direction.rs", "rank": 53, "score": 15.222576798706832 }, { "content": "#[repr(C)]\n\n#[derive(Copy, Clone, Debug, bytemuck::Pod, bytemuck::Zeroable)]\n\npub struct Light {\n\n pub position: [f32; 3],\n\n // due to uniforms requireing 16 byte (4 float) spacing, we need to use a padding field here\n\n pub _padding: u32,\n\n pub color: [f32; 3],\n\n}\n\n\n", "file_path": "src/light.rs", "rank": 54, "score": 14.60713957156793 }, { "content": " }\n\n}\n\n\n\npub struct VertexInstance {\n\n pub position: cgmath::Vector3<f32>,\n\n pub rotation: cgmath::Quaternion<f32>,\n\n}\n\n\n\nimpl VertexInstance {\n\n pub fn to_raw(&self) -> VertexInstanceRaw {\n\n // needed to make invert() and 
transpose() available\n\n use cgmath::{Matrix, SquareMatrix};\n\n let model =\n\n cgmath::Matrix4::from_translation(self.position) * cgmath::Matrix4::from(self.rotation);\n\n let normal_matrix = model\n\n .invert()\n\n .expect(\"can't inverse model matrix\")\n\n .transpose();\n\n VertexInstanceRaw {\n\n model: model.into(),\n\n normal_matrix: normal_matrix.into(),\n\n }\n\n }\n\n}\n", "file_path": "src/rendering/vertex_instance.rs", "rank": 55, "score": 14.436668801142956 }, { "content": "use wgpu::util::DeviceExt;\n\n\n\nuse crate::{model::ModelVertex, rendering::vertex_desc::VertexDesc, texture::Texture, Vertex};\n\n\n\npub struct DepthPass {\n\n pipeline: wgpu::RenderPipeline,\n\n bind_group_layout: wgpu::BindGroupLayout,\n\n bind_group: wgpu::BindGroup,\n\n pub texture: Texture,\n\n index_buffer: wgpu::Buffer,\n\n vertex_buffer: wgpu::Buffer,\n\n num_indices: u32,\n\n}\n\n\n\npub const DEPTH_VERTICES: &[Vertex] = &[\n\n Vertex {\n\n position: [0.0, 0.0, 0.0],\n\n tex_coords: [0.0, 1.0],\n\n },\n\n Vertex {\n", "file_path": "src/depth_pass.rs", "rank": 56, "score": 13.976330056351792 }, { "content": "use cgmath::{Angle, InnerSpace, Rad, Vector3};\n\n#[repr(C)]\n\n#[derive(Debug, Clone, Copy, bytemuck::Pod, bytemuck::Zeroable)]\n\npub struct Uniform {\n\n view_position: [f32; 4],\n\n view_proj: [[f32; 4]; 4],\n\n}\n\n\n\nimpl Uniform {\n\n fn new() -> Self {\n\n // needed to access ::identity()\n\n use cgmath::SquareMatrix;\n\n Self {\n\n view_position: [0.0; 4],\n\n view_proj: cgmath::Matrix4::identity().into(),\n\n }\n\n }\n\n\n\n pub fn update(&mut self, eye: &cgmath::Point3<f32>, view_proj: cgmath::Matrix4<f32>) {\n\n // We don't specifically need homogeneous coordinates since we're just using\n", "file_path": "src/camera.rs", "rank": 58, "score": 13.815896284025506 }, { "content": "use std::{f32::consts::FRAC_PI_2, time::Duration};\n\n\n\nuse cgmath::{InnerSpace, Rad, Vector3};\n\nuse winit::{\n\n dpi::PhysicalPosition,\n\n event::{ElementState, 
MouseScrollDelta, VirtualKeyCode},\n\n};\n\n\n\nuse crate::camera::Camera;\n\n\n\n#[derive(Debug)]\n\npub struct CameraController {\n\n amount_left: f32,\n\n amount_right: f32,\n\n amount_forward: f32,\n\n amount_backward: f32,\n\n amount_up: f32,\n\n amount_down: f32,\n\n rotate_horizontal: f32,\n\n rotate_vertical: f32,\n", "file_path": "src/camera_controller.rs", "rank": 59, "score": 13.62456431446241 }, { "content": "use crate::rendering::vertex_desc::VertexDesc;\n\n\n\n#[repr(C)]\n\n#[derive(Copy, Clone, Debug, bytemuck::Pod, bytemuck::Zeroable)]\n\npub struct VoxelVertex {\n\n pub position: [f32; 3],\n\n pub normal: [f32; 3],\n\n pub color_diffuse: [f32; 3],\n\n}\n\n\n\nimpl VertexDesc for VoxelVertex {\n\n fn desc<'a>() -> wgpu::VertexBufferLayout<'a> {\n\n use std::mem;\n\n wgpu::VertexBufferLayout {\n\n array_stride: mem::size_of::<VoxelVertex>() as wgpu::BufferAddress,\n\n step_mode: wgpu::InputStepMode::Vertex,\n\n attributes: &[\n\n // position\n\n wgpu::VertexAttribute {\n\n format: wgpu::VertexFormat::Float32x3,\n", "file_path": "src/voxel_tools/rendering/voxel_vertex.rs", "rank": 60, "score": 13.355080087308352 }, { "content": " }\n\n\n\n pub fn is_solid(&self) -> bool {\n\n self.density > 0u8\n\n }\n\n\n\n #[allow(dead_code)]\n\n pub fn density_fraction(&self) -> f32 {\n\n self.density as f32 / 255f32\n\n }\n\n\n\n pub fn set_density_fraciton(&mut self, fraction: f32) {\n\n self.density = (fraction * 255f32) as u8;\n\n }\n\n}\n", "file_path": "src/voxel_tools/voxel.rs", "rank": 61, "score": 12.914168969020643 }, { "content": " // a vec3 in the shader. We're using Point3 for the camera.eye, and this is\n\n // the easiest way to convert to Vector4. 
We're using Vector4 because of\n\n // the uniforms 16 byte spacing requirement\n\n self.view_position = eye.to_homogeneous().into();\n\n self.view_proj = view_proj.into();\n\n }\n\n}\n\n\n\n#[rustfmt::skip]\n\npub const OPENGL_TO_WGPU_MATRIX: cgmath::Matrix4<f32> = cgmath::Matrix4::new(\n\n 1.0, 0.0, 0.0, 0.0,\n\n 0.0, 1.0, 0.0, 0.0,\n\n 0.0, 0.0, 0.5, 0.0,\n\n 0.0, 0.0, 0.5, 1.0,\n\n);\n\n\n\npub struct Camera {\n\n pub position: cgmath::Point3<f32>,\n\n pub yaw: Rad<f32>,\n\n pub pitch: Rad<f32>,\n", "file_path": "src/camera.rs", "rank": 62, "score": 12.900457939279736 }, { "content": " .write_buffer(&self.light_buffer, 0, bytemuck::cast_slice(&[self.light]));\n\n\n\n self.camera_controller.update_camera(&mut self.camera, dt);\n\n self.camera.update_uniform();\n\n self.rotation += 3f32;\n\n self.queue.write_buffer(\n\n &self.camera_uniform_buffer,\n\n 0,\n\n bytemuck::cast_slice(&[self.camera.uniform]),\n\n );\n\n\n\n self.chunks.position = (\n\n self.camera.position.x,\n\n self.camera.position.y,\n\n self.camera.position.z,\n\n )\n\n .into();\n\n\n\n use rand::*;\n\n if rand::thread_rng().gen_range(0..5) == 0 {\n", "file_path": "src/main.rs", "rank": 63, "score": 12.487663948527171 }, { "content": "\n\n pub fn update_uniform(&mut self) {\n\n self.uniform\n\n .update(&self.position, self.build_view_projection_matrix());\n\n }\n\n\n\n pub fn build_view_projection_matrix(&self) -> cgmath::Matrix4<f32> {\n\n let view = cgmath::Matrix4::look_to_rh(\n\n self.position,\n\n Vector3::new(self.yaw.0.cos(), self.pitch.0.sin(), self.yaw.sin()).normalize(),\n\n Vector3::unit_y(),\n\n );\n\n let proj =\n\n cgmath::perspective(cgmath::Deg(self.fovy), self.aspect, self.z_near, self.z_far);\n\n OPENGL_TO_WGPU_MATRIX * proj * view\n\n }\n\n}\n", "file_path": "src/camera.rs", "rank": 64, "score": 12.217482466829068 }, { "content": "use super::vertex_desc::VertexDesc;\n\n\n\n#[repr(C)]\n\n#[derive(Copy, Clone, bytemuck::Pod, bytemuck::Zeroable)]\n\npub struct VertexInstanceRaw 
{\n\n pub model: [[f32; 4]; 4],\n\n pub normal_matrix: [[f32; 4]; 4],\n\n}\n\n\n\nimpl VertexDesc for VertexInstanceRaw {\n\n fn desc<'a>() -> wgpu::VertexBufferLayout<'a> {\n\n use std::mem;\n\n wgpu::VertexBufferLayout {\n\n array_stride: mem::size_of::<VertexInstanceRaw>() as wgpu::BufferAddress,\n\n step_mode: wgpu::InputStepMode::Instance,\n\n attributes: &[\n\n // model\n\n wgpu::VertexAttribute {\n\n offset: 0,\n\n shader_location: 5,\n", "file_path": "src/rendering/vertex_instance.rs", "rank": 65, "score": 12.177587708798033 }, { "content": " let diffuse_color = Color::from(mat.diffuse);\n\n materials.push(Material {\n\n name: mat.name,\n\n diffuse_texture: diffuse_texture,\n\n bind_group,\n\n diffuse_color: diffuse_color,\n\n });\n\n }\n\n\n\n let mut meshes = Vec::new();\n\n for m in obj_models {\n\n let mut vertices = Vec::new();\n\n println!(\"mesh indices: {:?}\", m.mesh.indices.len());\n\n println!(\"mesh postiions: {:?}\", m.mesh.positions.len());\n\n println!(\"mesh normals: {:?}\", m.mesh.normals.len());\n\n for i in 0..m.mesh.positions.len() / 3 {\n\n vertices.push(ModelVertex {\n\n position: [\n\n m.mesh.positions[i * 3],\n\n m.mesh.positions[i * 3 + 1],\n", "file_path": "src/model.rs", "rank": 66, "score": 12.167052713102049 }, { "content": " }\n\n}\n\n\n\nimpl Color {\n\n pub fn new(r: f32, g: f32, b: f32, a: f32) -> Color {\n\n Color([\n\n (r.min(1.).max(0.) * 255.) as u8,\n\n (g.min(1.).max(0.) * 255.) as u8,\n\n (b.min(1.).max(0.) * 255.) as u8,\n\n (a.min(1.).max(0.) * 255.) 
as u8,\n\n ])\n\n }\n\n}\n\n\n\n#[allow(dead_code)]\n\npub mod colors {\n\n use super::Color;\n\n pub const LIGHTGRAY: Color = Color([200, 200, 200, 255]);\n\n pub const GRAY: Color = Color([130, 130, 130, 255]);\n\n pub const DARKGRAY: Color = Color([80, 80, 80, 255]);\n", "file_path": "src/color.rs", "rank": 67, "score": 11.986346244025942 }, { "content": " position: [1.0, 0.0, 0.0],\n\n tex_coords: [1.0, 1.0],\n\n },\n\n Vertex {\n\n position: [1.0, 1.0, 0.0],\n\n tex_coords: [1.0, 0.0],\n\n },\n\n Vertex {\n\n position: [0.0, 1.0, 0.0],\n\n tex_coords: [0.0, 0.0],\n\n },\n\n];\n\n\n\npub const DEPTH_INDICES: &[u16] = &[0, 1, 2, 0, 2, 3];\n\n\n\nimpl DepthPass {\n\n pub fn new(device: &wgpu::Device, sc_desc: &wgpu::SwapChainDescriptor) -> Self {\n\n let texture = Texture::create_depth_texture(&device, &sc_desc, \"depth_texture\");\n\n let shader_module = crate::rendering::render_utils::create_shader_module(\n\n device,\n", "file_path": "src/depth_pass.rs", "rank": 68, "score": 11.986090836284259 }, { "content": " for mesh in model.meshes.iter() {\n\n let material = &model.materials[mesh.material_id];\n\n self.draw_mesh_instanced(mesh, material, instances.clone(), uniform, light);\n\n }\n\n }\n\n}\n\n\n\n#[repr(C)]\n\n#[derive(Copy, Clone, Debug, bytemuck::Pod, bytemuck::Zeroable)]\n\npub struct ModelVertex {\n\n position: [f32; 3],\n\n tex_coords: [f32; 2],\n\n normal: [f32; 3],\n\n color_diffuse: [f32; 3],\n\n}\n\n\n\nimpl VertexDesc for ModelVertex {\n\n fn desc<'a>() -> wgpu::VertexBufferLayout<'a> {\n\n use std::mem;\n\n wgpu::VertexBufferLayout {\n", "file_path": "src/model.rs", "rank": 69, "score": 11.487758225471895 }, { "content": " a: 1.0,\n\n };\n\n let aspect = sc_desc.width as f32 / sc_desc.height as f32;\n\n let mut camera = Camera::new(aspect);\n\n\n\n let offset = 8f32;\n\n let instances = (0..NUM_INSTANCES_PER_ROW)\n\n .flat_map(|z| {\n\n (0..NUM_INSTANCES_PER_ROW).map(move |x| {\n\n let position = cgmath::Vector3::new(x as f32 * offset, 0f32, z 
as f32 * offset)\n\n - INSTANCE_DISPLACEMENT;\n\n let rotation = if position.is_zero() {\n\n cgmath::Quaternion::from_axis_angle(\n\n cgmath::Vector3::unit_z(),\n\n cgmath::Deg(0.0),\n\n )\n\n } else {\n\n cgmath::Quaternion::from_axis_angle(\n\n position.clone().normalize(),\n\n cgmath::Deg(45.0),\n", "file_path": "src/main.rs", "rank": 70, "score": 10.95533217698399 }, { "content": " // find what chunks needs to be loaded\n\n chunks.update_load_data_queue();\n\n chunks.update_load_mesh_queue();\n\n\n\n // load voxel data in chunks\n\n chunks.build_chunk_data_in_queue();\n\n\n\n // load meshes based on voxel data in chunk\n\n chunks.build_chunk_meshes_in_queue(&device, &mut gpu_resources);\n\n\n\n Self {\n\n gpu_resources,\n\n chunks,\n\n rotation: 0f32,\n\n surface,\n\n camera,\n\n camera_controller,\n\n camera_uniform_buffer,\n\n camera_bind_group: uniform_bind_group,\n\n device,\n", "file_path": "src/main.rs", "rank": 71, "score": 10.855425470625272 }, { "content": " pub name: String,\n\n pub vertex_buffer: wgpu::Buffer,\n\n pub index_buffer: wgpu::Buffer,\n\n pub num_indices: u32,\n\n pub material_id: usize,\n\n}\n\n\n\npub struct Model {\n\n pub meshes: Vec<Mesh>,\n\n pub materials: Vec<Material>,\n\n}\n\n\n\nimpl Model {\n\n pub fn load<P: AsRef<Path>>(\n\n device: &wgpu::Device,\n\n queue: &wgpu::Queue,\n\n layout: &wgpu::BindGroupLayout,\n\n path: P,\n\n ) -> Result<Self> {\n\n let (obj_models, obj_materials) = tobj::load_obj(\n", "file_path": "src/model.rs", "rank": 72, "score": 10.737730426842724 }, { "content": "use anyhow::*;\n\nuse image::GenericImageView;\n\nuse std::path::Path;\n\nuse wgpu::util::DeviceExt;\n\n\n\nuse crate::color::Color;\n\n\n\npub struct Texture {\n\n pub texture: wgpu::Texture,\n\n pub view: wgpu::TextureView,\n\n pub sampler: wgpu::Sampler,\n\n}\n\n\n\nimpl Texture {\n\n #[allow(dead_code)]\n\n pub fn from_bytes_to_image(\n\n device: &wgpu::Device,\n\n queue: &wgpu::Queue,\n\n bytes: &[u8],\n\n label: &str,\n", "file_path": 
"src/texture.rs", "rank": 73, "score": 10.599269610577299 }, { "content": " pub tex_coords: [f32; 2],\n\n}\n\n\n\npub const NUM_INSTANCES_PER_ROW: u32 = 100;\n\npub const NUM_INSTANCES: u32 = NUM_INSTANCES_PER_ROW * NUM_INSTANCES_PER_ROW;\n\npub const INSTANCE_DISPLACEMENT: cgmath::Vector3<f32> = cgmath::Vector3::new(\n\n NUM_INSTANCES_PER_ROW as f32 * 0.5f32,\n\n 0.0,\n\n NUM_INSTANCES_PER_ROW as f32 * 0.5f32,\n\n);\n\n\n", "file_path": "src/main.rs", "rank": 74, "score": 10.55713310927158 }, { "content": "pub mod chunk;\n\npub mod chunks;\n\npub mod direction;\n\npub mod mesh_builder;\n\npub mod quad;\n\npub mod rendering;\n\npub mod voxel;\n", "file_path": "src/voxel_tools.rs", "rank": 75, "score": 10.417081479586813 }, { "content": " pub aspect: f32,\n\n pub fovy: f32,\n\n pub z_near: f32,\n\n pub z_far: f32,\n\n pub uniform: Uniform,\n\n}\n\n\n\nimpl Camera {\n\n pub fn new(aspect: f32) -> Self {\n\n Self {\n\n position: (0., 1., 2.).into(),\n\n yaw: Rad::<f32>(-90f32),\n\n pitch: Rad::<f32>(-20f32),\n\n aspect,\n\n fovy: 45.0,\n\n z_near: 0.1,\n\n z_far: 500.0,\n\n uniform: Uniform::new(),\n\n }\n\n }\n", "file_path": "src/camera.rs", "rank": 76, "score": 10.313715411835977 }, { "content": " ) -> Result<Self> {\n\n let img = image::load_from_memory(bytes)?;\n\n Self::from_image(device, queue, &img, Some(label))\n\n }\n\n\n\n pub fn from_color(\n\n device: &wgpu::Device,\n\n queue: &wgpu::Queue,\n\n color: Color,\n\n width: u32,\n\n height: u32,\n\n ) -> Result<Self> {\n\n let mut bytes = Vec::with_capacity((width * height) as usize);\n\n for _y in 0..height {\n\n for _x in 0..width {\n\n let colors: [u8; 4] = color.into();\n\n bytes.push(colors[0]);\n\n bytes.push(colors[1]);\n\n bytes.push(colors[2]);\n\n bytes.push(colors[3]);\n", "file_path": "src/texture.rs", "rank": 77, "score": 10.305255607887297 }, { "content": " self.mouse_pressed = *state == ElementState::Pressed;\n\n true\n\n }\n\n DeviceEvent::Key(KeyboardInput {\n\n virtual_keycode: Some(key),\n\n 
state,\n\n ..\n\n }) => self.camera_controller.process_keyboard(*key, *state),\n\n _ => false,\n\n }\n\n }\n\n\n\n fn update(&mut self, dt: std::time::Duration) {\n\n use cgmath::Rotation3;\n\n let old_position: cgmath::Vector3<_> = self.light.position.into();\n\n self.light.position =\n\n (cgmath::Quaternion::from_axis_angle((0.0, 1.0, 0.0).into(), cgmath::Deg(1.0))\n\n * old_position)\n\n .into();\n\n self.queue\n", "file_path": "src/main.rs", "rank": 78, "score": 10.089825977636478 }, { "content": " self.chunks.update_load_data_queue();\n\n self.chunks.update_load_mesh_queue();\n\n\n\n self.chunks.update_unload_mesh_queue();\n\n self.chunks.update_unload_data_queue();\n\n }\n\n self.chunks\n\n .build_chunk_data_in_queue();\n\n self.chunks\n\n .build_chunk_meshes_in_queue(&self.device, &mut self.gpu_resources);\n\n self.chunks.unload_data_queue();\n\n self.chunks.unload_mesh_queue(&mut self.gpu_resources);\n\n }\n\n\n\n fn render(&mut self) -> Result<(), wgpu::SwapChainError> {\n\n let frame = self.swap_chain.get_current_frame()?.output;\n\n let mut encoder = self\n\n .device\n\n .create_command_encoder(&wgpu::CommandEncoderDescriptor {\n\n label: Some(\"Render encoder\"),\n", "file_path": "src/main.rs", "rank": 79, "score": 9.886184498510236 }, { "content": " self.sc_desc.width = new_size.width;\n\n self.sc_desc.height = new_size.height;\n\n self.camera.aspect = new_size.width as f32 / new_size.height as f32;\n\n self.swap_chain = self.device.create_swap_chain(&self.surface, &self.sc_desc);\n\n self.depth_pass.resize(&self.device, &self.sc_desc);\n\n }\n\n\n\n fn input(&mut self, event: &DeviceEvent) -> bool {\n\n match event {\n\n DeviceEvent::MouseMotion { delta } => {\n\n if self.mouse_pressed {\n\n self.camera_controller.process_mouse(delta.0, delta.1);\n\n }\n\n true\n\n }\n\n DeviceEvent::MouseWheel { delta } => {\n\n self.camera_controller.process_scroll(delta);\n\n true\n\n }\n\n DeviceEvent::Button { button: 1, state } => {\n", "file_path": 
"src/main.rs", "rank": 80, "score": 9.82989931832908 }, { "content": " m.mesh.positions[i * 3 + 2],\n\n ],\n\n tex_coords: [m.mesh.texcoords[i * 2], m.mesh.texcoords[i * 2 + 1]],\n\n normal: [\n\n m.mesh.normals[i * 3],\n\n m.mesh.normals[i * 3 + 1],\n\n m.mesh.normals[i * 3 + 2],\n\n ],\n\n color_diffuse: materials[m.mesh.material_id.unwrap()].diffuse_color.into(),\n\n });\n\n }\n\n let vertex_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {\n\n label: Some(&format!(\"{:?} vertex_buffer\", path.as_ref())),\n\n contents: bytemuck::cast_slice(&vertices),\n\n usage: wgpu::BufferUsage::VERTEX,\n\n });\n\n let index_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {\n\n label: Some(&format!(\"{:?} index_buffer\", path.as_ref())),\n\n contents: bytemuck::cast_slice(&m.mesh.indices),\n\n usage: wgpu::BufferUsage::INDEX,\n", "file_path": "src/model.rs", "rank": 82, "score": 9.517424386399442 }, { "content": " }\n\n\n\n pub fn process_keyboard(&mut self, key: VirtualKeyCode, state: ElementState) -> bool {\n\n let amount = if state == ElementState::Pressed {\n\n 1.0\n\n } else {\n\n 0.0\n\n };\n\n match key {\n\n VirtualKeyCode::W | VirtualKeyCode::Up => {\n\n self.amount_forward = amount;\n\n true\n\n }\n\n VirtualKeyCode::S | VirtualKeyCode::Down => {\n\n self.amount_backward = amount;\n\n true\n\n }\n\n VirtualKeyCode::A | VirtualKeyCode::Left => {\n\n self.amount_left = amount;\n\n true\n", "file_path": "src/camera_controller.rs", "rank": 83, "score": 9.37750212611019 }, { "content": " )\n\n };\n\n\n\n let camera_controller = CameraController::new(10.2, 1.0);\n\n let depth_pass = DepthPass::new(&device, &sc_desc);\n\n\n\n let obj_model = model::Model::load(\n\n &device,\n\n &queue,\n\n &texture_bind_group_layout,\n\n std::path::Path::new(\"res/turkey.obj\"),\n\n )\n\n .unwrap();\n\n\n\n let voxel_render_pipeline =\n\n create_voxel_pipeline(&device, sc_desc.format, &light_bind_group_layout);\n\n\n\n let mut gpu_resources = 
GpuResources::new();\n\n\n\n let mut chunks = Chunks::new();\n", "file_path": "src/main.rs", "rank": 84, "score": 8.853321902691505 }, { "content": " }\n\n }\n\n let bytes: &[u8] = bytes.as_ref();\n\n Self::from_bytes(device, queue, &bytes, width, height, Some(\"color texture\"))\n\n }\n\n\n\n pub fn from_bytes(\n\n device: &wgpu::Device,\n\n queue: &wgpu::Queue,\n\n bytes: &[u8],\n\n width: u32,\n\n height: u32,\n\n label: Option<&str>,\n\n ) -> Result<Self> {\n\n let texture_size = wgpu::Extent3d {\n\n width,\n\n height,\n\n depth_or_array_layers: 1,\n\n };\n\n\n", "file_path": "src/texture.rs", "rank": 85, "score": 8.839905641600243 }, { "content": " scroll: f32,\n\n speed: f32,\n\n sensitivity: f32,\n\n}\n\n\n\nimpl CameraController {\n\n pub fn new(speed: f32, sensitivity: f32) -> Self {\n\n Self {\n\n amount_left: 0.0,\n\n amount_right: 0.0,\n\n amount_forward: 0.0,\n\n amount_backward: 0.0,\n\n amount_up: 0.0,\n\n amount_down: 0.0,\n\n rotate_horizontal: 0.0,\n\n rotate_vertical: 0.0,\n\n scroll: 0.0,\n\n speed,\n\n sensitivity,\n\n }\n", "file_path": "src/camera_controller.rs", "rank": 86, "score": 8.83073658226615 }, { "content": " bind_group,\n\n bind_group_layout,\n\n vertex_buffer,\n\n index_buffer,\n\n num_indices,\n\n }\n\n }\n\n\n\n pub fn resize(&mut self, device: &wgpu::Device, sc_desc: &wgpu::SwapChainDescriptor) {\n\n self.texture = Texture::create_depth_texture(device, sc_desc, \"depth_texture\");\n\n self.bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {\n\n label: Some(\"depth_pass.bind_group\"),\n\n layout: &self.bind_group_layout,\n\n entries: &[\n\n wgpu::BindGroupEntry {\n\n binding: 0,\n\n resource: wgpu::BindingResource::TextureView(&self.texture.view),\n\n },\n\n wgpu::BindGroupEntry {\n\n binding: 1,\n", "file_path": "src/depth_pass.rs", "rank": 87, "score": 8.798543112618272 }, { "content": "use camera::Camera;\n\nuse camera_controller::*;\n\nuse cgmath::{InnerSpace, Zero};\n\nuse futures::executor::block_on;\n\nuse 
model::Model;\n\nuse rendering::gpu_resources::GpuResources;\n\nuse voxel_tools::chunks::Chunks;\n\nuse wgpu::util::DeviceExt;\n\nuse winit::{\n\n event::*,\n\n event_loop::{ControlFlow, EventLoop},\n\n window::{Window, WindowBuilder},\n\n};\n\n\n\nuse crate::{\n\n depth_pass::DepthPass,\n\n light::Light,\n\n rendering::{\n\n render_utils::create_render_pipeline, vertex_desc::VertexDesc, vertex_instance::*,\n\n },\n", "file_path": "src/main.rs", "rank": 88, "score": 8.643635734270468 }, { "content": " voxel_tools::{\n\n rendering::voxel_pipeline::create_voxel_pipeline,\n\n },\n\n};\n\n\n\nmod camera;\n\nmod camera_controller;\n\nmod color;\n\nmod depth_pass;\n\nmod light;\n\nmod model;\n\nmod rendering;\n\nmod texture;\n\nmod voxel_tools;\n\n\n\n#[repr(C)]\n\n#[derive(Copy, Clone, Debug, bytemuck::Pod, bytemuck::Zeroable)]\n\npub struct Vertex {\n\n pub position: [f32; 3],\n\n //color: [f32; 3],\n", "file_path": "src/main.rs", "rank": 89, "score": 8.062847031387626 }, { "content": " write_mask: wgpu::ColorWrite::ALL,\n\n }],\n\n }),\n\n });\n\n\n\n let vertex_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {\n\n label: Some(\"depth_pass.vertex_buffer\"),\n\n contents: bytemuck::cast_slice(DEPTH_VERTICES),\n\n usage: wgpu::BufferUsage::VERTEX,\n\n });\n\n let index_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {\n\n label: Some(\"depth_pass.index_buffer\"),\n\n contents: bytemuck::cast_slice(DEPTH_INDICES),\n\n usage: wgpu::BufferUsage::INDEX,\n\n });\n\n let num_indices = DEPTH_INDICES.len() as u32;\n\n\n\n Self {\n\n pipeline,\n\n texture,\n", "file_path": "src/depth_pass.rs", "rank": 90, "score": 7.834747361931896 }, { "content": " self.set_bind_group(2, &light, &[]);\n\n self.draw_indexed(0..mesh.num_indices, 0, instances);\n\n }\n\n\n\n fn draw_model(\n\n &mut self,\n\n model: &'b Model,\n\n uniform: &'b wgpu::BindGroup,\n\n light: &'b wgpu::BindGroup,\n\n ) {\n\n self.draw_model_instanced(model, 0..1, uniform, 
light);\n\n }\n\n\n\n fn draw_model_instanced(\n\n &mut self,\n\n model: &'b Model,\n\n instances: Range<u32>,\n\n uniform: &'b wgpu::BindGroup,\n\n light: &'b wgpu::BindGroup,\n\n ) {\n", "file_path": "src/model.rs", "rank": 91, "score": 7.524573058115382 }, { "content": "#[repr(C)]\n\n#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]\n\npub struct Color(pub [u8; 4]);\n\n\n\nimpl Into<[f32; 4]> for Color {\n\n fn into(self) -> [f32; 4] {\n\n [\n\n self.0[0] as f32 / 255.,\n\n self.0[1] as f32 / 255.,\n\n self.0[2] as f32 / 255.,\n\n self.0[3] as f32 / 255.,\n\n ]\n\n }\n\n}\n\n\n\nimpl Into<[u8; 4]> for Color {\n\n fn into(self) -> [u8; 4] {\n\n self.0\n\n }\n\n}\n", "file_path": "src/color.rs", "rank": 92, "score": 7.285406560664975 }, { "content": " }\n\n VirtualKeyCode::D | VirtualKeyCode::Right => {\n\n self.amount_right = amount;\n\n true\n\n }\n\n VirtualKeyCode::Space => {\n\n self.amount_up = amount;\n\n true\n\n }\n\n VirtualKeyCode::LShift => {\n\n self.amount_down = amount;\n\n true\n\n }\n\n _ => false,\n\n }\n\n }\n\n\n\n pub fn process_mouse(&mut self, mouse_dx: f64, mouse_dy: f64) {\n\n self.rotate_horizontal = mouse_dx as f32;\n\n self.rotate_vertical = mouse_dy as f32;\n", "file_path": "src/camera_controller.rs", "rank": 93, "score": 7.134060543332149 }, { "content": "use anyhow::*;\n\nuse std::{ops::Range, path::Path};\n\nuse tobj::LoadOptions;\n\nuse wgpu::util::DeviceExt;\n\n\n\nuse crate::{\n\n color::{self, Color},\n\n rendering::vertex_desc::VertexDesc,\n\n texture::Texture,\n\n};\n", "file_path": "src/model.rs", "rank": 94, "score": 6.626356848426662 }, { "content": " array_stride: mem::size_of::<ModelVertex>() as wgpu::BufferAddress,\n\n step_mode: wgpu::InputStepMode::Vertex,\n\n attributes: &[\n\n // position\n\n wgpu::VertexAttribute {\n\n format: wgpu::VertexFormat::Float32x3,\n\n offset: 0,\n\n shader_location: 0,\n\n },\n\n // tex coord\n\n wgpu::VertexAttribute {\n\n format: wgpu::VertexFormat::Float32x2,\n\n offset: 
mem::size_of::<[f32; 3]>() as wgpu::BufferAddress,\n\n shader_location: 1,\n\n },\n\n // normal\n\n wgpu::VertexAttribute {\n\n format: wgpu::VertexFormat::Float32x3,\n\n offset: mem::size_of::<[f32; 5]>() as wgpu::BufferAddress,\n\n shader_location: 2,\n", "file_path": "src/model.rs", "rank": 95, "score": 6.576649160397377 }, { "content": "use generational_arena::*;\n\npub struct GpuResources {\n\n pub buffer_arena: Arena<wgpu::Buffer>,\n\n}\n\n\n\nimpl GpuResources {\n\n pub fn new() -> Self {\n\n Self {\n\n buffer_arena: Arena::with_capacity(32),\n\n }\n\n }\n\n}\n", "file_path": "src/rendering/gpu_resources.rs", "rank": 96, "score": 6.343327443873231 }, { "content": " let texture_buffer = device.create_buffer_init(&wgpu::util::BufferInitDescriptor {\n\n label,\n\n contents: bytes,\n\n usage: wgpu::BufferUsage::COPY_SRC,\n\n });\n\n\n\n let mut encoder = device.create_command_encoder(&wgpu::CommandEncoderDescriptor {\n\n label: Some(\"temp texture encoder\"),\n\n });\n\n\n\n let texture = device.create_texture(&wgpu::TextureDescriptor {\n\n size: texture_size,\n\n mip_level_count: 1,\n\n sample_count: 1,\n\n dimension: wgpu::TextureDimension::D2,\n\n format: wgpu::TextureFormat::Rgba8UnormSrgb,\n\n // sampled: use in shader\n\n // copy dst, we want to copy data to this texture\n\n usage: wgpu::TextureUsage::SAMPLED | wgpu::TextureUsage::COPY_DST,\n\n label: Some(\"my texture\"),\n", "file_path": "src/texture.rs", "rank": 97, "score": 6.326006442106534 }, { "content": " });\n\n meshes.push(Mesh {\n\n name: m.name,\n\n vertex_buffer,\n\n index_buffer,\n\n num_indices: m.mesh.indices.len() as u32,\n\n material_id: m.mesh.material_id.unwrap_or(0),\n\n });\n\n }\n\n Ok(Self { meshes, materials })\n\n }\n\n}\n\n\n", "file_path": "src/model.rs", "rank": 98, "score": 6.3050931039999725 }, { "content": " // Move in/out (aka. \"zoom\")\n\n // Note: this isn't an actual zoom. The camera's position\n\n // changes when zooming. 
I've added this to make it easier\n\n // to get closer to an object you want to focus on.\n\n let (pitch_sin, pitch_cos) = camera.pitch.0.sin_cos();\n\n let scrollward =\n\n Vector3::new(pitch_cos * yaw_cos, pitch_sin, pitch_cos * yaw_sin).normalize();\n\n camera.position += scrollward * self.scroll * self.speed * self.sensitivity * dt;\n\n self.scroll = 0.0;\n\n\n\n // Move up/down. Since we don't use roll, we can just\n\n // modify the y coordinate directly.\n\n camera.position.y += (self.amount_up - self.amount_down) * self.speed * dt;\n\n\n\n // Rotate\n\n camera.yaw += Rad(self.rotate_horizontal) * self.sensitivity * dt;\n\n camera.pitch += Rad(-self.rotate_vertical) * self.sensitivity * dt;\n\n\n\n // If process_mouse isn't called every frame, these values\n\n // will not get set to zero, and the camera will rotate\n", "file_path": "src/camera_controller.rs", "rank": 99, "score": 6.152697658244767 } ]
Rust
components/restreamer/src/dvr.rs
iAnanich/ephyr
0ed272838d04b4e952e105bd0e170005c363dcf9
use std::{ ffi::OsString, io, path::{Path, PathBuf}, time::SystemTime, }; use anyhow::anyhow; use ephyr_log::log; use futures::{future, TryFutureExt as _, TryStreamExt as _}; use once_cell::sync::OnceCell; use tokio::fs; use url::Url; use uuid::Uuid; use crate::state; static STORAGE: OnceCell<Storage> = OnceCell::new(); #[derive(Debug)] pub struct Storage { pub root_path: PathBuf, } impl Storage { #[inline] #[must_use] pub fn global() -> &'static Storage { STORAGE.get().expect("dvr::Storage is not initialized") } #[inline] pub fn set_global(self) -> anyhow::Result<()> { STORAGE .set(self) .map_err(|_| anyhow!("dvr::Storage has been initialized already")) } pub fn file_url(&self, output: &state::Output) -> anyhow::Result<Url> { let mut full = self.root_path.clone(); full.push(output.id.to_string()); full.push(output.dst.path().trim_start_matches('/')); Url::from_file_path(full) .map_err(|e| anyhow!("Failed convert path to URL: {:?}", e)) } pub async fn list_files(&self, id: state::OutputId) -> Vec<String> { let dir = &self.root_path; let mut output_dir = dir.clone(); output_dir.push(id.to_string()); fs::read_dir(output_dir) .try_flatten_stream() .try_filter_map(|i| async move { Ok(i.file_type().await?.is_file().then(|| i.path()).and_then( |p| Some(p.strip_prefix(dir).ok()?.display().to_string()), )) }) .try_collect() .await .unwrap_or_else(|e| { if e.kind() != io::ErrorKind::NotFound { log::error!("Failed to list {} DVR files: {}", id, e); } vec![] }) } pub async fn remove_file<P: AsRef<Path>>(&self, path: P) -> bool { let path = path.as_ref(); let mut full = self.root_path.clone(); full.push(path.strip_prefix("/").unwrap_or(path)); if let Err(e) = fs::remove_file(full).await { if e.kind() != io::ErrorKind::NotFound { log::error!( "Failed to remove {} DVR file: {}", path.display(), e, ); } return false; } true } pub async fn cleanup(&self, restreams: &[state::Restream]) { fs::read_dir(&self.root_path) .try_flatten_stream() .try_filter(|i| { future::ready( 
i.file_name() .to_str() .and_then(|n| Uuid::parse_str(n).ok()) .map_or(true, |id| { let id = id.into(); !restreams .iter() .any(|r| r.outputs.iter().any(|o| o.id == id)) }), ) }) .try_for_each_concurrent(4, |i| async move { if i.file_type().await?.is_dir() { fs::remove_dir_all(i.path()).await } else { fs::remove_file(i.path()).await } }) .await .unwrap_or_else(|e| { log::error!("Failed to cleanup DVR files: {}", e) }) } } #[allow(clippy::missing_panics_doc)] pub async fn new_file_path(url: &Url) -> io::Result<PathBuf> { let mut path = url.to_file_path().map_err(|_| { io::Error::new(io::ErrorKind::Other, "File URL contains bad file path") })?; if let Some(dir) = path.parent() { fs::create_dir_all(dir).await?; } let now = SystemTime::now() .duration_since(SystemTime::UNIX_EPOCH) .unwrap(); let mut file_name = OsString::new(); if let Some(name) = path.file_stem() { file_name.push(name) } file_name.push(format!("_{}.", now.as_micros())); if let Some(ext) = path.extension() { file_name.push(ext) } path.set_file_name(file_name); Ok(path) }
use std::{ ffi::OsString, io, path::{Path, PathBuf}, time::SystemTime, }; use anyhow::anyhow; use ephyr_log::log; use futures::{future, TryFutureExt as _, TryStreamExt as _}; use once_cell::sync::OnceCell; use tokio::fs; use url::Url; use uuid::Uuid; use crate::state; static STORAGE: OnceCell<Storage> = OnceCell::new(); #[derive(Debug)] pub struct Storage { pub root_path: PathBuf, } impl Storage { #[inline]
og::error!("Failed to cleanup DVR files: {}", e) }) } } #[allow(clippy::missing_panics_doc)] pub async fn new_file_path(url: &Url) -> io::Result<PathBuf> { let mut path = url.to_file_path().map_err(|_| { io::Error::new(io::ErrorKind::Other, "File URL contains bad file path") })?; if let Some(dir) = path.parent() { fs::create_dir_all(dir).await?; } let now = SystemTime::now() .duration_since(SystemTime::UNIX_EPOCH) .unwrap(); let mut file_name = OsString::new(); if let Some(name) = path.file_stem() { file_name.push(name) } file_name.push(format!("_{}.", now.as_micros())); if let Some(ext) = path.extension() { file_name.push(ext) } path.set_file_name(file_name); Ok(path) }
#[must_use] pub fn global() -> &'static Storage { STORAGE.get().expect("dvr::Storage is not initialized") } #[inline] pub fn set_global(self) -> anyhow::Result<()> { STORAGE .set(self) .map_err(|_| anyhow!("dvr::Storage has been initialized already")) } pub fn file_url(&self, output: &state::Output) -> anyhow::Result<Url> { let mut full = self.root_path.clone(); full.push(output.id.to_string()); full.push(output.dst.path().trim_start_matches('/')); Url::from_file_path(full) .map_err(|e| anyhow!("Failed convert path to URL: {:?}", e)) } pub async fn list_files(&self, id: state::OutputId) -> Vec<String> { let dir = &self.root_path; let mut output_dir = dir.clone(); output_dir.push(id.to_string()); fs::read_dir(output_dir) .try_flatten_stream() .try_filter_map(|i| async move { Ok(i.file_type().await?.is_file().then(|| i.path()).and_then( |p| Some(p.strip_prefix(dir).ok()?.display().to_string()), )) }) .try_collect() .await .unwrap_or_else(|e| { if e.kind() != io::ErrorKind::NotFound { log::error!("Failed to list {} DVR files: {}", id, e); } vec![] }) } pub async fn remove_file<P: AsRef<Path>>(&self, path: P) -> bool { let path = path.as_ref(); let mut full = self.root_path.clone(); full.push(path.strip_prefix("/").unwrap_or(path)); if let Err(e) = fs::remove_file(full).await { if e.kind() != io::ErrorKind::NotFound { log::error!( "Failed to remove {} DVR file: {}", path.display(), e, ); } return false; } true } pub async fn cleanup(&self, restreams: &[state::Restream]) { fs::read_dir(&self.root_path) .try_flatten_stream() .try_filter(|i| { future::ready( i.file_name() .to_str() .and_then(|n| Uuid::parse_str(n).ok()) .map_or(true, |id| { let id = id.into(); !restreams .iter() .any(|r| r.outputs.iter().any(|o| o.id == id)) }), ) }) .try_for_each_concurrent(4, |i| async move { if i.file_type().await?.is_dir() { fs::remove_dir_all(i.path()).await } else { fs::remove_file(i.path()).await } }) .await .unwrap_or_else(|e| { l
random
[ { "content": "/// Interprets given [panic payload][1] as displayable message.\n\n///\n\n/// [1]: std::panic::PanicInfo::payload\n\npub fn display_panic<'a>(err: &'a (dyn Any + Send + 'static)) -> &'a str {\n\n if let Some(s) = err.downcast_ref::<&str>() {\n\n return s;\n\n }\n\n if let Some(s) = err.downcast_ref::<String>() {\n\n return s.as_str();\n\n }\n\n \"Box<Any>\"\n\n}\n", "file_path": "components/restreamer/src/lib.rs", "rank": 0, "score": 75244.29582067978 }, { "content": "/// Interprets given [panic payload][1] as displayable message.\n\n///\n\n/// [1]: std::panic::PanicInfo::payload\n\npub fn display_panic<'a>(err: &'a (dyn Any + Send + 'static)) -> &'a str {\n\n if let Some(s) = err.downcast_ref::<&str>() {\n\n return s;\n\n }\n\n if let Some(s) = err.downcast_ref::<String>() {\n\n return s.as_str();\n\n }\n\n \"Box<Any>\"\n\n}\n", "file_path": "components/vod-meta-server/src/util.rs", "rank": 1, "score": 72034.98910407211 }, { "content": "#[derive(Clone, Copy, Debug, Deserialize)]\n\nstruct Mode {\n\n /// Indicator whether [`state::Playlist`]s should be updated regardless\n\n /// its broken playbacks.\n\n #[serde(default)]\n\n force: bool,\n\n\n\n /// Indicator whether [`state::Playlist`]s should be checked and verified\n\n /// without applying any real changes to existing [`State`].\n\n #[serde(default)]\n\n dry_run: bool,\n\n}\n", "file_path": "components/vod-meta-server/src/server.rs", "rank": 2, "score": 57760.55213818508 }, { "content": "#[derive(Clone, Debug)]\n\nstruct AuthTokenHash(String);\n\n\n\nasync fn verify_auth_token(\n\n req: ServiceRequest,\n\n auth: BearerAuth,\n\n) -> Result<ServiceRequest, error::Error> {\n\n let token_hash = req.app_data::<AuthTokenHash>().unwrap().0.clone();\n\n\n\n let is_ok = web::block(move || {\n\n argon2::verify_encoded(&token_hash, auth.token().as_bytes())\n\n })\n\n .await\n\n .map_err(error::ErrorInternalServerError)?;\n\n if !is_ok {\n\n return Err(error::ErrorUnauthorized(\"Invalid Bearer token 
provided\"));\n\n }\n\n\n\n Ok(req)\n\n}\n\n\n\n/// Parameters configuring the mode for applying new [`State`].\n", "file_path": "components/vod-meta-server/src/server.rs", "rank": 3, "score": 50688.18643938629 }, { "content": "#[derive(Clone, Debug)]\n\nstruct ServerProcess(future::AbortHandle);\n\n\n\nimpl Drop for ServerProcess {\n\n #[inline]\n\n fn drop(&mut self) {\n\n self.0.abort();\n\n }\n\n}\n\n\n\n/// ID of [SRS] server client guarded by its participation.\n\n///\n\n/// Once this ID is fully [`Drop`]ped the client will be kicked from [SRS]\n\n/// server.\n\n///\n\n/// [SRS]: https://github.com/ossrs/srs\n\n#[derive(Clone, Debug, Eq, Hash, PartialEq)]\n\npub struct ClientId(Arc<u32>);\n\n\n\nimpl From<u32> for ClientId {\n\n #[inline]\n", "file_path": "components/restreamer/src/srs.rs", "rank": 4, "score": 48914.1852890856 }, { "content": "#[inline]\n\n#[must_use]\n\npub fn schema() -> Schema {\n\n Schema::new(QueriesRoot, MutationsRoot, SubscriptionsRoot)\n\n}\n\n\n\n/// Root of all [GraphQL mutations][1] in the [`Schema`].\n\n///\n\n/// [1]: https://spec.graphql.org/June2018/#sec-Root-Operation-Types\n\n#[derive(Clone, Copy, Debug)]\n\npub struct MutationsRoot;\n\n\n\n#[graphql_object(name = \"Mutation\", context = Context)]\n\nimpl MutationsRoot {\n\n /// Applies the specified JSON `spec` of `Restream`s to this server.\n\n ///\n\n /// If `replace` is `true` then replaces all the existing `Restream`s with\n\n /// the one defined by the `spec`. 
Otherwise, merges the `spec` with\n\n /// existing `Restream`s.\n\n ///\n\n /// ### Result\n\n ///\n", "file_path": "components/restreamer/src/api/graphql/client.rs", "rank": 5, "score": 48595.13754639064 }, { "content": "#[allow(clippy::trivially_copy_pass_by_ref)] // required for `serde`\n\n#[inline]\n\n#[must_use]\n\npub fn is_false(val: &bool) -> bool {\n\n !*val\n\n}\n", "file_path": "components/restreamer/src/serde.rs", "rank": 6, "score": 45067.392120653414 }, { "content": "#[must_use]\n\npub fn format(duration: &Duration) -> String {\n\n let secs = duration.as_secs();\n\n let mins = secs / 60;\n\n format!(\"{:02}:{:02}:{:02}\", mins / 60, mins % 60, secs % 60)\n\n}\n\n\n\n/// Serializes [`Duration`] into a `%H:%M:%S` time-like format (`123:05:01`,\n\n/// for example).\n\n///\n\n/// # Errors\n\n///\n\n/// Never errors.\n", "file_path": "common/serde/src/timelike.rs", "rank": 7, "score": 45063.60765587324 }, { "content": "/// Runs application.\n\n///\n\n/// # Errors\n\n///\n\n/// If running has failed and could not be performed. The appropriate error\n\n/// is logged.\n\npub fn run() -> Result<(), cli::Failure> {\n\n let mut cfg = cli::Opts::from_args();\n\n cfg.verbose = cfg.verbose.or_else(|| {\n\n if cfg.debug {\n\n Some(slog::Level::Debug)\n\n } else {\n\n None\n\n }\n\n });\n\n\n\n // This guard should be held till the end of the program for the logger\n\n // to present in global context.\n\n mem::forget(ephyr_log::init(cfg.verbose));\n\n\n\n server::run(cfg)\n\n}\n\n\n", "file_path": "components/restreamer/src/lib.rs", "rank": 8, "score": 45059.00719445937 }, { "content": "/// Runs application.\n\n///\n\n/// # Errors\n\n///\n\n/// If running has failed and could not be performed. 
The appropriate error\n\n/// is logged.\n\npub fn run() -> Result<(), cli::Failure> {\n\n let opts = cli::Opts::from_args();\n\n\n\n // This guard should be held till the end of the program for the logger\n\n // to present in global context.\n\n let _log_guard = ephyr_log::init(opts.verbose);\n\n\n\n server::run(opts)\n\n}\n", "file_path": "components/vod-meta-server/src/lib.rs", "rank": 9, "score": 43163.92835015507 }, { "content": "#[must_use]\n\npub fn main_logger(level: slog::Level) -> slog::Logger {\n\n use slog::Drain as _;\n\n use slog_async::OverflowStrategy::Drop;\n\n\n\n let decorator = slog_term::TermDecorator::new().build();\n\n let drain = slog_term::CompactFormat::new(decorator).build().fuse();\n\n\n\n let drain = drain\n\n .filter_level(level)\n\n .filter(|rec| {\n\n // Disable annoying DEBUG logs from `hyper` crate.\n\n !(rec.level() == slog::Level::Debug\n\n && rec.module() == \"hyper::proto::h1::io\")\n\n })\n\n .fuse();\n\n\n\n let drain = slog_async::Async::new(drain)\n\n .overflow_strategy(Drop)\n\n .build()\n\n .fuse();\n\n\n\n slog::Logger::root(drain, slog::o!())\n\n}\n", "file_path": "common/log/src/lib.rs", "rank": 10, "score": 40104.66874705201 }, { "content": "/// Deserializes [`Duration`] from whole seconds.\n\n///\n\n/// # Errors\n\n///\n\n/// If an input is not a 64-bits unsigned integer number ([`u64`]).\n\npub fn deserialize<'a, D>(de: D) -> Result<Duration, D::Error>\n\nwhere\n\n D: Deserializer<'a>,\n\n{\n\n let secs = u64::deserialize(de)?;\n\n Ok(Duration::from_secs(secs))\n\n}\n\n\n\n#[cfg(test)]\n\nmod spec {\n\n use std::time::Duration;\n\n\n\n use serde::{Deserialize, Serialize};\n\n\n\n #[derive(Deserialize, Serialize)]\n\n struct Test(#[serde(with = \"super\")] Duration);\n\n\n\n #[test]\n\n fn serializes_correctly() {\n\n for (input, expected) in &[\n", "file_path": "common/serde/src/seconds.rs", "rank": 11, "score": 37196.727991858235 }, { "content": "/// Deserializes [`Duration`] from a `%H:%M:%S` time-like format 
(`123:05:01`,\n\n/// for example).\n\n///\n\n/// # Errors\n\n///\n\n/// If an input is not time-like formatted or does contain invalid time.\n\npub fn deserialize<'a, D>(de: D) -> Result<Duration, D::Error>\n\nwhere\n\n D: Deserializer<'a>,\n\n{\n\n let s = <Cow<'_, str>>::deserialize(de)?;\n\n let mut iter = s.split(':');\n\n\n\n let hours: u64 = iter\n\n .next()\n\n .ok_or_else(|| D::Error::custom(\"no hours specified\"))?\n\n .parse()\n\n .map_err(|e| D::Error::custom(format!(\"cannot parse hours: {}\", e)))?;\n\n\n\n let mins: u64 = iter\n\n .next()\n\n .ok_or_else(|| D::Error::custom(\"no minutes specified\"))?\n\n .parse()\n\n .map_err(|e| {\n\n D::Error::custom(format!(\"cannot parse minutes: {}\", e))\n\n })?;\n", "file_path": "common/serde/src/timelike.rs", "rank": 12, "score": 37196.727991858235 }, { "content": "/// Initializes global logger with the given verbosity `level` ([`Info`] by\n\n/// default, if [`None`]), returning its guard that should be held as long as\n\n/// program runs.\n\n///\n\n/// # Panics\n\n///\n\n/// If failed to initialize logger.\n\n///\n\n/// [`Info`]: slog::Level::Info\n\npub fn init(level: Option<slog::Level>) -> slog_scope::GlobalLoggerGuard {\n\n let guard = slog_scope::set_global_logger(main_logger(\n\n level.unwrap_or(slog::Level::Info),\n\n ));\n\n if let Err(e) = slog_stdlog::init() {\n\n panic!(\"Failed to initialize logger: {}\", e)\n\n };\n\n guard\n\n}\n\n\n\n/// Creates, configures and returns main [`Logger`] of the application.\n\n///\n\n/// [`Logger`]: slog::Logger\n", "file_path": "common/log/src/lib.rs", "rank": 13, "score": 36944.63737888301 }, { "content": "/// Deserializes [`TimeZone`] from a [RFC 3339 format][1] (`+04:03`, for\n\n/// example).\n\n///\n\n/// # Errors\n\n///\n\n/// If an input is not [RFC 3339 formatted][1] timezone or does contain invalid\n\n/// timezone.\n\n///\n\n/// [1]: https://tools.ietf.org/html/rfc3339#section-4.2\n\npub fn deserialize<'a, D>(de: D) -> Result<TimeZone, 
D::Error>\n\nwhere\n\n D: Deserializer<'a>,\n\n{\n\n let s = <Cow<'_, str>>::deserialize(de)?;\n\n let (sign, s) = match s.chars().next() {\n\n Some('+') => (1, &s[1..]),\n\n Some('-') => (-1, &s[1..]),\n\n Some(_) => (1, &*s),\n\n None => {\n\n return Err(D::Error::custom(format!(\"invalid timezone: {}\", s)))\n\n }\n\n };\n\n let mut iter = s.split(':');\n\n\n\n let hours: u32 = iter\n\n .next()\n\n .ok_or_else(|| D::Error::custom(\"no hours specified\"))?\n\n .parse()\n\n .map_err(|e| D::Error::custom(format!(\"cannot parse hours: {}\", e)))?;\n", "file_path": "common/serde/src/timezone.rs", "rank": 14, "score": 36417.103065068375 }, { "content": "#[inline]\n\npub fn serialize<S>(dur: &Duration, ser: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n ser.serialize_u64(dur.as_secs())\n\n}\n\n\n", "file_path": "common/serde/src/seconds.rs", "rank": 15, "score": 34036.53394039485 }, { "content": "#[inline]\n\npub fn serialize<S>(dur: &Duration, ser: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n ser.serialize_str(&format(dur))\n\n}\n\n\n", "file_path": "common/serde/src/timelike.rs", "rank": 16, "score": 34036.53394039485 }, { "content": "#[allow(clippy::trivially_copy_pass_by_ref)]\n\n#[inline]\n\npub fn serialize<S>(tz: &TimeZone, ser: S) -> Result<S::Ok, S::Error>\n\nwhere\n\n S: Serializer,\n\n{\n\n ser.serialize_str(&format!(\"{:?}\", tz))\n\n}\n\n\n", "file_path": "common/serde/src/timezone.rs", "rank": 17, "score": 33367.02836644489 }, { "content": " ///\n\n /// Prints the error message and quits the program in case of failure.\n\n #[inline]\n\n #[must_use]\n\n pub fn from_args() -> Self {\n\n <Self as StructOpt>::from_args()\n\n }\n\n\n\n /// Parses [`slog::Level`] from the given string.\n\n ///\n\n /// This function is required, because [`slog::Level`]'s [`FromStr`]\n\n /// implementation returns `()`, which is not [`Display`] as [`StructOpt`]\n\n /// requires.\n\n ///\n\n /// # Errors\n\n ///\n\n /// If [`slog::Level`] 
failed to parse from the string.\n\n ///\n\n /// [`Display`]: std::fmt::Display\n\n /// [`FromStr`]: std::str::FromStr\n", "file_path": "components/restreamer/src/cli.rs", "rank": 19, "score": 17.11445399547597 }, { "content": " pub verbose: Option<slog::Level>,\n\n}\n\n\n\nimpl Opts {\n\n /// Parses CLI [`Opts`] from command line arguments.\n\n ///\n\n /// Prints the error message and quits the program in case of failure.\n\n #[inline]\n\n #[must_use]\n\n pub fn from_args() -> Self {\n\n <Self as StructOpt>::from_args()\n\n }\n\n\n\n /// Parses [`slog::Level`] from the given string.\n\n ///\n\n /// This function is required, because [`slog::Level`]'s [`FromStr`]\n\n /// implementation returns `()`, which is not [`Display`] as [`StructOpt`]\n\n /// requires.\n\n ///\n\n /// # Errors\n", "file_path": "components/vod-meta-server/src/cli.rs", "rank": 20, "score": 16.999249425765388 }, { "content": "\n\nimpl Spec {\n\n /// Converts this [`Spec`] into a [`v1::Spec`].\n\n #[inline]\n\n #[must_use]\n\n pub fn into_v1(self) -> v1::Spec {\n\n match self {\n\n Self::V1(s) => s,\n\n }\n\n }\n\n}\n", "file_path": "components/restreamer/src/spec/mod.rs", "rank": 22, "score": 15.738676521968092 }, { "content": ")]\n\npub struct Delay(#[serde(with = \"serde_humantime\")] Duration);\n\n\n\nimpl Delay {\n\n /// Creates a new [`Delay`] out of the given milliseconds.\n\n #[inline]\n\n #[must_use]\n\n pub fn from_millis<N: TryInto<u64>>(millis: N) -> Option<Self> {\n\n millis\n\n .try_into()\n\n .ok()\n\n .map(|m| Self(Duration::from_millis(m)))\n\n }\n\n\n\n /// Returns milliseconds of this [`Delay`].\n\n #[inline]\n\n #[must_use]\n\n #[allow(clippy::missing_panics_doc)]\n\n pub fn as_millis(&self) -> i32 {\n\n self.0.as_millis().try_into().unwrap()\n", "file_path": "components/restreamer/src/state.rs", "rank": 23, "score": 15.371209330704156 }, { "content": " Serialize,\n\n)]\n\npub struct RestreamId(Uuid);\n\n\n\nimpl RestreamId {\n\n /// Generates a new random [`RestreamId`].\n\n 
#[inline]\n\n #[must_use]\n\n pub fn random() -> Self {\n\n Self(Uuid::new_v4())\n\n }\n\n}\n\n\n\n/// Key of a [`Restream`] identifying it, and used to form its endpoints URLs.\n\n#[derive(\n\n Clone, Debug, Deref, Display, Eq, Hash, Into, PartialEq, Serialize,\n\n)]\n\npub struct RestreamKey(String);\n\n\n\nimpl RestreamKey {\n", "file_path": "components/restreamer/src/state.rs", "rank": 24, "score": 15.367716069346605 }, { "content": "pub struct Context(Option<SendWrapper<HttpRequest>>);\n\n\n\nimpl Context {\n\n /// Creates new [`Context`] wrapping the given [`HttpRequest`].\n\n #[inline]\n\n #[must_use]\n\n pub fn new(req: HttpRequest) -> Self {\n\n Self(Some(SendWrapper::new(req)))\n\n }\n\n\n\n /// Creates a fake [`Context`], which panics on use.\n\n ///\n\n /// Intended for situations where we cannot provide [`HttpRequest`] for\n\n /// operation execution (running introspection locally, for example).\n\n #[inline]\n\n #[must_use]\n\n pub fn fake() -> Self {\n\n Self(None)\n\n }\n\n\n", "file_path": "components/restreamer/src/api/graphql/mod.rs", "rank": 25, "score": 15.333875880125683 }, { "content": " pub status: http::StatusCode,\n\n\n\n /// Message of this [`Error`](struct@Error).\n\n #[default = \"Unknown error has happened.\"]\n\n pub message: Cow<'static, str>,\n\n\n\n /// Backtrace of this [`Error`](struct@Error).\n\n #[error(not(backtrace))]\n\n pub backtrace: Option<Vec<String>>,\n\n}\n\n\n\nimpl Error {\n\n /// Creates new default [`Error`](struct@Error) with a given unique literal\n\n /// code applied.\n\n ///\n\n /// Code is usually upper-cased, like `USER_NOT_FOUND`.\n\n ///\n\n /// Goes as `errors.extensions.code` field of GraphQL response.\n\n #[inline]\n\n #[must_use]\n", "file_path": "components/restreamer/src/api/graphql/mod.rs", "rank": 26, "score": 15.202394961405316 }, { "content": " /// [Profile]: https://trac.ffmpeg.org/wiki/Encode/H.264#Profile\n\n pub vprofile: Option<Cow<'static, str>>,\n\n\n\n /// [FFmpeg audio encoder][1] to 
encode the transcoded live stream with.\n\n ///\n\n /// [1]: https://ffmpeg.org/ffmpeg-codecs.html#Audio-Encoders\n\n pub acodec: Option<Cow<'static, str>>,\n\n}\n\n\n\nimpl TranscodingRestreamer {\n\n /// Checks whether this [`TranscodingRestreamer`] process must be restarted,\n\n /// as cannot apply the new `actual` params on itself correctly, without\n\n /// interruptions.\n\n #[inline]\n\n #[must_use]\n\n pub fn needs_restart(&self, actual: &Self) -> bool {\n\n self != actual\n\n }\n\n\n\n /// Properly setups the given [FFmpeg] [`Command`] for this\n", "file_path": "components/restreamer/src/ffmpeg.rs", "rank": 27, "score": 15.104182825859017 }, { "content": " Eq,\n\n From,\n\n GraphQLScalarValue,\n\n Into,\n\n PartialEq,\n\n Serialize,\n\n)]\n\npub struct InputId(Uuid);\n\n\n\nimpl InputId {\n\n /// Generates a new random [`InputId`].\n\n #[inline]\n\n #[must_use]\n\n pub fn random() -> Self {\n\n Self(Uuid::new_v4())\n\n }\n\n}\n\n\n\n/// Key of an [`Input`] used to form its endpoint URL.\n\n#[derive(\n", "file_path": "components/restreamer/src/state.rs", "rank": 28, "score": 14.877648751457908 }, { "content": " Clone, Debug, Deref, Display, Eq, Hash, Into, PartialEq, Serialize,\n\n)]\n\npub struct InputKey(String);\n\n\n\nimpl InputKey {\n\n /// Creates a new [`InputKey`] if the given value meets its invariants.\n\n #[must_use]\n\n pub fn new<'s, S: Into<Cow<'s, str>>>(val: S) -> Option<Self> {\n\n static REGEX: Lazy<Regex> =\n\n Lazy::new(|| Regex::new(\"^[a-z0-9_-]{1,20}$\").unwrap());\n\n\n\n let val = val.into();\n\n (!val.is_empty() && REGEX.is_match(&val))\n\n .then(|| Self(val.into_owned()))\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for InputKey {\n\n #[inline]\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n", "file_path": "components/restreamer/src/state.rs", "rank": 29, "score": 14.512366972663745 }, { "content": " RestreamKey, Volume,\n\n },\n\n Spec,\n\n};\n\n\n\nuse super::Context;\n\nuse url::Url;\n\n\n\n/// Full schema of 
[`api::graphql::client`].\n\n///\n\n/// [`api::graphql::client`]: graphql::client\n\npub type Schema =\n\n RootNode<'static, QueriesRoot, MutationsRoot, SubscriptionsRoot>;\n\n\n\n/// Constructs and returns new [`Schema`], ready for use.\n\n#[inline]\n\n#[must_use]\n", "file_path": "components/restreamer/src/api/graphql/client.rs", "rank": 30, "score": 14.423086018220678 }, { "content": " /// It is used for differentiating servers on UI side if multiple servers\n\n /// are used.\n\n pub title: Option<String>,\n\n\n\n /// Whether do we need to confirm deletion of inputs and outputs\n\n /// If `true` we should confirm deletion, `false` - do not confirm\n\n pub delete_confirmation: Option<bool>,\n\n}\n\n\n\nimpl Settings {\n\n /// Exports this [`Settings`] as a [`spec::v1::Settings`].\n\n #[inline]\n\n #[must_use]\n\n pub fn export(&self) -> spec::v1::Settings {\n\n spec::v1::Settings {\n\n delete_confirmation: self.delete_confirmation,\n\n title: self.title.clone(),\n\n }\n\n }\n\n\n", "file_path": "components/restreamer/src/state.rs", "rank": 31, "score": 14.405295626823527 }, { "content": " Deserialize,\n\n Display,\n\n Eq,\n\n From,\n\n GraphQLScalarValue,\n\n Into,\n\n PartialEq,\n\n Serialize,\n\n)]\n\npub struct OutputId(Uuid);\n\n\n\nimpl OutputId {\n\n /// Generates a new random [`OutputId`].\n\n #[inline]\n\n #[must_use]\n\n pub fn random() -> Self {\n\n Self(Uuid::new_v4())\n\n }\n\n}\n\n\n", "file_path": "components/restreamer/src/state.rs", "rank": 32, "score": 14.336050420148446 }, { "content": "impl Label {\n\n /// Creates a new [`Label`] if the given value meets its invariants.\n\n #[must_use]\n\n pub fn new<'s, S: Into<Cow<'s, str>>>(val: S) -> Option<Self> {\n\n static REGEX: Lazy<Regex> =\n\n Lazy::new(|| Regex::new(r\"^[^,\\n\\t\\r\\f\\v]{1,70}$\").unwrap());\n\n\n\n let val = val.into();\n\n (!val.is_empty() && REGEX.is_match(&val))\n\n .then(|| Self(val.into_owned()))\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for Label {\n\n #[inline]\n\n fn 
deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n Self::new(<Cow<'_, str>>::deserialize(deserializer)?)\n", "file_path": "components/restreamer/src/state.rs", "rank": 33, "score": 14.213888137994335 }, { "content": " #[inline]\n\n #[must_use]\n\n pub fn needs_restart(&self, actual: &Self) -> bool {\n\n self.url != actual.url || self.delay != actual.delay\n\n }\n\n}\n\n\n\n/// Abort handle of a spawned [FFmpeg] [`Restreamer`] process.\n\n///\n\n/// [FFmpeg]: https://ffmpeg.org\n\n#[derive(Clone, Debug)]\n\npub struct DroppableAbortHandle(future::AbortHandle);\n\n\n\nimpl Drop for DroppableAbortHandle {\n\n #[inline]\n\n fn drop(&mut self) {\n\n self.0.abort();\n\n }\n\n}\n\n\n\n/// Generates a new port for a [ZeroMQ] listener, which is highly unlikely to be\n\n/// used already.\n\n///\n\n/// [ZeroMQ]: https://zeromq.org\n\n#[must_use]\n", "file_path": "components/restreamer/src/ffmpeg.rs", "rank": 34, "score": 14.09614139782242 }, { "content": "//! 
CLI (command line interface).\n\n\n\nuse std::{fmt, net::IpAddr, path::PathBuf, str::FromStr as _};\n\n\n\nuse anyhow::anyhow;\n\nuse ephyr_log::slog;\n\nuse structopt::StructOpt;\n\n\n\n/// CLI (command line interface) of the re-streamer server.\n\n#[derive(Clone, Debug, StructOpt)]\n\n#[structopt(about = \"RTMP re-streamer server\")]\n\npub struct Opts {\n\n /// Debug mode of the server.\n\n #[structopt(short, long, help = \"Enables debug mode\")]\n\n pub debug: bool,\n\n\n\n /// IP address for the server to listen client HTTP requests on.\n\n #[structopt(\n\n long,\n\n env = \"EPHYR_RESTREAMER_CLIENT_HTTP_IP\",\n", "file_path": "components/restreamer/src/cli.rs", "rank": 35, "score": 14.007504794868659 }, { "content": "}\n\n\n\n/// [`Option`] support.\n\npub mod opt {\n\n use std::time::Duration;\n\n\n\n use serde::{de::Deserializer, ser::Serializer, Deserialize};\n\n\n\n /// Serializes [`Option`]ed [`Duration`] into a `%H:%M:%S` time-like format.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Never errors.\n\n #[inline]\n\n pub fn serialize<S>(\n\n dur: &Option<Duration>,\n\n serializer: S,\n\n ) -> Result<S::Ok, S::Error>\n\n where\n\n S: Serializer,\n", "file_path": "common/serde/src/timelike.rs", "rank": 36, "score": 13.997343510663539 }, { "content": " /// Goes as `errors.message` field of GraphQL response.\n\n ///\n\n /// [1]: https://facebook.github.io/graphql/June2018/#sec-Errors\n\n #[inline]\n\n #[must_use]\n\n pub fn message<M: fmt::Display + ?Sized>(mut self, m: &M) -> Self {\n\n self.set_message(m);\n\n self\n\n }\n\n\n\n /// Attaches given backtrace to this [`Error`](struct@Error).\n\n ///\n\n /// If set, goes as `errors.extensions.backtrace` field of GraphQL response.\n\n #[inline]\n\n #[must_use]\n\n pub fn backtrace<B: fmt::Display + ?Sized>(mut self, bt: &B) -> Self {\n\n self.set_backtrace(bt);\n\n self\n\n }\n\n\n", "file_path": "components/restreamer/src/api/graphql/mod.rs", "rank": 37, "score": 13.870856896851826 }, { "content": " 
#[serde(default, skip_serializing_if = \"Volume::is_origin\")]\n\n pub volume: Volume,\n\n\n\n /// Delay that this `Mixin` should wait before being mixed with an `Output`.\n\n ///\n\n /// Very useful to fix de-synchronization issues and correct timings between\n\n /// a `Mixin` and its `Output`.\n\n #[serde(default, skip_serializing_if = \"Delay::is_zero\")]\n\n pub delay: Delay,\n\n\n\n /// `Status` of this `Mixin` indicating whether it provides an actual media\n\n /// stream to be mixed with its `Output`.\n\n #[serde(skip)]\n\n pub status: Status,\n\n}\n\n\n\nimpl Mixin {\n\n /// Creates a new [`Mixin`] out of the given [`spec::v1::Mixin`].\n\n #[inline]\n\n #[must_use]\n", "file_path": "components/restreamer/src/state.rs", "rank": 38, "score": 13.367969729427454 }, { "content": "//! CLI (command line interface).\n\n\n\nuse std::{fmt, net::IpAddr, path::PathBuf, str::FromStr as _};\n\n\n\nuse anyhow::anyhow;\n\nuse byte_unit::Byte;\n\nuse ephyr_log::slog;\n\nuse structopt::StructOpt;\n\n\n\n/// CLI (command line interface) of the server.\n\n#[derive(Clone, Debug, StructOpt)]\n\n#[structopt(about = \"VOD playlists server\")]\n\npub struct Opts {\n\n /// IP address for the server to listen HTTP requests on.\n\n #[structopt(\n\n long,\n\n env = \"EPHYR_VOD_META_HTTP_IP\",\n\n default_value = \"0.0.0.0\",\n\n help = \"IP to listen HTTP on\",\n\n long_help = \"IP address for the server to listen HTTP requests on\"\n", "file_path": "components/vod-meta-server/src/cli.rs", "rank": 39, "score": 13.176883358507794 }, { "content": " GraphQLScalarValue,\n\n Into,\n\n PartialEq,\n\n Serialize,\n\n)]\n\npub struct EndpointId(Uuid);\n\n\n\nimpl EndpointId {\n\n /// Generates a new random [`EndpointId`].\n\n #[inline]\n\n #[must_use]\n\n pub fn random() -> Self {\n\n Self(Uuid::new_v4())\n\n }\n\n}\n\n\n\n/// Source to pull a live stream by an `Input` from.\n\n#[derive(\n\n Clone, Debug, Deserialize, Eq, From, GraphQLUnion, PartialEq, Serialize,\n\n)]\n", "file_path": 
"components/restreamer/src/state.rs", "rank": 40, "score": 12.919367933772214 }, { "content": " }\n\n\n\n /// Indicates whether this [`Delay`] introduces no actual delay.\n\n #[inline]\n\n #[must_use]\n\n pub fn is_zero(&self) -> bool {\n\n self.0 == Duration::default()\n\n }\n\n}\n\n\n\n/// Type of a `Mixin` delay in milliseconds.\n\n///\n\n/// Negative values are not allowed.\n\n#[graphql_scalar]\n\nimpl<S> GraphQLScalar for Delay\n\nwhere\n\n S: ScalarValue,\n\n{\n\n fn resolve(&self) -> Value {\n\n Value::scalar(self.as_millis())\n", "file_path": "components/restreamer/src/state.rs", "rank": 41, "score": 12.763483288093795 }, { "content": " Into,\n\n PartialEq,\n\n Serialize,\n\n)]\n\npub struct MixinId(Uuid);\n\n\n\nimpl MixinId {\n\n /// Generates a new random [`MixinId`].\n\n #[inline]\n\n #[must_use]\n\n pub fn random() -> Self {\n\n Self(Uuid::new_v4())\n\n }\n\n}\n\n\n\n/// [`Url`] of a [`Mixin::src`].\n\n///\n\n/// Only the following URLs are allowed at the moment:\n\n/// - [TeamSpeak] URL (starting with `ts://` scheme and having a host);\n\n/// - [MP3] HTTP URL (starting with `http://` or `https://` scheme, having a\n", "file_path": "components/restreamer/src/state.rs", "rank": 42, "score": 12.726525994710032 }, { "content": "\n\nimpl From<slog::Level> for LogLevel {\n\n #[inline]\n\n fn from(lvl: slog::Level) -> Self {\n\n match lvl {\n\n slog::Level::Critical | slog::Level::Error => Self::Error,\n\n slog::Level::Warning | slog::Level::Info => Self::Warn,\n\n slog::Level::Debug => Self::Trace,\n\n slog::Level::Trace => Self::Info,\n\n }\n\n }\n\n}\n\n\n\n/// [`Display`]able wrapper around [`PathBuf`] for using in\n\n/// [`askama::Template`]s.\n\n///\n\n/// [`Display`]: std::fmt::Display\n\n#[derive(AsRef, Clone, Debug, Deref, Display, From, Into)]\n\n#[as_ref(forward)]\n\n#[display(fmt = \"{}\", \"_0.display()\")]\n\npub struct DisplayablePath(PathBuf);\n", "file_path": "components/restreamer/src/srs.rs", "rank": 43, "score": 12.651983916343651 }, { 
"content": "//! Manager of the server [`State`].\n\n\n\nuse std::{path::Path, sync::Arc};\n\n\n\nuse anyhow::anyhow;\n\nuse chrono::Utc;\n\nuse tokio::{fs, io::AsyncReadExt as _, sync::RwLock};\n\n\n\nuse super::{Playlist, PlaylistSlug, State};\n\n\n\n/// Manager of the server [`State`].\n\n///\n\n/// It provides access to a synchronized [`State`] and takes care about\n\n/// persisting it to filesystem to survive application restarts.\n\n#[derive(Clone, Debug)]\n\npub struct Manager {\n\n /// Path to the file where the [`Manager::state`] should be persisted.\n\n file: Arc<Path>,\n\n\n\n /// Server's [`State`] to keep synchronized and persisted, along with its\n", "file_path": "components/vod-meta-server/src/vod/meta/state/manager.rs", "rank": 44, "score": 12.581703010814367 }, { "content": " unused_qualifications,\n\n unused_results\n\n)]\n\n\n\npub mod api;\n\npub mod cli;\n\npub mod dvr;\n\npub mod ffmpeg;\n\npub mod serde;\n\npub mod server;\n\npub mod spec;\n\npub mod srs;\n\npub mod state;\n\npub mod teamspeak;\n\n\n\nuse std::{any::Any, mem};\n\n\n\nuse ephyr_log::slog;\n\n\n\npub use self::{spec::Spec, state::State};\n\n\n\n/// Runs application.\n\n///\n\n/// # Errors\n\n///\n\n/// If running has failed and could not be performed. 
The appropriate error\n\n/// is logged.\n", "file_path": "components/restreamer/src/lib.rs", "rank": 45, "score": 12.501969673766572 }, { "content": " (5..=30).contains(&dur.as_secs())\n\n }\n\n\n\n /// Converts this [`SegmentDuration`] to a regular [`Duration`] value.\n\n #[inline]\n\n #[must_use]\n\n pub fn as_duration(&self) -> Duration {\n\n self.0\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for SegmentDuration {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n use serde::de::Error as _;\n\n Self::new(serde_humantime::deserialize(deserializer)?)\n\n .ok_or_else(|| D::Error::custom(\"not a valid segment duration\"))\n\n }\n", "file_path": "components/vod-meta-server/src/vod/meta/state/mod.rs", "rank": 46, "score": 12.471598888410304 }, { "content": " Ok(())\n\n}\n\n\n\n/// Client HTTP server responding to client requests.\n\npub mod client {\n\n use std::time::Duration;\n\n\n\n use actix_service::Service as _;\n\n use actix_web::{\n\n dev::ServiceRequest, get, middleware, route, web, App, Error,\n\n HttpRequest, HttpResponse, HttpServer,\n\n };\n\n use actix_web_httpauth::extractors::{\n\n basic::{self, BasicAuth},\n\n AuthExtractor as _, AuthExtractorConfig, AuthenticationError,\n\n };\n\n use actix_web_static_files::ResourceFiles;\n\n use ephyr_log::log;\n\n use futures::{future, FutureExt as _};\n\n use juniper::http::playground::playground_source;\n", "file_path": "components/restreamer/src/server.rs", "rank": 47, "score": 12.4535140349643 }, { "content": "\n\n /// [`Url`] to pull a live stream from.\n\n pub from_url: Url,\n\n\n\n /// [`Url`] to publish the pulled live stream onto.\n\n pub to_url: Url,\n\n}\n\n\n\nimpl CopyRestreamer {\n\n /// Checks whether this [`CopyRestreamer`] process must be restarted, as\n\n /// cannot apply the new `actual` params on itself correctly, without\n\n /// interruptions.\n\n #[inline]\n\n #[must_use]\n\n pub fn needs_restart(&self, actual: &Self) -> bool {\n\n 
self.from_url != actual.from_url || self.to_url != actual.to_url\n\n }\n\n\n\n /// Properly setups the given [FFmpeg] [`Command`] for this\n\n /// [`CopyRestreamer`] before running it.\n", "file_path": "components/restreamer/src/ffmpeg.rs", "rank": 48, "score": 12.394584837595465 }, { "content": " /// Creates a new [`RestreamKey`] if the given value meets its invariants.\n\n #[must_use]\n\n pub fn new<'s, S: Into<Cow<'s, str>>>(val: S) -> Option<Self> {\n\n static REGEX: Lazy<Regex> =\n\n Lazy::new(|| Regex::new(\"^[a-z0-9_-]{1,20}$\").unwrap());\n\n\n\n let val = val.into();\n\n (!val.is_empty() && REGEX.is_match(&val))\n\n .then(|| Self(val.into_owned()))\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for RestreamKey {\n\n #[inline]\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n Self::new(<Cow<'_, str>>::deserialize(deserializer)?)\n\n .ok_or_else(|| D::Error::custom(\"Not a valid Restream.key\"))\n", "file_path": "components/restreamer/src/state.rs", "rank": 49, "score": 12.309387170543898 }, { "content": "\n\nimpl Deref for Context {\n\n type Target = HttpRequest;\n\n\n\n #[inline]\n\n fn deref(&self) -> &Self::Target {\n\n &*self.0.as_ref().unwrap()\n\n }\n\n}\n\n\n\n/// Error returned to the client by GraphQL API.\n\n#[derive(Clone, Debug, Display, Error, SmartDefault)]\n\n#[display(fmt = \"{}\", message)]\n\npub struct Error {\n\n /// Unique literal code of this [`Error`](struct@Error).\n\n #[default = \"UNKNOWN\"]\n\n pub code: Cow<'static, str>,\n\n\n\n /// HTTP status code of this [`Error`](struct@Error).\n\n #[default(http::StatusCode::INTERNAL_SERVER_ERROR)]\n", "file_path": "components/restreamer/src/api/graphql/mod.rs", "rank": 50, "score": 12.236442913465877 }, { "content": " pub fn new<C: Into<Cow<'static, str>>>(code: C) -> Self {\n\n Self {\n\n code: code.into(),\n\n ..Self::default()\n\n }\n\n }\n\n\n\n /// Attaches given [`http::StatusCode`] to this [`Error`](struct@Error).\n\n 
///\n\n /// Goes as `errors.extensions.status` field of GraphQL response.\n\n #[inline]\n\n #[must_use]\n\n pub fn status<S: Into<http::StatusCode>>(mut self, s: S) -> Self {\n\n self.set_status(s);\n\n self\n\n }\n\n\n\n /// Attaches given message to this [`Error`](struct@Error) as required by\n\n /// [GraphQL errors spec][1].\n\n ///\n", "file_path": "components/restreamer/src/api/graphql/mod.rs", "rank": 51, "score": 12.227718184772627 }, { "content": " #[inline]\n\n #[must_use]\n\n pub fn export(&self) -> spec::v1::InputEndpoint {\n\n spec::v1::InputEndpoint { kind: self.kind }\n\n }\n\n\n\n /// Indicates whether this [`InputEndpoint`] is an\n\n /// [`InputEndpointKind::Rtmp`].\n\n #[inline]\n\n #[must_use]\n\n pub fn is_rtmp(&self) -> bool {\n\n matches!(self.kind, InputEndpointKind::Rtmp)\n\n }\n\n}\n\n\n\n/// Possible kinds of an `InputEndpoint`.\n\n#[derive(\n\n Clone,\n\n Copy,\n\n Debug,\n", "file_path": "components/restreamer/src/state.rs", "rank": 52, "score": 12.198214130731174 }, { "content": "\n\n /// `Output`s that a live stream is re-streamed to.\n\n #[serde(default, skip_serializing_if = \"Vec::is_empty\")]\n\n pub outputs: Vec<Output>,\n\n}\n\n\n\nimpl Restream {\n\n /// Creates a new [`Restream`] out of the given [`spec::v1::Restream`].\n\n #[inline]\n\n #[must_use]\n\n pub fn new(spec: spec::v1::Restream) -> Self {\n\n Self {\n\n id: RestreamId::random(),\n\n key: spec.key,\n\n label: spec.label,\n\n input: Input::new(spec.input),\n\n outputs: spec.outputs.into_iter().map(Output::new).collect(),\n\n }\n\n }\n\n\n", "file_path": "components/restreamer/src/state.rs", "rank": 53, "score": 11.763159152613307 }, { "content": "//! Custom [`serde`] serialization/deserialization functions for [`Duration`]\n\n//! 
in a whole seconds format.\n\n\n\nuse std::time::Duration;\n\n\n\nuse serde::{Deserialize as _, Deserializer, Serializer};\n\n\n\n/// Serializes [`Duration`] as whole seconds.\n\n///\n\n/// # Errors\n\n///\n\n/// Never errors.\n\n#[inline]\n", "file_path": "common/serde/src/seconds.rs", "rank": 54, "score": 11.756310264129969 }, { "content": "use smart_default::SmartDefault;\n\nuse url::Url;\n\n\n\npub use crate::api::allatra::video::{Resolution, YoutubeId};\n\n\n\npub use self::manager::Manager;\n\n\n\n/// State of the server, representing a set of [`Playlist`]s for different\n\n/// audiences.\n\n#[derive(Clone, Debug, Default, Deref, DerefMut, Deserialize, Serialize)]\n\npub struct State(HashMap<PlaylistSlug, Playlist>);\n\n\n\nimpl State {\n\n /// Parses new [`State`] from the given API request.\n\n ///\n\n /// # Errors\n\n ///\n\n /// If some [`Playlist`] fails to parse.\n\n pub async fn parse_request(\n\n req: api::vod::meta::Request,\n", "file_path": "components/vod-meta-server/src/vod/meta/state/mod.rs", "rank": 55, "score": 11.534404849121806 }, { "content": " Some(Self(num))\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n /// Displays this [`Volume`] as a fraction of `1`, i.e. `100%` as `1`, `50%`\n\n /// as `0.50`, and so on.\n\n #[must_use]\n\n pub fn display_as_fraction(self) -> String {\n\n format!(\"{}.{:02}\", self.0 / 100, self.0 % 100)\n\n }\n\n\n\n /// Indicates whether this [`Volume`] rate value corresponds is the\n\n /// [`Volume::ORIGIN`]al one.\n\n #[allow(clippy::trivially_copy_pass_by_ref)] // required for `serde`\n\n #[inline]\n\n #[must_use]\n\n pub fn is_origin(&self) -> bool {\n\n *self == Self::ORIGIN\n", "file_path": "components/restreamer/src/state.rs", "rank": 56, "score": 11.353444640778099 }, { "content": "//! Version 1 of a shareable (exportable and importable) specification of\n\n//! application's [`State`].\n\n//!\n\n//! 
[`State`]: state::State\n\n\n\nuse std::collections::HashSet;\n\n\n\nuse serde::{de::Error as _, Deserialize, Deserializer, Serialize};\n\n\n\nuse crate::{serde::is_false, state};\n\nuse url::Url;\n\n\n\n/// Shareable (exportable and importable) specification of a [`State`].\n\n///\n\n/// [`State`]: state::State\n\n#[derive(Clone, Debug, Deserialize, Eq, PartialEq, Serialize)]\n\npub struct Spec {\n\n /// [`Settings`] to be performed.\n\n pub settings: Settings,\n\n\n", "file_path": "components/restreamer/src/spec/v1.rs", "rank": 57, "score": 11.304307073740869 }, { "content": " pub fn parse_log_level(lvl: &str) -> Result<slog::Level, anyhow::Error> {\n\n #[allow(clippy::map_err_ignore)]\n\n slog::Level::from_str(lvl).map_err(|_| {\n\n anyhow!(\n\n \"'{}' is invalid verbosity level, allowed levels are: \\\n\n OFF | CRIT | ERRO | WARN | INFO | DEBG | TRCE\",\n\n lvl,\n\n )\n\n })\n\n }\n\n}\n\n\n\n/// Error type indicating non-zero process exit code.\n\npub struct Failure;\n\n\n\nimpl fmt::Debug for Failure {\n\n #[inline]\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"\")\n\n }\n\n}\n\n\n\nimpl From<()> for Failure {\n\n #[inline]\n\n fn from(_: ()) -> Self {\n\n Self\n\n }\n\n}\n", "file_path": "components/restreamer/src/cli.rs", "rank": 58, "score": 11.118643794256865 }, { "content": " /// Returns [`cli::Opts`] parameters stored in [`HttpRequest`]'s context.\n\n ///\n\n /// [`cli::Opts`]: crate::cli::Opts\n\n #[inline]\n\n #[must_use]\n\n #[allow(clippy::missing_panics_doc)]\n\n pub fn config(&self) -> &crate::cli::Opts {\n\n self.app_data::<crate::cli::Opts>().unwrap()\n\n }\n\n\n\n /// Returns current [`State`] stored in [`HttpRequest`]'s context.\n\n ///\n\n /// [`State`]: crate::State\n\n #[inline]\n\n #[must_use]\n\n #[allow(clippy::missing_panics_doc)]\n\n pub fn state(&self) -> &crate::State {\n\n self.app_data::<crate::State>().unwrap()\n\n }\n\n}\n", "file_path": "components/restreamer/src/api/graphql/mod.rs", "rank": 59, 
"score": 11.107260709941762 }, { "content": "//! Definitions of API provided by this application's [VOD] meta server.\n\n//!\n\n//! [VOD]: https://en.wikipedia.org/wiki/Video_on_demand\n\n\n\nuse std::{\n\n collections::{HashMap, HashSet},\n\n time::Duration,\n\n};\n\n\n\nuse chrono::{FixedOffset as TimeZone, Weekday};\n\nuse ephyr_serde::{timelike, timezone};\n\nuse isolang::Language;\n\nuse serde::{Deserialize, Serialize};\n\nuse url::Url;\n\n\n\npub use crate::vod::meta::state::{PlaylistSlug, Resolution, SegmentDuration};\n\n\n\n/// Set of [`Playlist`]s to be provided th the server.\n\npub type Request = HashMap<PlaylistSlug, Playlist>;\n\n\n", "file_path": "components/vod-meta-server/src/api/vod/meta.rs", "rank": 60, "score": 11.038172657373455 }, { "content": "use once_cell::sync::Lazy;\n\nuse regex::Regex;\n\nuse serde::{de::Error as _, Deserialize, Deserializer, Serialize};\n\nuse smart_default::SmartDefault;\n\nuse tokio::{fs, io::AsyncReadExt as _};\n\nuse url::Url;\n\nuse uuid::Uuid;\n\n\n\nuse crate::{display_panic, serde::is_false, spec, srs, Spec};\n\n\n\n/// Server's settings.\n\n///\n\n/// It keeps different settings not related to restreams but to whole server\n\n#[derive(Clone, Debug, Deserialize, Serialize, Eq, PartialEq)]\n\npub struct Settings {\n\n /// [`argon2`] hash of password which protects access to this application's\n\n /// public APIs.\n\n pub password_hash: Option<String>,\n\n\n\n /// Title for the server\n", "file_path": "components/restreamer/src/state.rs", "rank": 61, "score": 10.939823957048173 }, { "content": "impl AudioCapture {\n\n /// Creates new [`AudioCapture`] from the given [`Connection`] and for\n\n /// the given [`AudioHandler`].\n\n #[inline]\n\n #[must_use]\n\n #[allow(clippy::missing_panics_doc)]\n\n pub fn new(conn: Connection, audio: Arc<Mutex<AudioHandler>>) -> Self {\n\n audio.lock().unwrap().reset();\n\n Self {\n\n conn: ManuallyDrop::new(conn),\n\n audio,\n\n }\n\n }\n\n\n\n /// Generates a new random HWID 
(hardware identification string).\n\n ///\n\n /// # Panics\n\n ///\n\n /// No panics, because we guarantee to pass proper range to\n\n /// the [`hex::encode_to_slice`].\n", "file_path": "components/restreamer/src/teamspeak.rs", "rank": 63, "score": 10.816187682564443 }, { "content": "#[serde(rename_all = \"lowercase\")]\n\npub enum InputSrc {\n\n /// Remote endpoint.\n\n Remote(RemoteInputSrc),\n\n\n\n /// Multiple local endpoints forming a failover source.\n\n Failover(FailoverInputSrc),\n\n}\n\n\n\nimpl InputSrc {\n\n /// Creates a new [`InputSrc`] out of the given [`spec::v1::InputSrc`].\n\n #[inline]\n\n #[must_use]\n\n pub fn new(spec: spec::v1::InputSrc) -> Self {\n\n match spec {\n\n spec::v1::InputSrc::RemoteUrl(url) => {\n\n Self::Remote(RemoteInputSrc { url })\n\n }\n\n spec::v1::InputSrc::FailoverInputs(inputs) => {\n\n Self::Failover(FailoverInputSrc {\n", "file_path": "components/restreamer/src/state.rs", "rank": 64, "score": 10.733211973195882 }, { "content": " /// Creates new [`SegmentDuration`] from the given [`Duration`] if it\n\n /// represents a [valid segment duration][1].\n\n ///\n\n /// [1]: SegmentDuration::validate\n\n #[must_use]\n\n pub fn new(dur: Duration) -> Option<Self> {\n\n if Self::validate(dur) {\n\n Some(Self(dur))\n\n } else {\n\n None\n\n }\n\n }\n\n\n\n /// Validates whether the given [`Duration`] represents a valid\n\n /// [`SegmentDuration`].\n\n ///\n\n /// Valid segment durations are between 5 and 30 seconds (inclusively).\n\n #[inline]\n\n #[must_use]\n\n pub fn validate(dur: Duration) -> bool {\n", "file_path": "components/vod-meta-server/src/vod/meta/state/mod.rs", "rank": 65, "score": 10.707469122295118 }, { "content": " unused_qualifications,\n\n unused_results\n\n)]\n\n\n\nuse std::time::Duration;\n\n\n\nuse derive_more::{Display, Error, From};\n\nuse ephyr_serde::seconds;\n\nuse mime::Mime;\n\nuse serde::{Deserialize, Serialize};\n\nuse serde_repr::{Deserialize_repr, Serialize_repr};\n\nuse url::Url;\n\n\n\n/// [API] 
of [allatra.video][1] site.\n\n///\n\n/// [API]: https://en.wikipedia.org/wiki/Application_programming_interface\n\n/// [1]: https://allatra.video\n\n#[derive(Clone, Copy, Debug)]\n\npub struct Api;\n\n\n", "file_path": "common/api/allatra-video/src/lib.rs", "rank": 66, "score": 10.696938427970426 }, { "content": " ///\n\n /// So, potentially allows duplication.\n\n ///\n\n /// [FFmpeg]: https://ffmpeg.org\n\n pool: HashMap<Uuid, Restreamer>,\n\n\n\n /// Application [`State`] dictating which [FFmpeg] processes should run.\n\n ///\n\n /// [FFmpeg]: https://ffmpeg.org\n\n state: State,\n\n}\n\n\n\nimpl RestreamersPool {\n\n /// Creates a new [`RestreamersPool`] out of the given parameters.\n\n #[inline]\n\n #[must_use]\n\n pub fn new<P: Into<PathBuf>>(ffmpeg_path: P, state: State) -> Self {\n\n Self {\n\n ffmpeg_path: ffmpeg_path.into(),\n\n pool: HashMap::new(),\n", "file_path": "components/restreamer/src/ffmpeg.rs", "rank": 67, "score": 10.653647407999976 }, { "content": " ///\n\n /// [1]: https://en.wikipedia.org/wiki/Clean_URL#Slug\n\n #[must_use]\n\n pub fn validate<S: AsRef<str> + ?Sized>(slug: &S) -> bool {\n\n static SLUG_REGEX: Lazy<Regex> =\n\n Lazy::new(|| Regex::new(r\"^[a-z0-9]+(?:-[a-z0-9]+)*$\").unwrap());\n\n\n\n let slug = slug.as_ref();\n\n !slug.is_empty() && SLUG_REGEX.is_match(slug)\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for PlaylistSlug {\n\n fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>\n\n where\n\n D: Deserializer<'de>,\n\n {\n\n use serde::de::Error as _;\n\n Self::new(<Cow<'_, str>>::deserialize(deserializer)?)\n\n .ok_or_else(|| D::Error::custom(\"not a valid URL slug\"))\n", "file_path": "components/vod-meta-server/src/vod/meta/state/mod.rs", "rank": 68, "score": 10.59079405405556 }, { "content": "//! [FFmpeg]-based definitions and implementations.\n\n//!\n\n//! 
[FFmpeg]: https://ffmpeg.org\n\n\n\nuse std::{\n\n borrow::Cow,\n\n collections::HashMap,\n\n panic::AssertUnwindSafe,\n\n path::{Path, PathBuf},\n\n process::Stdio,\n\n sync::Arc,\n\n time::Duration,\n\n};\n\n\n\nuse derive_more::From;\n\nuse ephyr_log::{log, Drain as _};\n\nuse futures::{future, pin_mut, FutureExt as _, TryFutureExt as _};\n\nuse tokio::{io, process::Command, sync::Mutex, time};\n\nuse url::Url;\n\nuse uuid::Uuid;\n", "file_path": "components/restreamer/src/ffmpeg.rs", "rank": 69, "score": 10.526472036781861 }, { "content": "//! Definitions related to [VOD] files cache.\n\n//!\n\n//! [VOD]: https://en.wikipedia.org/wiki/Video_on_demand\n\n\n\nuse std::{\n\n panic::AssertUnwindSafe,\n\n path::{self, Path, PathBuf},\n\n};\n\n\n\nuse anyhow::anyhow;\n\nuse ephyr_log::log;\n\nuse futures::{sink, FutureExt as _, StreamExt as _, TryStreamExt as _};\n\nuse tempfile::TempDir;\n\nuse tokio::{fs, io, sync::mpsc};\n\nuse tokio_util::compat::FuturesAsyncReadCompatExt as _;\n\nuse url::Url;\n\n\n\nuse crate::util::display_panic;\n\n\n\n/// Manager of [VOD] files cache.\n", "file_path": "components/vod-meta-server/src/vod/file/cache.rs", "rank": 70, "score": 10.307442424187823 }, { "content": "use byteorder::{BigEndian, ByteOrder as _};\n\nuse derive_more::{Display, Error};\n\nuse ephyr_log::log;\n\nuse futures::{\n\n future, ready, sink, FutureExt as _, Stream, StreamExt as _,\n\n TryFutureExt as _,\n\n};\n\nuse once_cell::sync::Lazy;\n\nuse rand::Rng as _;\n\nuse tokio::{\n\n io::{self, AsyncRead},\n\n task::JoinHandle,\n\n time,\n\n};\n\nuse tsclientlib::{DisconnectOptions, StreamItem};\n\nuse tsproto_packets::packets::AudioData;\n\n\n\npub use tsclientlib::{ConnectOptions as Config, Connection};\n\n\n\n/// Handler responsible for decoding, tracking and mixing audio of all\n\n/// [TeamSpeak] channel members.\n\n///\n\n/// [TeamSpeak]: https://teamspeak.com\n\npub type AudioHandler = tsclientlib::audio::AudioHandler<MemberId>;\n\n\n\n/// Type of [TeamSpeak] 
channel member ID.\n\n///\n\n/// [TeamSpeak]: https://teamspeak.com\n", "file_path": "components/restreamer/src/teamspeak.rs", "rank": 71, "score": 10.267746585936173 }, { "content": " /// Kind of a spawned [FFmpeg] process describing the actual job it\n\n /// performs.\n\n ///\n\n /// [FFmpeg]: https://ffmpeg.org\n\n kind: RestreamerKind,\n\n}\n\n\n\nimpl Restreamer {\n\n /// Creates a new [`Restreamer`] spawning the actual [FFmpeg] process in\n\n /// background. Once this [`Restreamer`] is dropped, its [FFmpeg] process is\n\n /// aborted.\n\n ///\n\n /// [FFmpeg]: https://ffmpeg.org\n\n #[must_use]\n\n pub fn run<P: AsRef<Path> + Send + 'static>(\n\n ffmpeg_path: P,\n\n kind: RestreamerKind,\n\n state: State,\n\n ) -> Self {\n\n let (kind_for_abort, state_for_abort) = (kind.clone(), state.clone());\n", "file_path": "components/restreamer/src/ffmpeg.rs", "rank": 72, "score": 10.227520839796298 }, { "content": " MixingRestreamer::new(output, from_url, prev).into()\n\n })\n\n }\n\n\n\n /// Extracts the correct [`Url`] acceptable by [FFmpeg] for sinking a live\n\n /// stream by the given [`state::Output`].\n\n ///\n\n /// [FFmpeg]: https://ffmpeg.org\n\n #[inline]\n\n #[must_use]\n\n fn dst_url(output: &state::Output) -> Url {\n\n (output.dst.scheme() == \"file\")\n\n .then(|| dvr::Storage::global().file_url(output).unwrap())\n\n .unwrap_or_else(|| output.dst.clone().into())\n\n }\n\n\n\n /// Checks whether this [`Restreamer`] must be restarted, as cannot apply\n\n /// the new `actual` params on itself correctly, without interruptions.\n\n #[inline]\n\n #[must_use]\n", "file_path": "components/restreamer/src/ffmpeg.rs", "rank": 73, "score": 10.043013189723027 }, { "content": " /// [`spec::v1::InputEndpoint`].\n\n #[inline]\n\n #[must_use]\n\n pub fn new(spec: spec::v1::InputEndpoint) -> Self {\n\n Self {\n\n id: EndpointId::random(),\n\n kind: spec.kind,\n\n status: Status::Offline,\n\n srs_publisher_id: None,\n\n srs_player_ids: HashSet::new(),\n\n }\n\n }\n\n\n\n 
/// Applies the given [`spec::v1::InputEndpoint`] to this [`InputEndpoint`].\n\n #[inline]\n\n pub fn apply(&mut self, new: spec::v1::InputEndpoint) {\n\n self.kind = new.kind;\n\n }\n\n\n\n /// Exports this [`InputEndpoint`] as a [`spec::v1::InputEndpoint`].\n", "file_path": "components/restreamer/src/state.rs", "rank": 74, "score": 9.824542738322112 }, { "content": "//! [GraphQL] APIs provided by application.\n\n//!\n\n//! [GraphQL]: https://graphql.com\n\n\n\npub mod client;\n\n\n\nuse std::{borrow::Cow, convert::Infallible, fmt, ops::Deref};\n\n\n\nuse actix_web::{http, HttpRequest};\n\nuse derive_more::{Display, Error};\n\nuse juniper::{\n\n graphql_value, http::GraphQLResponse, FieldError, IntoFieldError,\n\n ScalarValue,\n\n};\n\nuse send_wrapper::SendWrapper;\n\nuse smart_default::SmartDefault;\n\n\n\n/// Context containing [`HttpRequest`] for providing additional information when\n\n/// executing GraphQL operations.\n\n#[derive(Clone, Debug)]\n", "file_path": "components/restreamer/src/api/graphql/mod.rs", "rank": 75, "score": 9.818155294162466 }, { "content": "impl fmt::Debug for Failure {\n\n #[inline]\n\n fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {\n\n write!(f, \"\")\n\n }\n\n}\n\n\n\nimpl From<()> for Failure {\n\n #[inline]\n\n fn from(_: ()) -> Self {\n\n Self\n\n }\n\n}\n", "file_path": "components/vod-meta-server/src/cli.rs", "rank": 76, "score": 9.800955592947602 }, { "content": " /// Sets [`http::StatusCode`] for this [`Error`](struct@Error).\n\n ///\n\n /// Goes as `errors.extensions.status` field of GraphQL response.\n\n #[inline]\n\n pub fn set_status<S: Into<http::StatusCode>>(&mut self, s: S) {\n\n self.status = s.into()\n\n }\n\n\n\n /// Sets given [`Error`](struct@Error)'s message as required by\n\n /// [GraphQL errors spec][1].\n\n ///\n\n /// Goes as `errors.message` field of GraphQL response.\n\n ///\n\n /// [1]: https://facebook.github.io/graphql/June2018/#sec-Errors\n\n #[inline]\n\n pub fn set_message<M: fmt::Display 
+ ?Sized>(&mut self, m: &M) {\n\n self.message = format!(\"{}\", m).into()\n\n }\n\n\n\n /// Sets backtrace of this [`Error`](struct@Error).\n", "file_path": "components/restreamer/src/api/graphql/mod.rs", "rank": 77, "score": 9.785949501270633 }, { "content": " /// No data can be received from [TeamSpeak] server.\n\n ///\n\n /// [TeamSpeak]: https://teamspeak.com\n\n #[display(fmt = \"Unable to receive data from TeamSpeak server\")]\n\n NoData,\n\n\n\n /// Input buffer provided to read [`Input`] is too small to read any data.\n\n #[display(fmt = \"Input buffer is too small\")]\n\n TooSmallBuffer,\n\n}\n\n\n\nimpl From<InputError> for io::Error {\n\n fn from(e: InputError) -> Self {\n\n use InputError as E;\n\n\n\n let kind = match e {\n\n E::NoData => io::ErrorKind::NotConnected,\n\n E::TooSmallBuffer => io::ErrorKind::InvalidData,\n\n };\n\n io::Error::new(kind, e)\n", "file_path": "components/restreamer/src/teamspeak.rs", "rank": 78, "score": 9.762994752981793 }, { "content": " #[inline]\n\n pub fn new(url: Url) -> Result<Self, Url> {\n\n if Self::validate(&url) {\n\n Ok(Self(url))\n\n } else {\n\n Err(url)\n\n }\n\n }\n\n\n\n /// Validates the given [`Url`] to represent a valid [`InputSrcUrl`].\n\n #[must_use]\n\n pub fn validate(url: &Url) -> bool {\n\n match url.scheme() {\n\n \"rtmp\" | \"rtmps\" => url.has_host(),\n\n \"http\" | \"https\" => {\n\n url.has_host()\n\n && Path::new(url.path()).extension()\n\n == Some(\"m3u8\".as_ref())\n\n }\n\n _ => false,\n", "file_path": "components/restreamer/src/state.rs", "rank": 79, "score": 9.528848364727724 }, { "content": "//! Custom [`serde`] serialization/deserialization functions for [`TimeZone`]\n\n//! in a [RFC 3339 format][1] (`+04:03`, for example).\n\n//!\n\n//! [`TimeZone`]: chrono::FixedOffset\n\n//! 
[1]: https://tools.ietf.org/html/rfc3339#section-4.2\n\n\n\nuse std::{borrow::Cow, convert::TryFrom as _};\n\n\n\nuse chrono::FixedOffset as TimeZone;\n\nuse serde::{de::Error as _, Deserialize as _, Deserializer, Serializer};\n\n\n\n/// Serializes [`TimeZone`] in a [RFC 3339 format][1] (`+04:03`, for example).\n\n///\n\n/// # Errors\n\n///\n\n/// Never errors.\n\n///\n\n/// [1]: https://tools.ietf.org/html/rfc3339#section-4.2\n\n#[allow(clippy::trivially_copy_pass_by_ref)]\n\n#[inline]\n", "file_path": "common/serde/src/timezone.rs", "rank": 80, "score": 9.456749988770492 }, { "content": "pub struct Volume(#[default(Self::ORIGIN.0)] u16);\n\n\n\nimpl Volume {\n\n /// Maximum possible value of a [`Volume`] rate.\n\n pub const MAX: Volume = Volume(1000);\n\n\n\n /// Value of a [`Volume`] rate corresponding to the original one of an audio\n\n /// track.\n\n pub const ORIGIN: Volume = Volume(100);\n\n\n\n /// Minimum possible value of a [`Volume`] rate. Actually, disables audio.\n\n pub const OFF: Volume = Volume(0);\n\n\n\n /// Creates a new [`Volume`] rate value if it satisfies the required\n\n /// invariants:\n\n /// - within [`Volume::OFF`] and [`Volume::MAX`] values.\n\n #[must_use]\n\n pub fn new<N: TryInto<u16>>(num: N) -> Option<Self> {\n\n let num = num.try_into().ok()?;\n\n if (Self::OFF.0..=Self::MAX.0).contains(&num) {\n", "file_path": "components/restreamer/src/state.rs", "rank": 81, "score": 9.449541128004341 }, { "content": " old.apply(new, replace);\n\n } else {\n\n restreams.push(Restream::new(new));\n\n }\n\n }\n\n }\n\n\n\n let mut settings = self.settings.lock_mut();\n\n settings.apply(new.settings);\n\n }\n\n\n\n /// Exports this [`State`] as a [`spec::v1::Spec`].\n\n #[inline]\n\n #[must_use]\n\n pub fn export(&self) -> Spec {\n\n spec::v1::Spec {\n\n settings: self.settings.get_cloned().export(),\n\n restreams: self\n\n .restreams\n\n .get_cloned()\n", "file_path": "components/restreamer/src/state.rs", "rank": 82, "score": 9.334570185465624 }, { 
"content": " /// Creates a new [`OutputDstUrl`] if the given [`Url`] is suitable for\n\n /// that.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns the given [`Url`] back if it doesn't represent a valid\n\n /// [`OutputDstUrl`].\n\n #[inline]\n\n pub fn new(url: Url) -> Result<Self, Url> {\n\n if Self::validate(&url) {\n\n Ok(Self(url))\n\n } else {\n\n Err(url)\n\n }\n\n }\n\n\n\n /// Validates the given [`Url`] to represent a valid [`OutputDstUrl`].\n\n #[must_use]\n\n pub fn validate(url: &Url) -> bool {\n\n match url.scheme() {\n", "file_path": "components/restreamer/src/state.rs", "rank": 83, "score": 9.331535252869468 }, { "content": "//! [HTTP Callback API][1] of [SRS] exposed by application.\n\n//!\n\n//! [SRS]: https://github.com/ossrs/srs\n\n//! [1]: https://github.com/ossrs/srs/wiki/v3_EN_HTTPCallback\n\n\n\nuse std::net::IpAddr;\n\n\n\nuse serde::{Deserialize, Serialize};\n\n\n\n/// Request performed by [SRS] to [HTTP Callback API][1].\n\n///\n\n/// [SRS]: https://github.com/ossrs/srs\n\n/// [1]: https://github.com/ossrs/srs/wiki/v3_EN_HTTPCallback\n\n#[derive(Clone, Debug, Deserialize, Serialize)]\n\npub struct Request {\n\n /// Event that [SRS] reports about.\n\n ///\n\n /// [SRS]: https://github.com/ossrs/srs\n\n pub action: Event,\n\n\n", "file_path": "components/restreamer/src/api/srs/callback.rs", "rank": 84, "score": 9.33050982655024 }, { "content": " fn from(id: u32) -> Self {\n\n Self(Arc::new(id))\n\n }\n\n}\n\n\n\nimpl Deref for ClientId {\n\n type Target = u32;\n\n\n\n #[inline]\n\n fn deref(&self) -> &Self::Target {\n\n &*self.0\n\n }\n\n}\n\n\n\nimpl Borrow<u32> for ClientId {\n\n #[inline]\n\n fn borrow(&self) -> &u32 {\n\n &*self\n\n }\n\n}\n", "file_path": "components/restreamer/src/srs.rs", "rank": 85, "score": 9.298603685381538 }, { "content": " } else {\n\n Err(url)\n\n }\n\n }\n\n\n\n /// Validates the given [`Url`] to represent a valid [`MixinSrcUrl`].\n\n #[must_use]\n\n pub fn validate(url: &Url) -> bool {\n\n url.has_host()\n\n 
&& match url.scheme() {\n\n \"ts\" => true,\n\n \"http\" | \"https\" => {\n\n Path::new(url.path()).extension() == Some(\"mp3\".as_ref())\n\n }\n\n _ => false,\n\n }\n\n }\n\n}\n\n\n\nimpl<'de> Deserialize<'de> for MixinSrcUrl {\n", "file_path": "components/restreamer/src/state.rs", "rank": 86, "score": 9.162144593659114 }, { "content": " /// Exports this [`Output`] as a [`spec::v1::Output`].\n\n #[inline]\n\n #[must_use]\n\n pub fn export(&self) -> spec::v1::Output {\n\n spec::v1::Output {\n\n dst: self.dst.clone(),\n\n label: self.label.clone(),\n\n preview_url: self.preview_url.clone(),\n\n volume: self.volume,\n\n mixins: self.mixins.iter().map(Mixin::export).collect(),\n\n enabled: self.enabled,\n\n }\n\n }\n\n}\n\n\n\n/// ID of an `Output`.\n\n#[derive(\n\n Clone,\n\n Copy,\n\n Debug,\n", "file_path": "components/restreamer/src/state.rs", "rank": 87, "score": 9.093344640839813 }, { "content": " ///\n\n /// [1]: https://en.wikipedia.org/wiki/TMPDIR\n\n tmp_dir: TempDir,\n\n}\n\n\n\nimpl Manager {\n\n /// Number of maximum allowed concurrent downloads at the same time.\n\n pub const CONCURRENT_DOWNLOADS: usize = 4;\n\n\n\n /// Creates new [`Manager`] running the background downloads queue\n\n /// processing.\n\n ///\n\n /// # Errors\n\n ///\n\n /// - If specified `dir` doesn't exist or cannot be resolved.\n\n /// - If temporary directory cannot be created.\n\n pub fn try_new<P: AsRef<Path>>(dir: P) -> io::Result<Self> {\n\n let cache_dir = dir.as_ref().canonicalize()?;\n\n\n\n let tmp_dir = tempfile::Builder::new()\n", "file_path": "components/vod-meta-server/src/vod/file/cache.rs", "rank": 88, "score": 9.085705189624344 }, { "content": "\n\nuse crate::{\n\n display_panic, dvr,\n\n state::{self, Delay, MixinId, MixinSrcUrl, State, Status, Volume},\n\n teamspeak,\n\n};\n\nuse std::result::Result::Err;\n\n\n\n/// Pool of [FFmpeg] processes performing re-streaming of a media traffic.\n\n///\n\n/// [FFmpeg]: https://ffmpeg.org\n\n#[derive(Debug)]\n\npub struct 
RestreamersPool {\n\n /// Path to a [FFmpeg] binary used for spawning processes.\n\n ///\n\n /// [FFmpeg]: https://ffmpeg.org\n\n ffmpeg_path: PathBuf,\n\n\n\n /// Pool of currently running [FFmpeg] re-streaming processes identified by\n\n /// an ID of the correspondent element in a [`State`].\n", "file_path": "components/restreamer/src/ffmpeg.rs", "rank": 89, "score": 9.04234389533104 }, { "content": "//! APIs used in application or provided by it.\n\n\n\npub mod graphql;\n\npub mod srs;\n", "file_path": "components/restreamer/src/api/mod.rs", "rank": 90, "score": 9.013508263194794 }, { "content": "//! Utils and helpers.\n\n\n\nuse std::any::Any;\n\n\n\n/// Interprets given [panic payload][1] as displayable message.\n\n///\n\n/// [1]: std::panic::PanicInfo::payload\n", "file_path": "components/vod-meta-server/src/util.rs", "rank": 91, "score": 8.956133705126794 }, { "content": "use actix_web_static_files::NpmBuild;\n\n\n", "file_path": "components/restreamer/build.rs", "rank": 94, "score": 8.855912144781712 }, { "content": " }\n\n\n\n fn from_str(value: ScalarToken<'_>) -> ParseScalarResult<'_, S> {\n\n <String as ParseScalarValue<S>>::from_str(value)\n\n }\n\n}\n\n\n\nimpl PartialEq<str> for RestreamKey {\n\n #[inline]\n\n fn eq(&self, other: &str) -> bool {\n\n self.0 == other\n\n }\n\n}\n\n\n\n/// Upstream source that a `Restream` receives a live stream from.\n\n#[derive(\n\n Clone, Debug, Deserialize, Eq, GraphQLObject, PartialEq, Serialize,\n\n)]\n\npub struct Input {\n\n /// Unique ID of this `Input`.\n", "file_path": "components/restreamer/src/state.rs", "rank": 95, "score": 8.818619970866607 }, { "content": "impl Mixin {\n\n /// Creates a new [`Mixin`] out of the given [`state::Mixin`].\n\n ///\n\n /// `prev` value may be specified to consume already initialized resources,\n\n /// which are unwanted to be re-created.\n\n ///\n\n /// Optional `label` may be used to identify this [`Mixin`] in a [TeamSpeak]\n\n /// channel.\n\n ///\n\n /// [TeamSpeak]: 
https://teamspeak.com\n\n #[allow(clippy::non_ascii_literal)]\n\n #[must_use]\n\n pub fn new(\n\n state: &state::Mixin,\n\n label: Option<&state::Label>,\n\n prev: Option<&Mixin>,\n\n ) -> Self {\n\n let stdin = (state.src.scheme() == \"ts\")\n\n .then(|| {\n\n prev.and_then(|m| m.stdin.clone()).or_else(|| {\n", "file_path": "components/restreamer/src/ffmpeg.rs", "rank": 96, "score": 8.80204785293801 }, { "content": " /// Indicator whether this `Input` is enabled, so is allowed to receive a\n\n /// live stream from its upstream sources.\n\n #[serde(default, skip_serializing_if = \"is_false\")]\n\n pub enabled: bool,\n\n}\n\n\n\nimpl Input {\n\n /// Creates a new [`Input`] out of the given [`spec::v1::Input`].\n\n #[must_use]\n\n pub fn new(spec: spec::v1::Input) -> Self {\n\n Self {\n\n id: InputId::random(),\n\n key: spec.key,\n\n endpoints: spec\n\n .endpoints\n\n .into_iter()\n\n .map(InputEndpoint::new)\n\n .collect(),\n\n src: spec.src.map(InputSrc::new),\n\n enabled: spec.enabled,\n", "file_path": "components/restreamer/src/state.rs", "rank": 97, "score": 8.721905230369726 }, { "content": " /// current version.\n\n ///\n\n /// Version is used for CAS (compare and swap) operations.\n\n state: Arc<RwLock<(State, u8)>>,\n\n}\n\n\n\nimpl Manager {\n\n /// Instantiates new [`Manager`] to read from and persist the [`State`] in\n\n /// the provided `file`.\n\n ///\n\n /// If no `file` exists, the new empty one will be created.\n\n ///\n\n /// # Errors\n\n ///\n\n /// If the `file`:\n\n /// - cannot be read;\n\n /// - contains broken [`State`].\n\n pub async fn try_new<P: AsRef<Path>>(\n\n file: P,\n\n ) -> Result<Self, anyhow::Error> {\n", "file_path": "components/vod-meta-server/src/vod/meta/state/manager.rs", "rank": 98, "score": 8.566665197080187 }, { "content": "/// host and `.mp3` extension in its path).\n\n///\n\n/// [MP3]: https://en.wikipedia.org/wiki/MP3\n\n/// [TeamSpeak]: https://teamspeak.com\n\n#[derive(\n\n Clone, Debug, Deref, Display, Eq, Hash, Into, 
PartialEq, Serialize,\n\n)]\n\npub struct MixinSrcUrl(Url);\n\n\n\nimpl MixinSrcUrl {\n\n /// Creates a new [`MixinSrcUrl`] if the given [`Url`] is suitable for that.\n\n ///\n\n /// # Errors\n\n ///\n\n /// Returns the given [`Url`] back if it doesn't represent a valid\n\n /// [`MixinSrcUrl`].\n\n #[inline]\n\n pub fn new(url: Url) -> Result<Self, Url> {\n\n if Self::validate(&url) {\n\n Ok(Self(url))\n", "file_path": "components/restreamer/src/state.rs", "rank": 99, "score": 8.544645623765728 } ]
Rust
src/file_watcher.rs
devzbysiu/podium
ee45e5e8c880b6b8bf638f5257db3f773df4e61b
use crate::contracts::file_to_process::{new_file_to_process, FileToProcess}; use crate::custom_tantivy::wrapper::*; use notify::{watcher, DebouncedEvent, RecursiveMode, Watcher}; use tracing::info; use walkdir::{DirEntry, WalkDir}; use std::path::PathBuf; use std::sync::mpsc::channel; use std::time::Duration; pub async fn start_watcher(directories: &Vec<PathBuf>, tantivy_wrapper: &mut TantivyWrapper) { info!("Starting file watcher thread on: {:?}", directories); let (watcher_tx, watcher_rx) = channel(); let mut watcher = watcher(watcher_tx, Duration::from_secs(10)).unwrap(); for directory in directories { watcher.watch(directory, RecursiveMode::Recursive).unwrap(); } loop { let watcher_event = watcher_rx.recv(); match watcher_event { Ok(event) => { info!("Received watcher event: {:?}", event); match event { DebouncedEvent::Create(path_buf) => { create_event(path_buf, &tantivy_wrapper).await; } DebouncedEvent::Write(path_buf) => { write_event(path_buf, &tantivy_wrapper).await; } DebouncedEvent::NoticeRemove(path_buf) => { remove_event(&path_buf, &tantivy_wrapper); } DebouncedEvent::Rename(src_path_buf, dst_path_buf) => { rename_event(&src_path_buf, &dst_path_buf, &tantivy_wrapper); } _ => { } } } Err(e) => error!("watch error: {:?}", e), } tantivy_wrapper.index_writer.commit().unwrap(); } } async fn create_event(path_buf: PathBuf, tantivy_wrapper: &TantivyWrapper) { if path_buf.is_dir() { let walker = WalkDir::new(path_buf).into_iter(); for entry in walker.filter_entry(|e| !is_hidden(e)) { let entry = entry.unwrap(); create( new_file_to_process(entry.into_path()).await, tantivy_wrapper, ) .await; } } else { create(new_file_to_process(path_buf).await, tantivy_wrapper).await; } } async fn create(file_to_process: FileToProcess, tantivy_wrapper: &TantivyWrapper) { tantivy_wrapper.process_file(file_to_process).await; } async fn write_event(path_buf: PathBuf, tantivy_wrapper: &TantivyWrapper) { if path_buf.is_dir() { let walker = WalkDir::new(path_buf).into_iter(); for 
entry in walker.filter_entry(|e| !is_hidden(e)) { let entry = entry.unwrap(); write( new_file_to_process(entry.into_path()).await, tantivy_wrapper, ) .await; } } else { write(new_file_to_process(path_buf).await, tantivy_wrapper).await; } } async fn write(file_to_process: FileToProcess, tantivy_wrapper: &TantivyWrapper) { let path_buf = file_to_process.path.clone(); remove(&path_buf, tantivy_wrapper); tantivy_wrapper.process_file(file_to_process).await; } fn remove_event(path_buf: &PathBuf, tantivy_wrapper: &TantivyWrapper) { if path_buf.is_dir() { let walker = WalkDir::new(path_buf).into_iter(); for entry in walker.filter_entry(|e| !is_hidden(e)) { let entry = entry.unwrap(); remove(&entry.into_path(), tantivy_wrapper); } } else { remove(path_buf, tantivy_wrapper); } } fn remove(path_buf: &PathBuf, tantivy_wrapper: &TantivyWrapper) { tantivy_wrapper.remove(path_buf); } fn rename_event( _src_path_buf: &PathBuf, _dst_path_buf: &PathBuf, _tantivy_wrapper: &TantivyWrapper, ) { unimplemented!(); } pub fn is_hidden(entry: &DirEntry) -> bool { entry .file_name() .to_str() .map(|s| s.starts_with('.')) .unwrap_or(false) }
use crate::contracts::file_to_process::{new_file_to_process, FileToProcess}; use crate::custom_tantivy::wrapper::*; use notify::{watcher, DebouncedEvent, RecursiveMode, Watcher}; use tracing::info; use walkdir::{DirEntry, WalkDir}; use std::path::PathBuf; use std::sync::mpsc::channel; use std::time::Duration; pub async fn start_watcher(directories: &Vec<PathBuf>, tantivy_wrapper: &mut TantivyWrapper) { info!("Starting file watcher thread on: {:?}", directories); let (watcher_tx, watcher_rx) = channel(); let mut watcher = watcher(watcher_tx, Duration::from_secs(10)).unwrap(); for directory in directories { watcher.watch(directory, RecursiveMode::Recursive).unwrap(); } loop { let watcher_event = watcher_rx.recv(); match watcher_event { Ok(event) => { info!("Received watcher event: {:?}", event); match event { DebouncedEvent::Create(path_buf) => { create_event(path_buf, &tantivy_wrapper).await; } DebouncedEvent::Write(path_buf) => { write_event(path_buf, &tantivy_wrapper).await; } DebouncedEvent::NoticeRemove(path_buf) => { remove_event(&path_buf, &tantivy_wrapper); } DebouncedEvent::Rename(src_path_buf, dst_path_buf) => { rename_event(&src_path_buf, &dst_path_buf, &tantivy_wrapper); } _ => { } } } Err(e) => error!("watch error: {:?}", e), } tantivy_wrapper.index_writer.commit().unwrap(); } } async fn create_event(path_buf: PathBuf, tantivy_wrapper: &TantivyWrapper) { if path_buf.is_dir() { let walker = WalkDir::new(path_buf).into_iter(); for entry in walker.filter_entry(|e| !is_hidden(e)) { let entry = entry.unwrap(); create( new_file_to_process(entry.into_path()).await, tantivy_wrapper, ) .await; } } else { create(new_file_to_process(path_buf).await, tantivy_wrapper).await; } } async fn create(file_to_process: FileToProcess, tantivy_wrapper: &TantivyWrapper) { tantivy_wrapper.process_file(file_to_process).await; } async fn write_event(path_buf: PathBuf, tantivy_wrapper: &TantivyWrapper) { if path_buf.is_dir() { let walker = WalkDir::new(path_buf).into_iter(); for 
entry in walker.filter_entry(|e| !is_hidden(e)) { let entry = entry.unwrap(); write( new_file_to_process(entry.into_path()).await, tantivy_wrapper, ) .await; } } else { write(new_file_to_process(path_buf).await, tantivy_wrapper).await; } } async fn write(file_to_process: FileToProcess, tantivy_wrapper: &TantivyWrapper) { let path_buf = file_to_process.path.clone(); remove(&path_buf, tantivy_wrapper); tantivy_wrapper.process_file(file_to_process).await; } fn remove_event(path_buf: &PathBuf, tantivy_wrapper: &TantivyWrapper) { if path_buf.is_dir() { let walker = WalkDir::new(path_buf).into_iter(); for entry in walker.filter_entry(|e| !is_hidden(e)) { let entry = entry.unwrap(); remove(&entry.into_path(), tantivy_wrapper); } } else { remove(path_buf, tantivy_wrapper); } } fn remove(path_buf: &PathBuf, tantivy_wrapper: &TantivyWrapper) { tantivy_wrapper.remove(path_buf); } fn rename_event( _src_path_buf: &PathBuf, _dst_path_buf: &PathBuf, _tantivy_wrapper: &TantivyWrapper, ) { unimplemented!(); }
pub fn is_hidden(entry: &DirEntry) -> bool { entry .file_name() .to_str() .map(|s| s.starts_with('.')) .unwrap_or(false) }
function_block-full_function
[ { "content": "pub fn log_and_return_error_string(error_string: String) -> String {\n\n error!(\"{}\", error_string);\n\n error_string\n\n}\n", "file_path": "src/error_adapter.rs", "rank": 3, "score": 106939.53277923616 }, { "content": "fn bench_indexing_text_file(c: &mut Criterion) {\n\n c.bench_function(\"indexing_text_file\", |b| {\n\n b.iter(|| {\n\n let bench_file_path = black_box(Path::new(\"./test_files/file.txt\"));\n\n TextIndexer.index_file(&FileToProcess::from(bench_file_path))\n\n });\n\n });\n\n}\n\n\n\n#[cfg(not(target_os = \"windows\"))]\n\ncriterion_group!(\n\n benches,\n\n bench_indexing_csv_file,\n\n bench_indexing_exif_file,\n\n bench_indexing_mobile_net_v2_file,\n\n bench_indexing_pdf_file,\n\n bench_indexing_exif_file,\n\n bench_indexing_pptx_file,\n\n bench_indexing_spreadsheet_file,\n\n bench_indexing_text_file,\n", "file_path": "benches/my_benchmark.rs", "rank": 4, "score": 105426.25309364467 }, { "content": "fn bench_indexing_pdf_file(c: &mut Criterion) {\n\n c.bench_function(\"indexing_pdf_file\", |b| {\n\n b.iter(|| {\n\n let bench_file_path = black_box(Path::new(\"./test_files/Cats.pdf\"));\n\n PdfIndexer.index_file(&FileToProcess::from(bench_file_path))\n\n });\n\n });\n\n}\n\n\n", "file_path": "benches/my_benchmark.rs", "rank": 5, "score": 105426.25309364467 }, { "content": "fn bench_indexing_pptx_file(c: &mut Criterion) {\n\n c.bench_function(\"indexing_pptx_file\", |b| {\n\n b.iter(|| {\n\n let bench_file_path = black_box(Path::new(\"./test_files/Cats.pptx\"));\n\n PptxIndexer.index_file(&FileToProcess::from(bench_file_path))\n\n });\n\n });\n\n}\n\n\n", "file_path": "benches/my_benchmark.rs", "rank": 6, "score": 105426.25309364467 }, { "content": "fn bench_indexing_csv_file(c: &mut Criterion) {\n\n c.bench_function(\"indexing_csv_file\", |b| {\n\n b.iter(|| {\n\n let bench_file_path = black_box(Path::new(\"./test_files/data.csv\"));\n\n CsvIndexer.index_file(&FileToProcess::from(bench_file_path))\n\n });\n\n });\n\n}\n\n\n", 
"file_path": "benches/my_benchmark.rs", "rank": 7, "score": 105426.25309364467 }, { "content": "fn bench_indexing_exif_file(c: &mut Criterion) {\n\n c.bench_function(\"indexing_exif_file\", |b| {\n\n b.iter(|| {\n\n let bench_file_path = black_box(Path::new(\"./test_files/IMG_2551.jpeg\"));\n\n ExifIndexer.index_file(&FileToProcess::from(bench_file_path))\n\n });\n\n });\n\n}\n\n\n", "file_path": "benches/my_benchmark.rs", "rank": 8, "score": 105426.25309364467 }, { "content": "fn bench_indexing_spreadsheet_file(c: &mut Criterion) {\n\n c.bench_function(\"indexing_spreadsheet_file\", |b| {\n\n b.iter(|| {\n\n let bench_file_path = black_box(Path::new(\"./test_files/Cats.xlsx\"));\n\n SpreadsheetIndexer.index_file(&FileToProcess::from(bench_file_path))\n\n });\n\n });\n\n}\n\n\n", "file_path": "benches/my_benchmark.rs", "rank": 9, "score": 105426.25309364467 }, { "content": "pub fn server_config(cfg: &mut web::ServiceConfig) {\n\n cfg.route(\"/search/{query}\", web::get().to(index));\n\n}\n\n\n\nasync fn index(app_state: web::Data<AppState>, req: HttpRequest) -> HttpResponse {\n\n println!(\"{:?}\", req);\n\n\n\n let query: String = req.match_info().query(\"query\").parse().unwrap();\n\n println!(\"{:?}\", query);\n\n\n\n let response = span!(Level::INFO, \"search_query\").in_scope(|| app_state.searcher.search(query));\n\n\n\n let result = serde_json::to_string(&response).unwrap();\n\n\n\n info!(\"Found results: {:?}\", &result);\n\n\n\n HttpResponse::Ok().body(result)\n\n}\n", "file_path": "src/routes/search.rs", "rank": 11, "score": 103763.20538998599 }, { "content": "#[cfg(not(target_os = \"windows\"))]\n\nfn bench_indexing_mobile_net_v2_file(c: &mut Criterion) {\n\n c.bench_function(\"indexing_mobile_net_v2_file\", |b| {\n\n b.iter(|| {\n\n let bench_file_path = black_box(Path::new(\"./test_files/IMG_2551.jpeg\"));\n\n MobileNetV2Indexer.index_file(&FileToProcess::from(bench_file_path))\n\n });\n\n });\n\n}\n\n\n", "file_path": "benches/my_benchmark.rs", "rank": 
12, "score": 100137.01518695442 }, { "content": "pub fn get_config() -> AppConfig {\n\n let matches = app_from_crate!()\n\n .arg(\n\n Arg::with_name(\"scan-directories\")\n\n .short(\"s\")\n\n .long(\"scan-directories\")\n\n .required(true)\n\n .takes_value(true)\n\n .use_delimiter(true)\n\n .require_delimiter(true)\n\n .value_delimiter(\",\")\n\n .validator(path_validator)\n\n .help(\"Directories to scan then watch\"),\n\n )\n\n .arg(\n\n Arg::with_name(\"verbose\")\n\n .multiple(true)\n\n .short(\"v\")\n\n .required(false)\n\n .help(\"Verbosity level. Up to 4.\"),\n", "file_path": "src/config.rs", "rank": 13, "score": 85399.53288685418 }, { "content": "/// Builds the tantivy schema\n\npub fn build_schema() -> Schema {\n\n let mut schema_builder = Schema::builder();\n\n\n\n schema_builder.add_text_field(\"title\", TEXT | STORED);\n\n\n\n schema_builder.add_text_field(\"hash\", STRING | STORED);\n\n\n\n schema_builder.add_facet_field(\"location\");\n\n\n\n schema_builder.add_text_field(\"body\", TEXT | STORED);\n\n\n\n schema_builder.build()\n\n}\n\n\n", "file_path": "src/custom_tantivy/utils.rs", "rank": 14, "score": 83436.17820403814 }, { "content": "#[async_trait]\n\npub trait FileProcessor {\n\n async fn process_file(&self, file_to_process: FileToProcess) -> Option<Document>;\n\n}\n\n\n\n#[async_trait]\n\nimpl FileProcessor for TantivyWrapper {\n\n #[instrument(skip(self, file_to_process))]\n\n async fn process_file(&self, file_to_process: FileToProcess) -> Option<Document> {\n\n let entry_path = file_to_process.path.clone();\n\n let path = entry_path.as_path();\n\n let file_hash = file_to_process.hash;\n\n if entry_path.extension() == None {\n\n info!(\"Skipping, no file extension: {:?}\", entry_path);\n\n return None;\n\n }\n\n\n\n let location_facet = &entry_path.to_facet_value();\n\n\n\n info!(\"Processing: {:?}\", entry_path);\n\n info!(\"Hash of file is: {:?}\", file_hash);\n", "file_path": "src/custom_tantivy/wrapper.rs", "rank": 15, "score": 
72509.81681795265 }, { "content": "pub fn calculate_hash(input: &[u8]) -> blake2b_simd::Hash {\n\n let file_hash = blake2b(input);\n\n info!(\"Hash of file is: {:?}\", file_hash);\n\n file_hash\n\n}\n", "file_path": "src/custom_tantivy/utils.rs", "rank": 16, "score": 67393.37109316872 }, { "content": "pub fn destructure_schema(schema: &Schema) -> (Field, Field, Field, Field) {\n\n (\n\n schema.get_field(\"title\").unwrap(),\n\n schema.get_field(\"hash\").unwrap(),\n\n schema.get_field(\"location\").unwrap(),\n\n schema.get_field(\"body\").unwrap(),\n\n )\n\n}\n\n\n", "file_path": "src/custom_tantivy/utils.rs", "rank": 23, "score": 62302.69334396937 }, { "content": "pub fn tantivy_init(settings: &TantivyConfig) -> tantivy::Result<(Searcher, TantivyWrapper)> {\n\n let index_path = &settings.index_path;\n\n\n\n let schema = build_schema();\n\n\n\n let index = Index::open_or_create(MmapDirectory::open(&index_path).unwrap(), schema.clone())?;\n\n\n\n let index_reader = index\n\n .reader_builder()\n\n .reload_policy(ReloadPolicy::OnCommit)\n\n .try_into()?;\n\n\n\n let index_writer = index.writer(50_000_000)?;\n\n\n\n let searcher = Searcher::new(index, index_reader.clone(), schema.clone());\n\n\n\n let tantivy_wrapper = TantivyWrapper::new(index_reader, index_writer, schema);\n\n\n\n Ok((searcher, tantivy_wrapper))\n\n}\n", "file_path": "src/tantivy_process.rs", "rank": 24, "score": 60992.33018311793 }, { "content": "fn get_or_create_settings(app_config: &AppConfig) -> TantivyConfig {\n\n let index_path = app_dir(AppDataType::UserData, &APP_INFO, \"index\").unwrap();\n\n info!(\"Using index file in: {:?}\", index_path);\n\n\n\n let state_path = app_dir(AppDataType::UserData, &APP_INFO, \"state\").unwrap();\n\n let mut initial_processing_file = state_path.clone();\n\n initial_processing_file.push(\"initial_processing\");\n\n\n\n TantivyConfig {\n\n index_path: index_path,\n\n scan_directories: app_config.scan_directories.clone(),\n\n initial_processing_file: 
initial_processing_file,\n\n }\n\n}\n\n\n", "file_path": "src/bin.rs", "rank": 25, "score": 60443.29007459042 }, { "content": "/// Takes a default new doc, adds the values from old doc, but uses a different set of locations\n\n/// Used when removing 1 location from a list of locations\n\nfn new_doc_for_update(\n\n new_doc: &mut Document,\n\n old_doc: &Document,\n\n locations: Vec<&Value>,\n\n schema: &Schema,\n\n) {\n\n let (title, hash_field, location, body) = destructure_schema(&schema);\n\n\n\n info!(\"Setting title for new doc\");\n\n for title_value in old_doc.get_all(title) {\n\n new_doc.add_text(title, title_value.text().unwrap());\n\n }\n\n\n\n info!(\"Setting locations for new doc\");\n\n for location_value in locations {\n\n new_doc.add(FieldValue::new(location, location_value.clone()));\n\n }\n\n\n\n info!(\"Setting hash for new doc\");\n\n\n", "file_path": "src/custom_tantivy/wrapper.rs", "rank": 26, "score": 48431.153479978675 }, { "content": "/// Converts to/from Facet/PathBuf\n\npub trait TantivyConvert {\n\n fn to_facet_value(&self) -> String;\n\n fn from_facet_value(facet_val: &Facet) -> PathBuf;\n\n}\n\n\n\nimpl TantivyConvert for Path {\n\n #[cfg(target_os = \"windows\")]\n\n fn to_facet_value(&self) -> String {\n\n self.canonicalize()\n\n .unwrap()\n\n .to_str()\n\n .unwrap()\n\n .replace(\"\\\\\", \"/\")\n\n }\n\n\n\n #[cfg(not(target_os = \"windows\"))]\n\n fn to_facet_value(&self) -> String {\n\n String::from(self.canonicalize().unwrap().to_str().unwrap())\n\n }\n\n\n", "file_path": "src/custom_tantivy/path_facet_convert.rs", "rank": 27, "score": 43846.997306923964 }, { "content": "/// Each Indexer needs to be able to say if a file extension is supported and extract information from a supported file\n\npub trait Indexer: Send + Sync {\n\n /// If the Indexer supports a file extension\n\n /// Eg: PdfIndexer supports .pdf extensions\n\n fn supports_extension(&self, extension: &OsStr) -> bool;\n\n\n\n /// The logic behind the Indexer to extract 
information from a file\n\n fn index_file(&self, file_to_process: &FileToProcess) -> Result<DocumentSchema>;\n\n\n\n fn supported_extensions(&self) -> Vec<OsString>;\n\n}\n\n\n\n/// Container for all Indexers\n\npub struct Analyzer {\n\n pub supported_extensions: HashSet<OsString>,\n\n}\n\n\n\nimpl Default for Analyzer {\n\n #[cfg(not(target_os = \"windows\"))]\n\n fn default() -> Analyzer {\n\n let indexers: Vec<Box<dyn Indexer>> = vec![\n", "file_path": "src/indexers/mod.rs", "rank": 28, "score": 41126.75446830552 }, { "content": "fn path_validator(v: String) -> Result<(), String> {\n\n let broken_paths: Vec<&str> = v\n\n .split(\",\")\n\n .filter(|path| !Path::new(path).exists())\n\n .collect();\n\n\n\n if broken_paths.len() > 0 {\n\n return Err(format!(\n\n \"The following paths could not be resolved: {:?}\",\n\n broken_paths\n\n ));\n\n }\n\n Ok(())\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 29, "score": 38841.47788155132 }, { "content": "fn port_validator(v: String) -> Result<(), String> {\n\n let try_port = v.parse::<u16>();\n\n if let Ok(port) = try_port {\n\n if port >= 1 {\n\n return Ok(());\n\n }\n\n }\n\n\n\n Err(String::from(\n\n \"The port value needs to be a number >= 1 and <= 65535\",\n\n ))\n\n}\n", "file_path": "src/config.rs", "rank": 30, "score": 38841.47788155132 }, { "content": "fn value_to_deg(val: &[Rational]) -> f64 {\n\n def_to_dec_dec(val[0].to_f64(), val[1].to_f64(), val[2].to_f64())\n\n}\n\n\n", "file_path": "src/indexers/exif_indexer.rs", "rank": 31, "score": 38471.009054640504 }, { "content": "fn setup_global_subscriber(config: &AppConfig) -> impl Drop {\n\n let (flame_layer, _guard) = FlameLayer::with_file(\"./tracing.folded\").unwrap();\n\n let t = tracing_subscriber::fmt()\n\n .with_max_level(config.verbosity.clone())\n\n .finish()\n\n .with(flame_layer)\n\n .try_init();\n\n\n\n _guard\n\n}\n", "file_path": "src/bin.rs", "rank": 32, "score": 36117.08167005098 }, { "content": "fn extract_text(shape_group: &ShapeGroup) -> 
Option<String> {\n\n let mut total_text = String::new();\n\n match shape_group {\n\n ShapeGroup::Shape(shape) => {\n\n if let Some(text_body) = &shape.text_body {\n\n for paragraph in &text_body.paragraph_array {\n\n for text_run in &paragraph.text_run_list {\n\n if let TextRun::RegularTextRun(regular_text_run) = text_run {\n\n total_text.push_str(&regular_text_run.text);\n\n total_text.push_str(\" \");\n\n }\n\n }\n\n }\n\n }\n\n }\n\n ShapeGroup::GroupShape(group_shape) => {\n\n let res_text = group_shape\n\n .shape_array\n\n .iter()\n\n .map(|s_g| extract_text(s_g))\n", "file_path": "src/indexers/pptx_indexer.rs", "rank": 33, "score": 34591.07855284447 }, { "content": "fn def_to_dec_dec(deg: f64, min: f64, sec: f64) -> f64 {\n\n deg + min / 30.0 + sec / 3600.0\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::contracts::file_to_process::new_file_to_process;\n\n use std::path::Path;\n\n\n\n #[tokio::test(core_threads = 1)]\n\n async fn test_indexing_text_file() {\n\n let test_file_path = Path::new(\"./test_files/IMG_2551.jpeg\");\n\n let indexed_document = ExifIndexer\n\n .index_file(&new_file_to_process(test_file_path).await)\n\n .unwrap();\n\n\n\n assert_eq!(indexed_document.name, \"\");\n\n assert_eq!(indexed_document.body, \"Pacureti Prahova Comuna Pacureti RO\");\n\n }\n", "file_path": "src/indexers/exif_indexer.rs", "rank": 34, "score": 30214.799784910087 }, { "content": "use crate::custom_tantivy::utils::calculate_hash;\n\nuse std::fmt::Debug;\n\nuse std::path::{Path, PathBuf};\n\nuse tokio::fs;\n\nuse tracing::{info_span, instrument};\n\n\n\n#[derive(Debug, Clone)]\n\npub struct FileToProcess {\n\n pub path: std::path::PathBuf,\n\n pub hash: blake2b_simd::Hash,\n\n pub contents: Vec<u8>,\n\n}\n\n\n\n#[instrument]\n\npub async fn new_file_to_process<T: AsRef<Path> + Debug>(path: T) -> FileToProcess {\n\n let contents = fs::read(&path).await.unwrap();\n\n\n\n let span = info_span!(\"calculating hash\");\n\n let _enter = 
span.enter();\n\n let hash = calculate_hash(&contents);\n\n drop(_enter);\n\n\n\n FileToProcess {\n\n path: PathBuf::from(path.as_ref()),\n\n hash: hash,\n\n contents: contents,\n\n }\n\n}\n", "file_path": "src/contracts/file_to_process.rs", "rank": 35, "score": 30210.03812994225 }, { "content": "const createWindow = async () => {\n\n if (process.env.NODE_ENV !== 'production') {\n\n await installExtensions();\n\n }\n\n\n\n win = new BrowserWindow({\n\n width: 680,\n\n height: 50,\n\n frame: false,\n\n /* transparent: true, */ resizable: false\n\n });\n\n\n\n if (process.env.NODE_ENV !== 'production') {\n\n process.env.ELECTRON_DISABLE_SECURITY_WARNINGS = '1'; // eslint-disable-line require-atomic-updates\n\n win.loadURL(`http://localhost:2003`);\n\n } else {\n\n win.loadURL(\n\n url.format({\n\n pathname: path.join(__dirname, 'index.html'),\n\n protocol: 'file:',\n\n slashes: true\n\n })\n\n );\n\n }\n\n\n\n if (process.env.NODE_ENV !== 'production') {\n\n // Open DevTools, see https://github.com/electron/electron/issues/12438 for why we wait for dom-ready\n\n win.webContents.once('dom-ready', () => {\n\n win!.webContents.openDevTools({\n\n mode: 'detach'\n\n });\n\n });\n\n }\n\n\n\n win.on('closed', () => {\n\n win = null;\n\n });\n", "file_path": "client/src/main/main.ts", "rank": 36, "score": 25273.33564154961 }, { "content": "module.exports = 'test-file-stub';\n", "file_path": "client/mocks/fileMock.js", "rank": 37, "score": 20510.06446252007 }, { "content": "/// Starts watching directories\n\n/// Does initial processing\n\n/// Consumes watcher events to continue processing files\n\npub async fn start_tantivy(\n\n settings: &TantivyConfig,\n\n tantivy_wrapper: &mut TantivyWrapper,\n\n) -> tantivy::Result<()> {\n\n let directories = &settings.scan_directories;\n\n let directories_clone = directories.clone();\n\n\n\n let initial_processing_done = settings.initial_processing_file.exists();\n\n\n\n let analyzer = Analyzer::default();\n\n\n\n if 
!initial_processing_done {\n\n info!(\"Initial processing was not previously done, doing now\");\n\n let initial_processing_span = span!(Level::INFO, \"initial_processing\");\n\n let _initial_processing_entry = initial_processing_span.enter();\n\n for directory in directories {\n\n let walker = WalkDir::new(directory).into_iter();\n", "file_path": "src/tantivy_process.rs", "rank": 38, "score": 16.197210235903146 }, { "content": " for entry in walker.filter_entry(|e| !is_hidden(e)) {\n\n match entry {\n\n Err(_) => {\n\n error!(\"Failed to read entry from dir walker: {:?}\", entry);\n\n continue;\n\n }\n\n _ => {}\n\n }\n\n let entry = entry.unwrap();\n\n let entry_path = entry.path().to_str().unwrap();\n\n let process_file_span = span!(Level::INFO, \"processing_file\", entry_path);\n\n let _process_file_entry = process_file_span.enter();\n\n if !entry.file_type().is_dir() {\n\n let entry_path = entry.path();\n\n\n\n match entry_path.extension() {\n\n None => continue,\n\n Some(extension) => {\n\n if !analyzer.supported_extensions.contains(extension) {\n\n continue;\n", "file_path": "src/tantivy_process.rs", "rank": 39, "score": 15.75455922209716 }, { "content": " }\n\n }\n\n }\n\n\n\n let file_to_process = new_file_to_process(entry_path).await;\n\n\n\n tantivy_wrapper.process_file(file_to_process).await;\n\n tantivy_wrapper.index_writer.commit()?;\n\n }\n\n }\n\n }\n\n fs::File::create(&settings.initial_processing_file).unwrap();\n\n } else {\n\n info!(\"Initial processing already done! 
Starting a reader\");\n\n }\n\n\n\n start_watcher(&directories_clone, tantivy_wrapper).await;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "src/tantivy_process.rs", "rank": 40, "score": 14.897561797621881 }, { "content": "use crate::contracts::file_to_process::new_file_to_process;\n\nuse crate::custom_tantivy::{utils::build_schema, wrapper::*};\n\nuse crate::file_watcher::*;\n\nuse crate::indexers::Analyzer;\n\nuse crate::searcher::Searcher;\n\n\n\nuse tantivy::directory::*;\n\nuse tantivy::{Index, ReloadPolicy};\n\nuse tracing::{info, span, Level};\n\nuse walkdir::WalkDir;\n\n\n\nuse std::fs;\n\nuse std::path::PathBuf;\n\n\n\npub struct TantivyConfig {\n\n pub scan_directories: Vec<PathBuf>,\n\n pub initial_processing_file: PathBuf,\n\n pub index_path: PathBuf,\n\n}\n\n\n", "file_path": "src/tantivy_process.rs", "rank": 41, "score": 14.642000099366058 }, { "content": "};\n\n\n\n#[tokio::main]\n\nasync fn main() -> io::Result<()> {\n\n let config = get_config();\n\n\n\n setup_global_subscriber(&config);\n\n\n\n let local = tokio::task::LocalSet::new();\n\n\n\n // Get or create settings\n\n let settings = get_or_create_settings(&config);\n\n\n\n let (searcher, mut tantivy_wrapper) = tantivy_init(&settings).unwrap();\n\n\n\n let _tantivy_thread = tokio::spawn(async move {\n\n start_tantivy(&settings, &mut tantivy_wrapper)\n\n .await\n\n .unwrap();\n\n });\n", "file_path": "src/bin.rs", "rank": 42, "score": 12.196251389203786 }, { "content": " name: String::new(),\n\n body: strings,\n\n })\n\n })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::contracts::file_to_process::new_file_to_process;\n\n\n\n use std::path::Path;\n\n\n\n #[tokio::test(core_threads = 1)]\n\n async fn test_indexing_spreadsheet_file() {\n\n let test_file_path = Path::new(\"./test_files/Cats.xlsx\");\n\n let indexed_document = SpreadsheetIndexer\n\n .index_file(&new_file_to_process(test_file_path).await)\n\n .unwrap();\n", "file_path": 
"src/indexers/spreadsheet_indexer.rs", "rank": 43, "score": 11.967307284330742 }, { "content": " })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::contracts::file_to_process::new_file_to_process;\n\n\n\n use std::path::Path;\n\n\n\n #[tokio::test(core_threads = 1)]\n\n async fn test_indexing_text_file() {\n\n let test_file_path = Path::new(\"./test_files/file.txt\");\n\n let indexed_document = TextIndexer\n\n .index_file(&new_file_to_process(test_file_path).await)\n\n .unwrap();\n\n\n\n assert_eq!(indexed_document.name, \"file.txt\");\n\n assert_eq!(\n", "file_path": "src/indexers/text_indexer.rs", "rank": 44, "score": 11.695014069782573 }, { "content": "#[macro_use]\n\nextern crate lazy_static;\n\n#[macro_use]\n\nextern crate log;\n\n\n\npub mod config;\n\npub mod contracts;\n\npub mod custom_tantivy;\n\npub mod error_adapter;\n\npub mod indexers;\n\npub mod routes;\n\npub mod searcher;\n\npub mod tantivy_process;\n\n\n\nmod file_watcher;\n", "file_path": "src/lib.rs", "rank": 45, "score": 10.827738994546435 }, { "content": "use clap::{\n\n app_from_crate, crate_authors, crate_description, crate_name, crate_version, App, Arg,\n\n ArgMatches,\n\n};\n\n\n\nuse tracing::Level;\n\n\n\nuse std::path::{Path, PathBuf};\n\n\n\n#[derive(Debug)]\n\npub struct AppConfig {\n\n pub scan_directories: Vec<PathBuf>,\n\n pub verbosity: Level,\n\n pub port: u16,\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 46, "score": 10.749064155459989 }, { "content": "use tantivy::collector::{Count, TopDocs};\n\nuse tantivy::query::TermQuery;\n\nuse tantivy::schema::*;\n\nuse tantivy::DocAddress;\n\nuse tantivy::{IndexReader, IndexWriter};\n\nuse tracing::{info, instrument};\n\n\n\nuse crate::contracts::file_to_process::FileToProcess;\n\nuse crate::indexers::*;\n\n\n\nuse async_trait::async_trait;\n\nuse blake2b_simd;\n\n\n\nuse std::path::{Path, PathBuf};\n\n\n\nuse crate::custom_tantivy::{path_facet_convert::*, utils::destructure_schema};\n\n\n\npub struct 
TantivyWrapper {\n\n pub index_reader: IndexReader,\n\n pub index_writer: IndexWriter,\n", "file_path": "src/custom_tantivy/wrapper.rs", "rank": 47, "score": 10.264577903917129 }, { "content": "\n\n Ok(&\"\")\n\n })?;\n\n\n\n Ok(DocumentSchema {\n\n name: String::new(),\n\n body: body_res.to_string(),\n\n })\n\n })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::contracts::file_to_process::new_file_to_process;\n\n use std::path::Path;\n\n\n\n #[tokio::test(core_threads = 1)]\n\n async fn test_indexing_mobile_net_v2_file() {\n", "file_path": "src/indexers/mobile_net_v2_indexer.rs", "rank": 48, "score": 10.081522115158014 }, { "content": "use super::DocumentSchema;\n\nuse super::Indexer;\n\nuse crate::contracts::file_to_process::FileToProcess;\n\nuse crate::error_adapter::log_and_return_error_string;\n\nuse anyhow::{Context, Error, Result};\n\nuse std::ffi::{OsStr, OsString};\n\nuse std::io::Cursor;\n\nuse tracing::{span, Level};\n\n\n\npub struct CsvIndexer;\n\n\n\nimpl Indexer for CsvIndexer {\n\n fn supports_extension(&self, extension: &OsStr) -> bool {\n\n extension == OsStr::new(\"csv\")\n\n }\n\n\n\n fn supported_extensions(&self) -> Vec<OsString> {\n\n vec![OsString::from(\"csv\")]\n\n }\n\n\n", "file_path": "src/indexers/csv_indexer.rs", "rank": 49, "score": 9.488970920075946 }, { "content": "use super::DocumentSchema;\n\nuse super::Indexer;\n\nuse crate::contracts::file_to_process::FileToProcess;\n\nuse crate::error_adapter::log_and_return_error_string;\n\nuse anyhow::{Context, Error, Result};\n\nuse std::ffi::{OsStr, OsString};\n\nuse tracing::{span, Level};\n\n\n\nuse pdf_extract::*;\n\nuse regex::Regex;\n\n\n\npub struct PdfIndexer;\n\n\n\nimpl Indexer for PdfIndexer {\n\n fn supports_extension(&self, extension: &OsStr) -> bool {\n\n extension == OsStr::new(\"pdf\")\n\n }\n\n\n\n fn supported_extensions(&self) -> Vec<OsString> {\n\n vec![OsString::from(\"pdf\")]\n", "file_path": "src/indexers/pdf_indexer.rs", "rank": 50, 
"score": 9.475306240857876 }, { "content": "\n\n use std::path::Path;\n\n\n\n #[tokio::test(core_threads = 1)]\n\n async fn test_indexing_pdf_file() {\n\n let test_file_path = Path::new(\"./test_files/Cats.pdf\");\n\n let indexed_document = PdfIndexer\n\n .index_file(&new_file_to_process(test_file_path).await)\n\n .unwrap();\n\n\n\n assert_eq!(indexed_document.name, \"\");\n\n assert_eq!(indexed_document.body, \"\\n\\nCats \\n\\nThis is an example document about cats. \\n\\n \\n\\nCats have paws. \");\n\n }\n\n\n\n #[test]\n\n fn test_supports_pdf_extension() {\n\n assert_eq!(true, PdfIndexer.supports_extension(OsStr::new(\"pdf\")));\n\n assert_eq!(false, PdfIndexer.supports_extension(OsStr::new(\"docx\")))\n\n }\n\n}\n", "file_path": "src/indexers/pdf_indexer.rs", "rank": 51, "score": 9.260418476825762 }, { "content": " use super::*;\n\n use crate::contracts::file_to_process::new_file_to_process;\n\n\n\n use std::path::Path;\n\n\n\n #[tokio::test(core_threads = 1)]\n\n async fn test_indexing_pptx_file() {\n\n let test_file_path = Path::new(\"./test_files/Cats.pptx\");\n\n let indexed_document = PptxIndexer\n\n .index_file(&new_file_to_process(test_file_path).await)\n\n .unwrap();\n\n\n\n assert_eq!(indexed_document.name, \"\");\n\n assert!(indexed_document.body.contains(\"Cats\"));\n\n assert!(indexed_document.body.contains(\"quick\"));\n\n assert!(indexed_document.body.contains(\"story\"));\n\n assert!(indexed_document.body.contains(\"Paws\"));\n\n assert!(indexed_document.body.contains(\"cool\"));\n\n }\n\n\n\n #[test]\n\n fn test_supports_pptgx_extension() {\n\n assert_eq!(true, PptxIndexer.supports_extension(OsStr::new(\"pptx\")));\n\n assert_eq!(false, PptxIndexer.supports_extension(OsStr::new(\"ppt\")));\n\n }\n\n}\n", "file_path": "src/indexers/pptx_indexer.rs", "rank": 52, "score": 9.242934646734842 }, { "content": " }\n\n\n\n fn index_file(&self, file_to_process: &FileToProcess) -> Result<DocumentSchema> {\n\n let path = 
file_to_process.path.to_str().unwrap();\n\n span!(Level::INFO, \"spreadsheet_indexer: indexing spreadsheet file\", path).in_scope(|| {\n\n let mut workbook: Xlsx<_> = span!(Level::INFO, \"spreadsheet_indexer: Load from disk\").in_scope(|| {\n\n match open_workbook(&file_to_process.path) {\n\n Ok(workbook) => Ok(workbook),\n\n Err(e) => Err(anyhow::anyhow!(format!(\n\n \"spreadsheet_indexer: Failed to open workbook at path: {:?} with additional error info {:?}\",\n\n file_to_process.path,\n\n e\n\n )))\n\n }\n\n })?;\n\n\n\n let strings = span!(Level::INFO, \"spreadsheet_indexer: Process file\").in_scope(|| {\n\n workbook\n\n .sheet_names()\n\n .to_vec()\n", "file_path": "src/indexers/spreadsheet_indexer.rs", "rank": 53, "score": 9.218402075319739 }, { "content": "use super::DocumentSchema;\n\nuse super::Indexer;\n\nuse crate::contracts::file_to_process::FileToProcess;\n\nuse crate::error_adapter::log_and_return_error_string;\n\nuse std::ffi::{OsStr, OsString};\n\nuse std::io::Cursor;\n\n\n\nuse anyhow::{Context, Error, Result};\n\nuse exif::{Rational, Tag, Value};\n\nuse reverse_geocoder::{Locations, Record, ReverseGeocoder};\n\nuse tracing::{span, Level};\n\n\n\nlazy_static! 
{\n\n static ref LOCATIONS: Locations = Locations::from_memory();\n\n static ref GEOCODER: ReverseGeocoder<'static> = ReverseGeocoder::new(&LOCATIONS);\n\n}\n\n\n\npub struct ExifIndexer;\n\n\n\nimpl Indexer for ExifIndexer {\n", "file_path": "src/indexers/exif_indexer.rs", "rank": 54, "score": 9.216380749903845 }, { "content": "use super::DocumentSchema;\n\nuse super::Indexer;\n\nuse crate::contracts::file_to_process::FileToProcess;\n\nuse crate::error_adapter::log_and_return_error_string;\n\nuse anyhow::{Context, Result};\n\nuse std::ffi::{OsStr, OsString};\n\nuse std::str;\n\nuse tracing::{span, Level};\n\n\n\npub struct TextIndexer;\n\n\n\nimpl Indexer for TextIndexer {\n\n fn supports_extension(&self, extension: &OsStr) -> bool {\n\n extension == OsStr::new(\"txt\")\n\n }\n\n\n\n fn supported_extensions(&self) -> Vec<OsString> {\n\n vec![OsString::from(\"txt\")]\n\n }\n\n\n", "file_path": "src/indexers/text_indexer.rs", "rank": 55, "score": 9.143397836504313 }, { "content": " vec![OsString::from(\"pptx\")]\n\n }\n\n\n\n fn index_file(&self, file_to_process: &FileToProcess) -> Result<DocumentSchema> {\n\n let path = file_to_process.path.to_str().unwrap();\n\n span!(Level::INFO, \"pptx_indexer: indexing powerpoint file\", path).in_scope(|| {\n\n let mut total_text = String::new();\n\n let document = span!(Level::INFO, \"pptx_indexer: Load from disk\").in_scope(|| {\n\n match PPTXDocument::from_file(file_to_process.path.as_path()) {\n\n Ok(doc) => Ok(doc),\n\n Err(e) => Err(anyhow::anyhow!(format!(\n\n \"pptx_indexer: Failed to open PPTX Document from file at path: {:?} with additional error info {:?}\",\n\n file_to_process.path,\n\n e\n\n )))\n\n }\n\n })?;\n\n\n\n span!(Level::INFO, \"pptx_indexer: Process file\").in_scope(|| {\n\n for slide in document.slide_map.values() {\n", "file_path": "src/indexers/pptx_indexer.rs", "rank": 56, "score": 9.139119276218505 }, { "content": "use super::DocumentSchema;\n\nuse super::Indexer;\n\nuse 
crate::contracts::file_to_process::FileToProcess;\n\nuse crate::error_adapter::log_and_return_error_string;\n\nuse anyhow::Result;\n\nuse std::ffi::{OsStr, OsString};\n\nuse tracing::{span, Level};\n\n\n\nuse calamine::{open_workbook, Reader, Xlsx};\n\n\n\npub struct SpreadsheetIndexer;\n\n\n\nimpl Indexer for SpreadsheetIndexer {\n\n fn supports_extension(&self, extension: &OsStr) -> bool {\n\n // Only xslx for now\n\n extension == OsStr::new(\"xlsx\")\n\n }\n\n\n\n fn supported_extensions(&self) -> Vec<OsString> {\n\n vec![OsString::from(\"xlsx\")]\n", "file_path": "src/indexers/spreadsheet_indexer.rs", "rank": 57, "score": 8.964792713159556 }, { "content": " let re = Regex::new(r\"\\b \").with_context(|| {\n\n log_and_return_error_string(format!(\"pdf_indexer: Failed to create regex\"))\n\n })?;\n\n\n\n Ok(re.replace_all(&res, \"\").to_string())\n\n },\n\n )?;\n\n\n\n Ok(DocumentSchema {\n\n name: String::new(),\n\n body: clean,\n\n })\n\n })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::contracts::file_to_process::new_file_to_process;\n", "file_path": "src/indexers/pdf_indexer.rs", "rank": 58, "score": 8.875877267046738 }, { "content": "use super::DocumentSchema;\n\nuse super::Indexer;\n\nuse crate::contracts::file_to_process::FileToProcess;\n\nuse crate::error_adapter::log_and_return_error_string;\n\nuse anyhow::Result;\n\nuse std::ffi::{OsStr, OsString};\n\nuse tracing::{span, Level};\n\n\n\nuse msoffice_pptx::document::PPTXDocument;\n\nuse msoffice_pptx::pml::ShapeGroup;\n\nuse msoffice_shared::drawingml::TextRun;\n\n\n\npub struct PptxIndexer;\n\n\n\nimpl Indexer for PptxIndexer {\n\n fn supports_extension(&self, extension: &OsStr) -> bool {\n\n extension == OsStr::new(\"pptx\")\n\n }\n\n\n\n fn supported_extensions(&self) -> Vec<OsString> {\n", "file_path": "src/indexers/pptx_indexer.rs", "rank": 59, "score": 8.851785612543662 }, { "content": "\n\n // open image, resize it and make a Tensor out of it\n\n match 
image::io::Reader::with_format(Cursor::new(&file_to_process.contents), image_format).decode() {\n\n Ok(image) => Ok(image),\n\n Err(e) => Err(anyhow::anyhow!(format!(\n\n \"mobile_net_v2_indexer: Failed to load image with format with additional error info {:?}\",\n\n e\n\n )))\n\n }\n\n // image crate seems to be more tolerant to malformed image filies using the open function\n\n })?;\n\n\n\n let image: Tensor = span!(Level::INFO, \"mobile_net_v2_indexer: Pre-process image\").in_scope(|| {\n\n let resized =\n\n image::imageops::resize(&image, 224, 224, image::imageops::FilterType::Triangle);\n\n\n\n ndarray::Array4::from_shape_fn((1, 224, 224, 3), |(_, y, x, c)| {\n\n f32::from(resized[(x as _, y as _)][c]) / 255.0\n\n })\n\n .into()\n", "file_path": "src/indexers/mobile_net_v2_indexer.rs", "rank": 60, "score": 8.659539351464478 }, { "content": "\n\n let sys = actix_rt::System::run_in_tokio(\"server\", &local);\n\n\n\n let app_state = web::Data::new(AppState { searcher: searcher });\n\n\n\n let server_res = HttpServer::new(move || {\n\n App::new()\n\n .wrap(\n\n Cors::new() // <- Construct CORS middleware builder\n\n .send_wildcard()\n\n .finish(),\n\n )\n\n .app_data(app_state.clone())\n\n .configure(search::server_config)\n\n })\n\n .bind(format!(\"127.0.0.1:{}\", config.port))?\n\n .run()\n\n .await?;\n\n\n\n sys.await?;\n\n\n\n Ok(server_res)\n\n\n\n // if tantivy_thread.unwrap().join().is_err() {\n\n // error!(\"Failed to join tantivy thread\");\n\n // }\n\n}\n\n\n", "file_path": "src/bin.rs", "rank": 61, "score": 8.459880715058983 }, { "content": "use std::iter::FromIterator;\n\n\n\nuse anyhow::Result;\n\nuse once_cell::sync::Lazy;\n\nuse tracing::{instrument, span};\n\n\n\nuse crate::contracts::file_to_process::FileToProcess;\n\n\n\n/// The schema of the information that an Indexer extracts from a file\n\n#[derive(Debug)]\n\npub struct DocumentSchema {\n\n pub name: String,\n\n pub body: String,\n\n}\n\n\n\n/// Each Indexer needs to be able to say if a file 
extension is supported and extract information from a supported file\n", "file_path": "src/indexers/mod.rs", "rank": 62, "score": 8.269219512026277 }, { "content": "use super::DocumentSchema;\n\nuse super::Indexer;\n\nuse crate::contracts::file_to_process::FileToProcess;\n\nuse std::collections::HashMap;\n\nuse std::ffi::{OsStr, OsString};\n\nuse std::io::Cursor;\n\n\n\nuse anyhow::{Error, Result};\n\nuse image::ImageFormat;\n\nuse once_cell::sync::Lazy;\n\nuse tracing::{span, Level};\n\nuse tract_core::ndarray;\n\nuse tract_tensorflow::prelude::*;\n\n\n\nstatic MODEL: Lazy<TypedModel> = Lazy::new(|| {\n\n span!(Level::INFO, \"mobile_net_v2_indexer: Preparing typed model\").in_scope(|| {\n\n let mut model = span!(Level::INFO, \"mobile_net_v2_indexer: Loading model\").in_scope(|| {\n\n // load the model\n\n let model_bytes = include_bytes!(\"../../models/mobilenet_v2_1.4_224_frozen.pb\");\n\n let mut model_bytes = Cursor::new(&model_bytes[..]);\n", "file_path": "src/indexers/mobile_net_v2_indexer.rs", "rank": 63, "score": 8.15824207684021 }, { "content": " let path = file_to_process.path.to_str().unwrap();\n\n span!(Level::INFO, \"mobile_net_v2_indexer: indexing image file\", path).in_scope(|| {\n\n let t_model: &TypedModel = &*MODEL;\n\n let plan = span!(Level::INFO, \"mobile_net_v2_indexer: Creating plan\").in_scope(|| {\n\n match TypedSimplePlan::new(t_model) {\n\n Ok(plan) => Ok(plan),\n\n Err(e) => Err(anyhow::anyhow!(format!(\n\n \"mobile_net_v2_indexer: Failed to create plan for model with additional error info {:?}\",\n\n e\n\n )))\n\n }\n\n })?;\n\n\n\n let image = span!(Level::INFO, \"mobile_net_v2_indexer: Load image\").in_scope(|| {\n\n let image_format = match IMAGE_FORMATS.get(&file_to_process.path.extension().unwrap().to_os_string()) {\n\n Some(image_format) => Ok(image_format),\n\n None => Err(anyhow::anyhow!(format!(\n\n \"mobile_net_v2_indexer: Failed to recognize image format\",\n\n )))\n\n }?.clone();\n", "file_path": 
"src/indexers/mobile_net_v2_indexer.rs", "rank": 64, "score": 8.038545736530493 }, { "content": "use serde::{Deserialize, Serialize};\n\nuse tantivy::collector::TopDocs;\n\nuse tantivy::query::QueryParser;\n\nuse tantivy::schema::*;\n\nuse tantivy::Index;\n\nuse tantivy::IndexReader;\n\nuse tracing::info;\n\n\n\nuse crate::custom_tantivy::{path_facet_convert::TantivyConvert, utils::destructure_schema};\n\n\n\nuse std::path::*;\n\n\n\npub type QueryResponse = Vec<Response>;\n\n\n\n/// Each tantivy document is stored in this format to be communicated to the ui\n\n#[derive(Serialize, Deserialize, Debug)]\n\npub struct Response {\n\n /// File title\n\n pub title: String,\n\n /// Where the file can be found\n", "file_path": "src/searcher.rs", "rank": 65, "score": 7.993543382992768 }, { "content": " 4 => Level::TRACE,\n\n 3 => Level::DEBUG,\n\n 2 => Level::INFO,\n\n 1 => Level::WARN,\n\n 0 | _ => Level::ERROR,\n\n };\n\n\n\n let port = match matches.value_of(\"port\") {\n\n Some(port_val) => port_val.parse::<u16>().unwrap(),\n\n None => 8080,\n\n };\n\n\n\n AppConfig {\n\n scan_directories,\n\n verbosity,\n\n port,\n\n }\n\n}\n\n\n", "file_path": "src/config.rs", "rank": 66, "score": 7.917033874768737 }, { "content": " });\n\n\n\n // run the plan on the input\n\n let result = span!(Level::INFO, \"mobile_net_v2_indexer: Run image through model\").in_scope(||{\n\n match plan.run(tvec!(image)) {\n\n Ok(result) => Ok(result),\n\n Err(e) => Err(anyhow::anyhow!(format!(\n\n \"mobile_net_v2_indexer: Failed to run the image through the model with additional error info {:?}\",\n\n e\n\n )))\n\n }\n\n })?;\n\n\n\n let body_res = span!(Level::INFO, \"mobile_net_v2_indexer: Map model output\").in_scope(|| -> Result<&&str, Error> {\n\n // find and display the max value with its index\n\n let best = match result[0].to_array_view::<f32>() {\n\n Ok(arr) => Ok(arr),\n\n Err(e) => Err(anyhow::anyhow!(format!(\n\n \"mobile_net_v2_indexer: Failed to convert to array view with additional 
error info {:?}\",\n\n e\n", "file_path": "src/indexers/mobile_net_v2_indexer.rs", "rank": 67, "score": 7.882063958904774 }, { "content": " acc\n\n }))\n\n },\n\n )?;\n\n\n\n Ok(DocumentSchema {\n\n name: String::new(),\n\n body: headers,\n\n })\n\n })\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n use crate::contracts::file_to_process::new_file_to_process;\n\n use std::path::Path;\n\n\n\n #[tokio::test(core_threads = 1)]\n", "file_path": "src/indexers/csv_indexer.rs", "rank": 68, "score": 7.794494703841774 }, { "content": "mod exif_indexer;\n\nmod mobile_net_v2_indexer;\n\nmod pdf_indexer;\n\nmod text_indexer;\n\n// mod docx_indexer;\n\nmod csv_indexer;\n\nmod pptx_indexer;\n\nmod spreadsheet_indexer;\n\n\n\npub use self::exif_indexer::ExifIndexer;\n\npub use self::mobile_net_v2_indexer::MobileNetV2Indexer;\n\npub use self::pdf_indexer::PdfIndexer;\n\npub use self::text_indexer::TextIndexer;\n\n// pub use self::docx_indexer::DocxIndexer;\n\npub use self::csv_indexer::CsvIndexer;\n\npub use self::pptx_indexer::PptxIndexer;\n\npub use self::spreadsheet_indexer::SpreadsheetIndexer;\n\n\n\nuse std::collections::HashSet;\n\nuse std::ffi::{OsStr, OsString};\n", "file_path": "src/indexers/mod.rs", "rank": 69, "score": 7.0209686319786595 }, { "content": " exif::Reader::new(&mut Cursor::new(&file_to_process.contents)).with_context(|| {\n\n log_and_return_error_string(format!(\n\n \"exif_indexer: Failed to initialize exif reader for file at path: {:?}\",\n\n file_to_process.path\n\n ))\n\n })\n\n })?;\n\n\n\n let mut lat = 0.0;\n\n let mut lon = 0.0;\n\n\n\n span!(Level::INFO, \"exif_indexer: Processing exif fields\").in_scope(|| {\n\n let mut lat_direction = 0_u8 as char;\n\n let mut lon_direction = 0_u8 as char;\n\n for f in reader.fields() {\n\n match f.tag {\n\n Tag::GPSLatitudeRef => {\n\n if let Value::Ascii(val) = &f.value {\n\n lat_direction = val[0][0] as char;\n\n }\n", "file_path": "src/indexers/exif_indexer.rs", "rank": 70, "score": 
6.7787324328688525 }, { "content": " #[test]\n\n fn test_path_facet_conversion() {\n\n use super::*;\n\n use std::env;\n\n use std::fs::File;\n\n\n\n let mut current_dir = env::current_dir().unwrap();\n\n current_dir.push(\"Cargo.toml\");\n\n println!(\"{:?}\", current_dir);\n\n\n\n let current_dir_facet_string = current_dir.to_facet_value();\n\n println!(\"{:?}\", current_dir_facet_string);\n\n\n\n let facet = Facet::from_text(&current_dir_facet_string);\n\n println!(\"{:?}\", facet);\n\n\n\n let dir_from_facet = Path::from_facet_value(&facet);\n\n println!(\"{:?}\", dir_from_facet);\n\n\n\n File::open(dir_from_facet).unwrap();\n\n }\n\n}\n", "file_path": "src/custom_tantivy/path_facet_convert.rs", "rank": 71, "score": 6.742482271644496 }, { "content": " )\n\n .arg(\n\n Arg::with_name(\"port\")\n\n .short(\"p\")\n\n .default_value(\"8080\")\n\n .required(false)\n\n .validator(port_validator)\n\n .help(\"Port to host query resolver\"),\n\n )\n\n .get_matches();\n\n\n\n let scan_directories = matches\n\n .values_of(\"scan-directories\")\n\n .unwrap()\n\n .map(|path| PathBuf::from(path))\n\n .collect::<Vec<PathBuf>>();\n\n\n\n dbg!(matches.occurrences_of(\"verbose\"));\n\n\n\n let verbosity = match matches.occurrences_of(\"verbose\") {\n", "file_path": "src/config.rs", "rank": 72, "score": 6.5599016316590335 }, { "content": " // We're just going to leave this out for now\n\n fn index_file(&self, path: &Path) -> DocumentSchema {\n\n let mut docx = Docx::from_file(path).unwrap();\n\n dbg!(docx);\n\n\n\n DocumentSchema {\n\n name: String::new(),\n\n body: String::new(),\n\n }\n\n }\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::*;\n\n\n\n #[test]\n\n fn test_indexing_docx_file() {\n\n let test_file_path = Path::new(\"./test_files/Cats.docx\");\n\n let indexed_document = DocxIndexer.index_file(test_file_path);\n\n\n\n assert_eq!(indexed_document.name, \"file.txt\");\n\n assert_eq!(indexed_document.body, \"this is a file with some contents in it\");\n\n }\n\n}\n", 
"file_path": "src/indexers/docx_indexer.rs", "rank": 73, "score": 6.504559599824919 }, { "content": "use crate::searcher::Searcher;\n\n\n\npub struct AppState {\n\n pub searcher: Searcher,\n\n}\n", "file_path": "src/contracts/app_state.rs", "rank": 74, "score": 6.386146546331858 }, { "content": " async fn test_indexing_csv_file() {\n\n let test_file_path = Path::new(\"./test_files/data.csv\");\n\n let indexed_document = CsvIndexer\n\n .index_file(&new_file_to_process(test_file_path).await)\n\n .unwrap();\n\n\n\n assert_eq!(indexed_document.name, \"\");\n\n assert_eq!(\n\n indexed_document.body,\n\n \"first_name last_name street city state postal_code \"\n\n );\n\n }\n\n\n\n #[test]\n\n fn test_supports_csv_extension() {\n\n assert_eq!(true, CsvIndexer.supports_extension(OsStr::new(\"csv\")));\n\n assert_eq!(false, CsvIndexer.supports_extension(OsStr::new(\"xslx\")));\n\n }\n\n}\n", "file_path": "src/indexers/csv_indexer.rs", "rank": 75, "score": 6.375857061565486 }, { "content": " fn index_file(&self, file_to_process: &FileToProcess) -> Result<DocumentSchema> {\n\n let path = file_to_process.path.to_str().unwrap();\n\n span!(Level::INFO, \"csv_indexer: indexing csv file\", path).in_scope(|| {\n\n let mut reader = span!(Level::INFO, \"csv_indexer: Loading csv from memory\")\n\n .in_scope(|| csv::Reader::from_reader(Cursor::new(&file_to_process.contents)));\n\n\n\n let headers = span!(Level::INFO, \"csv_indexer: Processing csv info\").in_scope(\n\n || -> Result<String, Error> {\n\n Ok(reader\n\n .headers()\n\n .with_context(|| {\n\n log_and_return_error_string(format!(\n\n \"csv_indexer: Failed to get headers from csv at path: {:?}\",\n\n file_to_process.path\n\n ))\n\n })?\n\n .iter()\n\n .fold(String::new(), |mut acc, x| {\n\n acc.push_str(&x);\n\n acc.push_str(\" \");\n", "file_path": "src/indexers/csv_indexer.rs", "rank": 76, "score": 6.327098242568615 }, { "content": "#[macro_use]\n\nextern crate criterion;\n\nextern crate podium_lib;\n\n\n\nuse 
podium_lib::contracts::file_to_process::FileToProcess;\n\nuse podium_lib::indexers::*;\n\n\n\nuse criterion::black_box;\n\nuse criterion::Criterion;\n\n\n\nuse std::path::Path;\n\n\n", "file_path": "benches/my_benchmark.rs", "rank": 77, "score": 5.825502594989935 }, { "content": " pub schema: Schema,\n\n}\n\n\n\nimpl TantivyWrapper {\n\n pub fn new(index_reader: IndexReader, index_writer: IndexWriter, schema: Schema) -> Self {\n\n TantivyWrapper {\n\n index_reader,\n\n index_writer,\n\n schema,\n\n }\n\n }\n\n\n\n #[instrument(skip(self, hash))]\n\n pub fn update_doc_by_hash(\n\n &self,\n\n entry_path: &Path,\n\n hash: &blake2b_simd::Hash,\n\n ) -> Option<Document> {\n\n let searcher = self.index_reader.searcher();\n\n let location_facet = &entry_path.to_facet_value();\n", "file_path": "src/custom_tantivy/wrapper.rs", "rank": 78, "score": 5.764755606459627 }, { "content": " location_value_facet, &location_facet\n\n );\n\n if location_value_facet == &location_facet {\n\n old_location_index = Some(index)\n\n }\n\n }\n\n }\n\n info!(\"Index to remove: {:?}\", old_location_index);\n\n match old_location_index {\n\n Some(index) => {\n\n locations.remove(index);\n\n }\n\n None => {\n\n panic!(\"Tried to remove location {0:?} from document {1:?} but the location was not found\", path_buf, old_doc);\n\n }\n\n }\n\n\n\n let mut new_doc = Document::default();\n\n new_doc_for_update(&mut new_doc, &old_doc, locations, &self.schema);\n\n\n\n info!(\"The new doc after modifications {:?}\", new_doc);\n\n self.index_writer.add_document(new_doc.clone());\n\n }\n\n }\n\n }\n\n}\n\n\n\n#[async_trait]\n", "file_path": "src/custom_tantivy/wrapper.rs", "rank": 79, "score": 5.740995669068257 }, { "content": "use super::Indexer;\n\nuse super::DocumentSchema;\n\nuse std::path::Path;\n\nuse std::ffi::{OsStr, OsString};\n\nuse std::fs;\n\n\n\nuse docx::prelude::*;\n\n\n\npub struct DocxIndexer;\n\n\n\nimpl Indexer for DocxIndexer {\n\n fn supports_extension(&self, extension: &OsStr) -> bool 
{\n\n extension == OsStr::new(\"docx\")\n\n }\n\n\n\n fn supported_extensions(self) -> Vec<OsString> {\n\n vec![OsString::from(\"docx\")]\n\n }\n\n\n\n // Parsing Cats.docx panics the `docx` library...\n", "file_path": "src/indexers/docx_indexer.rs", "rank": 80, "score": 5.236632628136072 }, { "content": "\n\n // Check if the file has already been indexed\n\n if let Some(doc) = self.update_doc_by_hash(path, &file_to_process.hash) {\n\n // TODO: Since this file has been seen before, we should simply add the location of this current field to process to the document\n\n return Some(doc);\n\n }\n\n\n\n // We're indexing the file for the first time\n\n let results = analyze(\n\n entry_path.extension().unwrap().to_os_string(),\n\n file_to_process,\n\n )\n\n .await;\n\n if !results.is_empty() {\n\n info!(\"This is a new file, we need to process it\");\n\n let title = &results[0].name;\n\n let body = results.iter().fold(String::new(), |mut acc, x| {\n\n acc.push_str(&x.body);\n\n acc.push_str(\" \");\n\n acc\n", "file_path": "src/custom_tantivy/wrapper.rs", "rank": 81, "score": 5.198709599434166 }, { "content": "# podium client\n\n\n\n## Geting started\n\n\n\nTo get started with running the client you probably want the podium daemon running. \n\n\n\n1. `cargo run -- -s ./test_files/` from the root of the repo\n\n2. `npm install` from this directory\n\n3. `npm run start-dev` to start up the client\n\n\n\n\n\n## Usage\n\nBoth processes have to be started **simultaneously** in different console tabs:\n\n\n\n```bash\n\nnpm run start-renderer-dev\n\nnpm run start-main-dev\n\n```\n\n\n\nThis will start the application with hot-reload so you can instantly start developing your application.\n\n\n\nYou can also run do the following to start both in a single process:\n\n\n\n```bash\n\nnpm run start-dev\n\n```\n\n\n\n## Packaging\n\nWe use [Electron builder](https://www.electron.build/) to build and package the application. 
By default you can run the following to package for your current platform:\n\n\n\n```bash\n\nnpm run dist\n\n```\n\n\n\nThis will create a installer for your platform in the `releases` folder.\n\n\n\nYou can make builds for specific platforms (or multiple platforms) by using the options found [here](https://www.electron.build/cli). E.g. building for all platforms (Windows, Mac, Linux):\n\n\n\n```bash\n\nnpm run dist -- -mwl\n\n```\n\n\n\n## Husky and Prettier\n\nThis project comes with both Husky and Prettier setup to ensure a consistent code style. \n\n\n\nTo change the code style, you can change the configuration in `.prettierrc`. \n\n\n\nIn case you want to get rid of this, you can removing the following from `package.json`:\n\n\n\n1. Remove `precommit` from the `scripts` section\n\n1. Remove the `lint-staged` section\n\n1. Remove `lint-staged`, `prettier`, `eslint-config-prettier`, and `husky` from the `devDependencies`\n\n\n\nAlso remove all mentions of Prettier from the `extends` section in `.eslintrc.json`.\n", "file_path": "client/README.md", "rank": 82, "score": 5.109828334149552 }, { "content": "extern crate podium_lib;\n\nuse podium_lib::config::{get_config, AppConfig};\n\nuse podium_lib::contracts::app_state::*;\n\nuse podium_lib::routes::search;\n\nuse podium_lib::tantivy_process::{start_tantivy, tantivy_init, TantivyConfig};\n\n\n\nuse std::io;\n\n\n\nuse actix_cors::Cors;\n\nuse actix_web::{web, App, HttpServer};\n\nuse app_dirs::*;\n\nuse tracing::info;\n\nuse tracing_subscriber::{fmt, layer::SubscriberExt, prelude::*, registry::Registry};\n\n\n\nuse std::{fs::File, io::BufWriter};\n\nuse tracing_flame::FlameLayer;\n\n\n\nconst APP_INFO: AppInfo = AppInfo {\n\n name: \"Podium\",\n\n author: \"Teodor Voinea\",\n", "file_path": "src/bin.rs", "rank": 83, "score": 5.0027416121259645 }, { "content": " supported_extensions: supported_extensions,\n\n }\n\n }\n\n}\n\n\n\nstatic INDEXERS: Lazy<Vec<Box<dyn Indexer>>> = Lazy::new(|| {\n\n let indexers: 
Vec<Box<dyn Indexer>> = vec![\n\n Box::new(TextIndexer),\n\n Box::new(ExifIndexer),\n\n Box::new(PdfIndexer),\n\n Box::new(MobileNetV2Indexer),\n\n Box::new(PptxIndexer),\n\n Box::new(CsvIndexer),\n\n Box::new(SpreadsheetIndexer),\n\n ];\n\n indexers\n\n});\n\n\n\n#[instrument(skip(file_to_process))]\n\npub async fn analyze(extension: OsString, file_to_process: FileToProcess) -> Vec<DocumentSchema> {\n", "file_path": "src/indexers/mod.rs", "rank": 84, "score": 4.861967528615521 }, { "content": " let test_file_path = Path::new(\"./test_files/IMG_2551.jpeg\");\n\n let indexed_document = MobileNetV2Indexer\n\n .index_file(&new_file_to_process(test_file_path).await)\n\n .unwrap();\n\n\n\n assert_eq!(indexed_document.name, \"\");\n\n assert_eq!(indexed_document.body, \"eggnog\");\n\n }\n\n\n\n #[test]\n\n fn test_supports_mobile_net_v2_extension() {\n\n assert_eq!(\n\n true,\n\n MobileNetV2Indexer.supports_extension(OsStr::new(\"tif\"))\n\n );\n\n assert_eq!(\n\n true,\n\n MobileNetV2Indexer.supports_extension(OsStr::new(\"tiff\"))\n\n );\n\n assert_eq!(\n", "file_path": "src/indexers/mobile_net_v2_indexer.rs", "rank": 85, "score": 4.6986177178382835 }, { "content": " let processing_task = tokio::task::spawn_blocking(move || {\n\n INDEXERS\n\n .iter()\n\n .filter(|indexer| indexer.supports_extension(extension.as_os_str()))\n\n .filter_map(|indexer| indexer.index_file(&file_to_process).ok())\n\n .collect()\n\n });\n\n\n\n processing_task.await.unwrap()\n\n}\n", "file_path": "src/indexers/mod.rs", "rank": 86, "score": 4.475430052600861 }, { "content": " if lat_direction != 'N' {\n\n lat *= -1.0;\n\n }\n\n\n\n if lon_direction != 'E' {\n\n lon *= -1.0;\n\n }\n\n });\n\n\n\n let res = span!(Level::INFO, \"exif_indexer: Look up the coordinates\").in_scope(|| -> Result<&&Record, Error>{\n\n Ok(\n\n GEOCODER.search(&[lat, lon])\n\n .with_context(|| log_and_return_error_string(format!(\"exif_indexer: Failed to search for location in geocoder: lat = {:?} lon = {:?}\", lat, 
lon)))?\n\n .get(0)\n\n .with_context(|| log_and_return_error_string(format!(\"exif_indexer: Failed to get first result from search in geocoder\")))?\n\n .1\n\n )\n\n })?;\n\n\n\n Ok(DocumentSchema {\n\n name: String::new(),\n\n body: format!(\"{} {} {} {}\", res.name, res.admin1, res.admin2, res.admin3),\n\n })\n\n })\n\n }\n\n}\n\n\n", "file_path": "src/indexers/exif_indexer.rs", "rank": 87, "score": 4.456477296522824 }, { "content": " tract_tensorflow::tensorflow()\n\n .model_for_read(&mut model_bytes)\n\n .unwrap()\n\n // .expect(&log_and_return_error_string(\n\n // \"mobile_net_v2_indexer: Failed to read model from bytes\".to_string(),\n\n // ));\n\n });\n\n\n\n // specify input type and shape\n\n model\n\n .set_input_fact(\n\n 0,\n\n InferenceFact::dt_shape(f32::datum_type(), tvec!(1, 224, 224, 3)),\n\n )\n\n .unwrap();\n\n\n\n // .expect(&log_and_return_error_string(\n\n // \"mobile_net_v2_indexer: Failed to specify input type and shape for model\"\n\n // .to_string(),\n\n // ));\n", "file_path": "src/indexers/mobile_net_v2_indexer.rs", "rank": 88, "score": 4.445741853085871 }, { "content": " }\n\n\n\n fn index_file(&self, file_to_process: &FileToProcess) -> Result<DocumentSchema> {\n\n let path = file_to_process.path.to_str().unwrap();\n\n span!(Level::INFO, \"pdf_indexer: indexing pdf file\", path).in_scope(|| {\n\n let res = span!(Level::INFO, \"pdf_indexer: Loading from disk and processing\")\n\n .in_scope(|| {\n\n // TODO: the resulting string from this is poorly extracted\n\n // better than nothing but it should be fixed\n\n extract_text(&file_to_process.path).with_context(|| {\n\n log_and_return_error_string(format!(\n\n \"pdf_indexer: Failed to extract text from pdf at path: {:?}\",\n\n file_to_process.path\n\n ))\n\n })\n\n })?;\n\n\n\n let clean = span!(Level::INFO, \"pdf_indexer: Processing file\").in_scope(\n\n || -> Result<String, Error> {\n\n // THIS IS A BAD HACK\n", "file_path": "src/indexers/pdf_indexer.rs", "rank": 89, "score": 
4.375415634526604 }, { "content": "<p align=\"center\">\n\n <img src=\"./assets/Podium.svg\">\n\n</p>\n\n\n\n\n\n# Podium\n\n\n\nPodium is a smart indexer and searcher for your files.\n\n\n\nA search engine doesn't expect you to know the name of the website you're searching for, you only search for the content and it does the work of finding the website.\n\nPodium is the same, you don't need to know the name of the file you're looking for, or where you saved it. You only need to know what it's about. Podium does the work of figuring out where it is.\n\n\n\nPodium doesn't interfere with how you already organize your files, but it can help you when you're stuck.\n\n\n\n## Features\n\n\n\n* **Private** - Your files and data never leave your computer\n\n* **Flexible** - Works on all desktop environments\n\n* **Fast** - New files are processed in under 1 second\n\n* **Extensible** - Built with plugins at its core\n\n* **Smart** - Uses modern AI models to accurately identify the content of your files\n\n\n\n### Supported file types\n\n\n\n| Type | Extensions |\n\n|--------------------------|--------------------------------------------------|\n\n| Image - object detection | .tif, .tiff, .jpg, .jpeg, .png, .bmp, .ico, .gif |\n\n| Image - exif metadata | .tif, .tiff, .jpg, .jpeg |\n\n| Spreadsheed | .csv, .xlsx |\n\n| Text | .txt, .docx |\n\n| Slideshow | .pptx |\n\n| PDF | .pdf |\n\n\n\n\n", "file_path": "README.md", "rank": 90, "score": 4.186024368207136 }, { "content": " fn index_file(&self, file_to_process: &FileToProcess) -> Result<DocumentSchema> {\n\n span!(Level::INFO, \"text_indexer: indexing text file\").in_scope(|| {\n\n let name = file_to_process\n\n .path\n\n .file_name()\n\n .unwrap()\n\n .to_string_lossy()\n\n .to_string();\n\n\n\n let body = str::from_utf8(&file_to_process.contents).with_context(|| {\n\n log_and_return_error_string(format!(\n\n \"text_indexer: Failed to read file to string at path: {:?}\",\n\n file_to_process.path\n\n ))\n\n })?;\n\n\n\n 
Ok(DocumentSchema {\n\n name: name,\n\n body: body.to_string(),\n\n })\n", "file_path": "src/indexers/text_indexer.rs", "rank": 91, "score": 4.133493383361462 }, { "content": " acc.push_str(\" \");\n\n acc\n\n });\n\n let location = retrieved_doc\n\n .get_all(location)\n\n .iter()\n\n .filter_map(|val| match &val {\n\n Value::Facet(loc_str) => Some(Path::from_facet_value(loc_str)),\n\n _ => None,\n\n })\n\n .collect();\n\n let body = retrieved_doc\n\n .get_all(body)\n\n .iter()\n\n .map(|val| val.text())\n\n .fold(String::new(), |mut acc, x| {\n\n acc.push_str(x.unwrap());\n\n acc.push_str(\" \");\n\n acc\n\n });\n", "file_path": "src/searcher.rs", "rank": 92, "score": 3.914557645813537 }, { "content": " if count > 1 {\n\n for (_score, doc_address) in top_docs {\n\n let retrieved_doc = searcher.doc(doc_address).unwrap();\n\n error!(\"{:?}\", retrieved_doc);\n\n }\n\n panic!(\"More than 1 document with the same hash!!!\");\n\n }\n\n None\n\n }\n\n }\n\n\n\n #[instrument(skip(self, location_field, location_facet))]\n\n pub fn get_doc_by_location(\n\n &self,\n\n location_field: Field,\n\n location_facet: &Facet,\n\n ) -> Option<DocAddress> {\n\n let searcher = self.index_reader.searcher();\n\n let query = TermQuery::new(\n\n Term::from_facet(location_field, location_facet),\n", "file_path": "src/custom_tantivy/wrapper.rs", "rank": 93, "score": 3.7847263556708373 }, { "content": "pub mod path_facet_convert;\n\npub mod utils;\n\npub mod wrapper;\n", "file_path": "src/custom_tantivy/mod.rs", "rank": 94, "score": 3.756489627631873 }, { "content": "use tantivy::schema::*;\n\nuse tracing::info;\n\n\n\nuse blake2b_simd::blake2b;\n\n\n", "file_path": "src/custom_tantivy/utils.rs", "rank": 95, "score": 3.6921024032668948 }, { "content": " .iter()\n\n .filter_map(|sheet_name| workbook.worksheet_range(sheet_name))\n\n .filter_map(Result::ok)\n\n .map(|range| {\n\n range\n\n .used_cells()\n\n .filter(|(_, _, cell)| cell.is_string())\n\n .filter_map(|(_, _, cell)| 
cell.get_string())\n\n .map(std::string::ToString::to_string)\n\n .collect::<Vec<String>>()\n\n })\n\n .flatten()\n\n .fold(String::new(), |mut acc, x| {\n\n acc.push_str(&x);\n\n acc.push_str(\" \");\n\n acc\n\n })\n\n });\n\n\n\n Ok(DocumentSchema {\n", "file_path": "src/indexers/spreadsheet_indexer.rs", "rank": 96, "score": 3.663313411397991 }, { "content": ");\n\n\n\n#[cfg(target_os = \"windows\")]\n\ncriterion_group!(\n\n benches,\n\n bench_indexing_csv_file,\n\n bench_indexing_exif_file,\n\n bench_indexing_pdf_file,\n\n bench_indexing_exif_file,\n\n bench_indexing_pptx_file,\n\n bench_indexing_spreadsheet_file,\n\n bench_indexing_text_file,\n\n);\n\n\n\ncriterion_main!(benches);\n", "file_path": "benches/my_benchmark.rs", "rank": 97, "score": 3.584371410286732 }, { "content": "pub mod app_state;\n\npub mod file_to_process;\n", "file_path": "src/contracts/mod.rs", "rank": 98, "score": 3.5830496754245584 }, { "content": " pub location: Vec<PathBuf>,\n\n /// The content that was indexed from the file\n\n pub body: String,\n\n}\n\npub struct Searcher {\n\n index: Index,\n\n index_reader: IndexReader,\n\n schema: Schema,\n\n}\n\n\n\nimpl Searcher {\n\n pub fn new(index: Index, index_reader: IndexReader, schema: Schema) -> Self {\n\n Searcher {\n\n index,\n\n index_reader,\n\n schema,\n\n }\n\n }\n\n\n\n pub fn search(&self, query_string: String) -> QueryResponse {\n", "file_path": "src/searcher.rs", "rank": 99, "score": 3.5306186455304926 } ]
Rust
tests/integration/cli/tests/create_exe.rs
psy-repos-rust/wasmer
75a98ab171bee010b9a7cd0f836919dc4519dcaf
use anyhow::{bail, Context}; use std::fs; use std::io::prelude::*; use std::path::PathBuf; use std::process::Command; use wasmer_integration_tests_cli::*; fn create_exe_test_wasm_path() -> String { format!("{}/{}", C_ASSET_PATH, "qjs.wasm") } const JS_TEST_SRC_CODE: &[u8] = b"function greet(name) { return JSON.stringify('Hello, ' + name); }; print(greet('World'));\n"; #[derive(Debug)] struct WasmerCreateExe { current_dir: PathBuf, wasmer_path: PathBuf, wasm_path: PathBuf, native_executable_path: PathBuf, compiler: Compiler, } impl Default for WasmerCreateExe { fn default() -> Self { #[cfg(not(windows))] let native_executable_path = PathBuf::from("wasm.out"); #[cfg(windows)] let native_executable_path = PathBuf::from("wasm.exe"); Self { current_dir: std::env::current_dir().unwrap(), wasmer_path: get_wasmer_path(), wasm_path: PathBuf::from(create_exe_test_wasm_path()), native_executable_path, compiler: Compiler::Cranelift, } } } impl WasmerCreateExe { fn run(&self) -> anyhow::Result<()> { let output = Command::new(&self.wasmer_path) .current_dir(&self.current_dir) .arg("create-exe") .arg(&self.wasm_path.canonicalize()?) .arg(&self.compiler.to_flag()) .arg("-o") .arg(&self.native_executable_path) .output()?; if !output.status.success() { bail!( "wasmer create-exe failed with: stdout: {}\n\nstderr: {}", std::str::from_utf8(&output.stdout) .expect("stdout is not utf8! need to handle arbitrary bytes"), std::str::from_utf8(&output.stderr) .expect("stderr is not utf8! 
need to handle arbitrary bytes") ); } Ok(()) } } #[test] fn create_exe_works() -> anyhow::Result<()> { let temp_dir = tempfile::tempdir()?; let operating_dir: PathBuf = temp_dir.path().to_owned(); let wasm_path = operating_dir.join(create_exe_test_wasm_path()); #[cfg(not(windows))] let executable_path = operating_dir.join("wasm.out"); #[cfg(windows)] let executable_path = operating_dir.join("wasm.exe"); WasmerCreateExe { current_dir: operating_dir.clone(), wasm_path, native_executable_path: executable_path.clone(), compiler: Compiler::Cranelift, ..Default::default() } .run() .context("Failed to create-exe wasm with Wasmer")?; let result = run_code( &operating_dir, &executable_path, &["--eval".to_string(), "function greet(name) { return JSON.stringify('Hello, ' + name); }; print(greet('World'));".to_string()], ) .context("Failed to run generated executable")?; let result_lines = result.lines().collect::<Vec<&str>>(); assert_eq!(result_lines, vec!["\"Hello, World\""],); Ok(()) } #[test] fn create_exe_works_with_file() -> anyhow::Result<()> { let temp_dir = tempfile::tempdir()?; let operating_dir: PathBuf = temp_dir.path().to_owned(); let wasm_path = operating_dir.join(create_exe_test_wasm_path()); #[cfg(not(windows))] let executable_path = operating_dir.join("wasm.out"); #[cfg(windows)] let executable_path = operating_dir.join("wasm.exe"); WasmerCreateExe { current_dir: operating_dir.clone(), wasm_path, native_executable_path: executable_path.clone(), compiler: Compiler::Cranelift, ..Default::default() } .run() .context("Failed to create-exe wasm with Wasmer")?; { let mut f = fs::OpenOptions::new() .write(true) .create_new(true) .open(operating_dir.join("test.js"))?; f.write_all(JS_TEST_SRC_CODE)?; } let result = run_code( &operating_dir, &executable_path, &[ "--dir=.".to_string(), "--script".to_string(), "test.js".to_string(), ], ) .context("Failed to run generated executable")?; let result_lines = result.lines().collect::<Vec<&str>>(); assert_eq!(result_lines, 
vec!["\"Hello, World\""],); let result = run_code( &operating_dir, &executable_path, &[ "--mapdir=abc:.".to_string(), "--script".to_string(), "abc/test.js".to_string(), ], ) .context("Failed to run generated executable")?; let result_lines = result.lines().collect::<Vec<&str>>(); assert_eq!(result_lines, vec!["\"Hello, World\""],); Ok(()) }
use anyhow::{bail, Context}; use std::fs; use std::io::prelude::*; use std::path::PathBuf; use std::process::Command; use wasmer_integration_tests_cli::*; fn create_exe_test_wasm_path() -> String { format!("{}/{}", C_ASSET_PATH, "qjs.wasm") } const JS_TEST_SRC_CODE: &[u8] = b"function greet(name) { return JSON.stringify('Hello, ' + name); }; print(greet('World'));\n"; #[derive(Debug)] struct WasmerCreateExe { current_dir: PathBuf, wasmer_path: PathBuf, wasm_path: PathBuf, native_executable_path: PathBuf, compiler: Compiler, } impl Default for WasmerCreateExe { fn default() -> Self { #[cfg(not(windows))] let native_executable_path = PathBuf::from("wasm.out"); #[cfg(windows)] let native_executable_path = PathBuf::from("wasm.exe"); Self { current_dir: std::env::current_dir().unwrap(), wasmer_path: get_wasmer_path(), wasm_path: PathBuf::from(create_exe_test_wasm_path()), native_executable_path, compiler: Compiler::Cranelift, } } } impl WasmerCreateExe { fn run(&self) -> anyhow::Result<()> { let output = Command::new(&self.wasmer_path) .current_dir(&self.current_dir) .arg("create-exe") .arg(&self.wasm_path.canonicalize()?) .arg(&self.compiler.to_flag()) .arg("-o") .arg(&self.native_executable_path) .output()?; if !output.status.success() { bail!( "wasmer create-exe failed with: stdout: {}\n\nstderr: {}", std::str::from_utf8(&output.stdout) .expect("stdout is not utf8! need to handl
} #[test] fn create_exe_works() -> anyhow::Result<()> { let temp_dir = tempfile::tempdir()?; let operating_dir: PathBuf = temp_dir.path().to_owned(); let wasm_path = operating_dir.join(create_exe_test_wasm_path()); #[cfg(not(windows))] let executable_path = operating_dir.join("wasm.out"); #[cfg(windows)] let executable_path = operating_dir.join("wasm.exe"); WasmerCreateExe { current_dir: operating_dir.clone(), wasm_path, native_executable_path: executable_path.clone(), compiler: Compiler::Cranelift, ..Default::default() } .run() .context("Failed to create-exe wasm with Wasmer")?; let result = run_code( &operating_dir, &executable_path, &["--eval".to_string(), "function greet(name) { return JSON.stringify('Hello, ' + name); }; print(greet('World'));".to_string()], ) .context("Failed to run generated executable")?; let result_lines = result.lines().collect::<Vec<&str>>(); assert_eq!(result_lines, vec!["\"Hello, World\""],); Ok(()) } #[test] fn create_exe_works_with_file() -> anyhow::Result<()> { let temp_dir = tempfile::tempdir()?; let operating_dir: PathBuf = temp_dir.path().to_owned(); let wasm_path = operating_dir.join(create_exe_test_wasm_path()); #[cfg(not(windows))] let executable_path = operating_dir.join("wasm.out"); #[cfg(windows)] let executable_path = operating_dir.join("wasm.exe"); WasmerCreateExe { current_dir: operating_dir.clone(), wasm_path, native_executable_path: executable_path.clone(), compiler: Compiler::Cranelift, ..Default::default() } .run() .context("Failed to create-exe wasm with Wasmer")?; { let mut f = fs::OpenOptions::new() .write(true) .create_new(true) .open(operating_dir.join("test.js"))?; f.write_all(JS_TEST_SRC_CODE)?; } let result = run_code( &operating_dir, &executable_path, &[ "--dir=.".to_string(), "--script".to_string(), "test.js".to_string(), ], ) .context("Failed to run generated executable")?; let result_lines = result.lines().collect::<Vec<&str>>(); assert_eq!(result_lines, vec!["\"Hello, World\""],); let result = run_code( 
&operating_dir, &executable_path, &[ "--mapdir=abc:.".to_string(), "--script".to_string(), "abc/test.js".to_string(), ], ) .context("Failed to run generated executable")?; let result_lines = result.lines().collect::<Vec<&str>>(); assert_eq!(result_lines, vec!["\"Hello, World\""],); Ok(()) }
e arbitrary bytes"), std::str::from_utf8(&output.stderr) .expect("stderr is not utf8! need to handle arbitrary bytes") ); } Ok(()) }
function_block-function_prefixed
[ { "content": "fn compile_and_compare(name: &str, engine: impl Engine, wasm: &[u8]) {\n\n let store = Store::new(&engine);\n\n\n\n // compile for first time\n\n let module = Module::new(&store, wasm).unwrap();\n\n let first = module.serialize().unwrap();\n\n\n\n // compile for second time\n\n let module = Module::new(&store, wasm).unwrap();\n\n let second = module.serialize().unwrap();\n\n\n\n if first != second {\n\n panic!(\"non-deterministic compilation from {}\", name);\n\n }\n\n}\n\n\n\nfuzz_target!(|module: ConfiguredModule<NoImportsConfig>| {\n\n let wasm_bytes = module.to_bytes();\n\n\n\n let mut compiler = Cranelift::default();\n", "file_path": "fuzz/fuzz_targets/deterministic.rs", "rank": 0, "score": 371224.59680024406 }, { "content": "/// Subroutine to instantiate the loggers\n\npub fn set_up_logging(verbose: u8) -> Result<(), String> {\n\n let colors_line = ColoredLevelConfig::new()\n\n .error(Color::Red)\n\n .warn(Color::Yellow)\n\n .trace(Color::BrightBlack);\n\n let should_color = wasmer_should_print_color();\n\n\n\n let colors_level = colors_line.info(Color::Green);\n\n let level = match verbose {\n\n 1 => DebugLevel::Debug,\n\n _ => DebugLevel::Trace,\n\n };\n\n let dispatch = fern::Dispatch::new()\n\n .level(level)\n\n .chain({\n\n let base = if should_color {\n\n fern::Dispatch::new().format(move |out, message, record| {\n\n let time = time::SystemTime::now().duration_since(time::UNIX_EPOCH).expect(\"Can't get time\");\n\n out.finish(format_args!(\n\n \"{color_line}[{seconds}.{millis} {level} {target}{color_line}]{ansi_close} {message}\",\n", "file_path": "lib/cli-compiler/src/logging.rs", "rank": 1, "score": 301403.3373889966 }, { "content": "/// Extract a valid Rust identifier from the stem of a path.\n\npub fn extract_name(path: impl AsRef<Path>) -> String {\n\n path.as_ref()\n\n .file_stem()\n\n .expect(\"filename should have a stem\")\n\n .to_str()\n\n .expect(\"filename should be representable as a string\")\n\n .replace('-', \"_\")\n\n 
.replace('/', \"_\")\n\n}\n\n\n", "file_path": "tests/lib/test-generator/src/lib.rs", "rank": 2, "score": 291313.337389779 }, { "content": "fn retrieve_alias_pathbuf(alias: &str, real_dir: &str) -> Result<(String, PathBuf)> {\n\n let pb = PathBuf::from(&real_dir);\n\n if let Ok(pb_metadata) = pb.metadata() {\n\n if !pb_metadata.is_dir() {\n\n bail!(\"\\\"{}\\\" exists, but it is not a directory\", &real_dir);\n\n }\n\n } else {\n\n bail!(\"Directory \\\"{}\\\" does not exist\", &real_dir);\n\n }\n\n Ok((alias.to_string(), pb))\n\n}\n\n\n", "file_path": "lib/cli-compiler/src/utils.rs", "rank": 3, "score": 274167.5068021449 }, { "content": "fn get_stdio_output(rx: &mpsc::Receiver<Vec<u8>>) -> anyhow::Result<String> {\n\n let mut stdio = Vec::new();\n\n while let Ok(mut buf) = rx.try_recv() {\n\n stdio.append(&mut buf);\n\n }\n\n let stdout_str = std::str::from_utf8(&stdio[..])?;\n\n #[cfg(target_os = \"windows\")]\n\n // normalize line endings\n\n return Ok(stdout_str.replace(\"\\r\\n\", \"\\n\"));\n\n\n\n #[cfg(not(target_os = \"windows\"))]\n\n return Ok(stdout_str.to_string());\n\n}\n\n\n\n#[allow(dead_code)]\n\nimpl<'a> WasiTest<'a> {\n\n /// Turn a WASI WAST string into a list of tokens.\n\n pub fn lex_string(wast: &'a str) -> parser::Result<ParseBuffer<'a>> {\n\n ParseBuffer::new(wast)\n\n }\n", "file_path": "tests/lib/wast/src/wasi_wast.rs", "rank": 4, "score": 272355.96480613865 }, { "content": "pub fn print_info_on_error(output: &std::process::Output, context: &str) {\n\n if !output.status.success() {\n\n println!(\"{}\", context);\n\n println!(\n\n \"stdout:\\n{}\",\n\n std::str::from_utf8(&output.stdout[..]).unwrap()\n\n );\n\n eprintln!(\n\n \"stderr:\\n{}\",\n\n std::str::from_utf8(&output.stderr[..]).unwrap()\n\n );\n\n }\n\n}\n", "file_path": "tests/wasi-wast/src/util.rs", "rank": 5, "score": 263644.17739417544 }, { "content": "#[derive(WasmerEnv, Clone)]\n\nstruct MyTupleStructWithAttribute(#[wasmer(export(name = \"memory\"))] LazyInit<Memory>, 
u32);\n\n\n", "file_path": "lib/derive/tests/basic.rs", "rank": 6, "score": 257432.70360425723 }, { "content": "/// Subroutine to instantiate the loggers\n\npub fn set_up_logging(verbose: u8) -> Result<(), String> {\n\n let colors_line = ColoredLevelConfig::new()\n\n .error(Color::Red)\n\n .warn(Color::Yellow)\n\n .trace(Color::BrightBlack);\n\n let should_color = wasmer_should_print_color();\n\n\n\n let colors_level = colors_line.info(Color::Green);\n\n let level = match verbose {\n\n 1 => DebugLevel::Debug,\n\n _ => DebugLevel::Trace,\n\n };\n\n let dispatch = fern::Dispatch::new()\n\n .level(level)\n\n .chain({\n\n let base = if should_color {\n\n fern::Dispatch::new().format(move |out, message, record| {\n\n let time = time::SystemTime::now().duration_since(time::UNIX_EPOCH).expect(\"Can't get time\");\n\n out.finish(format_args!(\n\n \"{color_line}[{seconds}.{millis} {level} {target}{color_line}]{ansi_close} {message}\",\n", "file_path": "lib/cli/src/logging.rs", "rank": 7, "score": 257154.58513261887 }, { "content": "/// Check if the provided bytes are wasm-like\n\npub fn is_wasm(bytes: impl AsRef<[u8]>) -> bool {\n\n bytes.as_ref().starts_with(b\"\\0asm\")\n\n}\n", "file_path": "lib/types/src/utils.rs", "rank": 8, "score": 252402.2021679074 }, { "content": "fn impl_wasmer_env_for_struct(\n\n name: &Ident,\n\n data: &DataStruct,\n\n generics: &Generics,\n\n _attrs: &[Attribute],\n\n) -> TokenStream {\n\n let (trait_methods, helper_methods) = derive_struct_fields(data);\n\n let lifetimes_and_generics = generics.params.clone();\n\n let where_clause = generics.where_clause.clone();\n\n quote! 
{\n\n impl < #lifetimes_and_generics > ::wasmer::WasmerEnv for #name < #lifetimes_and_generics > #where_clause{\n\n #trait_methods\n\n }\n\n\n\n #[allow(dead_code)]\n\n impl < #lifetimes_and_generics > #name < #lifetimes_and_generics > #where_clause {\n\n #helper_methods\n\n }\n\n }\n\n}\n\n\n", "file_path": "lib/derive/src/env/mod.rs", "rank": 9, "score": 251468.60846535102 }, { "content": "fn compile_and_compare(wasm: &[u8]) -> Result<()> {\n\n let store = Default::default();\n\n\n\n // compile for first time\n\n let module = Module::new(&store, wasm)?;\n\n let first = module.serialize()?;\n\n\n\n // compile for second time\n\n let module = Module::new(&store, wasm)?;\n\n let second = module.serialize()?;\n\n\n\n assert!(first == second);\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/compilers/deterministic.rs", "rank": 10, "score": 247351.46218313638 }, { "content": "#[derive(WasmerEnv)]\n\nstruct BadExportArgRawString {\n\n #[wasmer(export(\"hello\"))] //~ Failed to parse `wasmer` attribute: unexpected token\n\n memory: LazyInit<Memory>,\n\n}\n\n\n", "file_path": "lib/derive/tests/compile-fail/bad-export-arg.rs", "rank": 11, "score": 241435.50350204692 }, { "content": "/// Parses a mapdir from a string\n\npub fn parse_mapdir(entry: &str) -> Result<(String, PathBuf)> {\n\n // We try first splitting by `::`\n\n if let [alias, real_dir] = entry.split(\"::\").collect::<Vec<&str>>()[..] {\n\n retrieve_alias_pathbuf(alias, real_dir)\n\n }\n\n // And then we try splitting by `:` (for compatibility with previous API)\n\n else if let [alias, real_dir] = entry.split(':').collect::<Vec<&str>>()[..] {\n\n retrieve_alias_pathbuf(alias, real_dir)\n\n } else {\n\n bail!(\n\n \"Directory mappings must consist of two paths separate by a `::` or `:`. 
Found {}\",\n\n &entry\n\n )\n\n }\n\n}\n\n\n", "file_path": "lib/cli-compiler/src/utils.rs", "rank": 12, "score": 241130.13522534785 }, { "content": "/// Parses an environment variable.\n\npub fn parse_envvar(entry: &str) -> Result<(String, String)> {\n\n let entry = entry.trim();\n\n\n\n match entry.find('=') {\n\n None => bail!(\n\n \"Environment variable must be of the form `<name>=<value>`; found `{}`\",\n\n &entry\n\n ),\n\n\n\n Some(0) => bail!(\n\n \"Environment variable is not well formed, the `name` is missing in `<name>=<value>`; got `{}`\",\n\n &entry\n\n ),\n\n\n\n Some(position) if position == entry.len() - 1 => bail!(\n\n \"Environment variable is not well formed, the `value` is missing in `<name>=<value>`; got `{}`\",\n\n &entry\n\n ),\n\n\n\n Some(position) => Ok((entry[..position].into(), entry[position + 1..].into())),\n", "file_path": "lib/cli-compiler/src/utils.rs", "rank": 13, "score": 235091.03007555177 }, { "content": "fn retrieve_alias_pathbuf(alias: &str, real_dir: &str) -> Result<(String, PathBuf)> {\n\n let pb = PathBuf::from(&real_dir);\n\n if let Ok(pb_metadata) = pb.metadata() {\n\n if !pb_metadata.is_dir() {\n\n bail!(\"\\\"{}\\\" exists, but it is not a directory\", &real_dir);\n\n }\n\n } else {\n\n bail!(\"Directory \\\"{}\\\" does not exist\", &real_dir);\n\n }\n\n Ok((alias.to_string(), pb))\n\n}\n\n\n", "file_path": "lib/cli/src/utils.rs", "rank": 14, "score": 233735.13329440114 }, { "content": "fn staticlib_engine_test_wasm_path() -> String {\n\n format!(\"{}/{}\", C_ASSET_PATH, \"qjs.wasm\")\n\n}\n\n\n\n/// Data used to run the `wasmer compile` command.\n", "file_path": "tests/integration/cli/tests/compile.rs", "rank": 15, "score": 233411.42926962674 }, { "content": "fn to_compile_error(err: impl Error) -> CompileError {\n\n CompileError::Codegen(format!(\"{}\", err))\n\n}\n\n\n\n/// Serializable struct that represents the compiled metadata.\n\n#[derive(Serialize, Deserialize, Debug, RkyvSerialize, RkyvDeserialize, Archive, 
PartialEq, Eq)]\n\npub struct ModuleMetadata {\n\n pub compile_info: CompileModuleInfo,\n\n pub function_frame_info: Option<PrimaryMap<LocalFunctionIndex, CompiledFunctionFrameInfo>>,\n\n pub prefix: String,\n\n pub data_initializers: Box<[OwnedDataInitializer]>,\n\n // The function body lengths (used to find function by address)\n\n pub function_body_lengths: PrimaryMap<LocalFunctionIndex, u64>,\n\n pub cpu_features: u64,\n\n}\n\n\n\npub struct ModuleMetadataSymbolRegistry<'a> {\n\n pub prefix: &'a String,\n\n}\n\n\n", "file_path": "lib/engine-dylib/src/serialize.rs", "rank": 16, "score": 231485.9085001527 }, { "content": "fn to_compile_error(err: impl Error) -> CompileError {\n\n CompileError::Codegen(err.to_string())\n\n}\n\n\n\nconst WASMER_METADATA_SYMBOL: &[u8] = b\"WASMER_METADATA\";\n\n\n\nimpl DylibArtifact {\n\n // Mach-O header in iOS/Mac\n\n #[allow(dead_code)]\n\n const MAGIC_HEADER_MH_CIGAM_64: &'static [u8] = &[207, 250, 237, 254];\n\n\n\n // ELF Magic header for Linux (32 bit)\n\n #[allow(dead_code)]\n\n const MAGIC_HEADER_ELF_32: &'static [u8] = &[0x7f, b'E', b'L', b'F', 1];\n\n\n\n // ELF Magic header for Linux (64 bit)\n\n #[allow(dead_code)]\n\n const MAGIC_HEADER_ELF_64: &'static [u8] = &[0x7f, b'E', b'L', b'F', 2];\n\n\n\n // COFF Magic header for Windows (64 bit)\n", "file_path": "lib/engine-dylib/src/artifact.rs", "rank": 17, "score": 231485.9085001527 }, { "content": "#[allow(dead_code)]\n\nfn to_compile_error(err: impl Error) -> CompileError {\n\n CompileError::Codegen(format!(\"{}\", err))\n\n}\n\n\n\n#[allow(dead_code)]\n\nconst WASMER_METADATA_SYMBOL: &[u8] = b\"WASMER_METADATA\";\n\n\n\nimpl StaticlibArtifact {\n\n // Mach-O header in Mac\n\n #[allow(dead_code)]\n\n const MAGIC_HEADER_MH_CIGAM_64: &'static [u8] = &[207, 250, 237, 254];\n\n\n\n // ELF Magic header for Linux (32 bit)\n\n #[allow(dead_code)]\n\n const MAGIC_HEADER_ELF_32: &'static [u8] = &[0x7f, b'E', b'L', b'F', 1];\n\n\n\n // ELF Magic header for Linux (64 bit)\n\n 
#[allow(dead_code)]\n\n const MAGIC_HEADER_ELF_64: &'static [u8] = &[0x7f, b'E', b'L', b'F', 2];\n\n\n", "file_path": "lib/engine-staticlib/src/artifact.rs", "rank": 18, "score": 231485.9085001527 }, { "content": "fn add_wasmer_version(pre_header: &mut String) {\n\n pre_header.push_str(&format!(\n\n r#\"\n\n// This file corresponds to the following Wasmer version.\n\n#define WASMER_VERSION \"{full}\"\n\n#define WASMER_VERSION_MAJOR {major}\n\n#define WASMER_VERSION_MINOR {minor}\n\n#define WASMER_VERSION_PATCH {patch}\n\n#define WASMER_VERSION_PRE \"{pre}\"\n\n\"#,\n\n full = env!(\"CARGO_PKG_VERSION\"),\n\n major = env!(\"CARGO_PKG_VERSION_MAJOR\"),\n\n minor = env!(\"CARGO_PKG_VERSION_MINOR\"),\n\n patch = env!(\"CARGO_PKG_VERSION_PATCH\"),\n\n pre = env!(\"CARGO_PKG_VERSION_PRE\"),\n\n ));\n\n}\n\n\n", "file_path": "lib/c-api/build.rs", "rank": 19, "score": 229997.94498531538 }, { "content": "struct ShortNames {}\n\n\n\nimpl SymbolRegistry for ShortNames {\n\n fn symbol_to_name(&self, symbol: Symbol) -> String {\n\n match symbol {\n\n Symbol::LocalFunction(index) => format!(\"f{}\", index.index()),\n\n Symbol::Section(index) => format!(\"s{}\", index.index()),\n\n Symbol::FunctionCallTrampoline(index) => format!(\"t{}\", index.index()),\n\n Symbol::DynamicFunctionTrampoline(index) => format!(\"d{}\", index.index()),\n\n }\n\n }\n\n\n\n fn name_to_symbol(&self, name: &str) -> Option<Symbol> {\n\n if name.len() < 2 {\n\n return None;\n\n }\n\n let (ty, idx) = name.split_at(1);\n\n let idx = idx.parse::<u32>().ok()?;\n\n match ty.chars().next().unwrap() {\n\n 'f' => Some(Symbol::LocalFunction(LocalFunctionIndex::from_u32(idx))),\n", "file_path": "lib/compiler-llvm/src/compiler.rs", "rank": 21, "score": 224794.6328618029 }, { "content": "#[derive(Debug)]\n\nstruct WasmerCompile {\n\n /// The directory to operate in.\n\n current_dir: PathBuf,\n\n /// Path to wasmer executable used to run the command.\n\n wasmer_path: PathBuf,\n\n /// Path to the Wasm file to 
compile.\n\n wasm_path: PathBuf,\n\n /// Path to the static object file produced by compiling the Wasm.\n\n wasm_object_path: PathBuf,\n\n /// Path to output the generated header to.\n\n header_output_path: PathBuf,\n\n /// Compiler with which to compile the Wasm.\n\n compiler: Compiler,\n\n /// Engine with which to use to generate the artifacts.\n\n engine: Engine,\n\n}\n\n\n\nimpl Default for WasmerCompile {\n\n fn default() -> Self {\n\n #[cfg(not(windows))]\n", "file_path": "tests/integration/cli/tests/compile.rs", "rank": 22, "score": 224568.0387527261 }, { "content": "fn to_compile_error(err: impl std::error::Error) -> CompileError {\n\n CompileError::Codegen(format!(\"{}\", err))\n\n}\n\n\n\npub struct FuncTranslator {\n\n ctx: Context,\n\n target_machine: TargetMachine,\n\n abi: Box<dyn Abi>,\n\n}\n\n\n\nimpl FuncTranslator {\n\n pub fn new(target_machine: TargetMachine) -> Self {\n\n let abi = get_abi(&target_machine);\n\n Self {\n\n ctx: Context::create(),\n\n target_machine,\n\n abi,\n\n }\n\n }\n\n\n", "file_path": "lib/compiler-llvm/src/translator/code.rs", "rank": 23, "score": 223906.3591954523 }, { "content": "/// Get the path to the `wasmer` executable to be used in this test.\n\npub fn get_wasmer_path() -> PathBuf {\n\n PathBuf::from(env::var(\"WASMER_TEST_WASMER_PATH\").unwrap_or_else(|_| WASMER_PATH.to_string()))\n\n}\n", "file_path": "tests/integration/cli/src/assets.rs", "rank": 24, "score": 223904.668586389 }, { "content": "fn ensure_unwind_offset(offset: u32) -> Option<u8> {\n\n if offset > 255 {\n\n panic!(\"function prologues cannot exceed 255 bytes in size for Windows x64\");\n\n }\n\n Some(offset as u8)\n\n}\n", "file_path": "lib/compiler-singlepass/src/unwind_winx64.rs", "rank": 25, "score": 214829.5850151745 }, { "content": "#[derive(WasmerEnv)]\n\nstruct BadAttribute {\n\n #[wasmer(extraport)] //~ Unexpected identifier `extraport`. 
Expected `export`.\n\n memory: LazyInit<Memory>,\n\n}\n\n\n", "file_path": "lib/derive/tests/compile-fail/bad-attribute.rs", "rank": 26, "score": 205103.86889242625 }, { "content": "fn get_env_var(var_name: &str) -> Result<String, env::VarError> {\n\n env::var(var_name)\n\n}\n\n\n", "file_path": "lib/wasi/tests/envvar.rs", "rank": 27, "score": 205083.19817547442 }, { "content": "/// A helper to extract all the `Type` listings of each variable in `params`\n\n/// for only parameters the return true for `is_wasm`, typically paired with\n\n/// `is_wasm_return` or `is_wasm_parameter`.\n\npub fn wasm_param_types(params: &[ir::AbiParam], is_wasm: impl Fn(usize) -> bool) -> Vec<Type> {\n\n let mut ret = Vec::with_capacity(params.len());\n\n for (i, param) in params.iter().enumerate() {\n\n if is_wasm(i) {\n\n ret.push(param.value_type);\n\n }\n\n }\n\n ret\n\n}\n", "file_path": "lib/compiler-cranelift/src/translator/code_translator.rs", "rank": 28, "score": 199979.09696103807 }, { "content": "fn get_env_var(var_name: &str) -> Result<String, env::VarError> {\n\n #[cfg(not(target = \"wasi\"))]\n\n match var_name {\n\n \"DOG\" => Ok(\"1\".to_string()),\n\n \"CAT\" => Ok(\"2\".to_string()),\n\n _ => Err(env::VarError::NotPresent),\n\n }\n\n #[cfg(target = \"wasi\")]\n\n env::var(var_name)\n\n}\n\n\n", "file_path": "tests/wasi-wast/wasi/tests/envvar.rs", "rank": 29, "score": 199919.3908460114 }, { "content": "fn impls_wasmer_env<T: WasmerEnv>() -> bool {\n\n true\n\n}\n\n\n", "file_path": "lib/derive/tests/basic.rs", "rank": 30, "score": 199729.0261261069 }, { "content": "fn main() {}\n", "file_path": "lib/derive/tests/compile-fail/bad-attribute.rs", "rank": 31, "score": 199580.90946633316 }, { "content": "fn main() {}\n", "file_path": "lib/derive/tests/compile-fail/no-lazy-init.rs", "rank": 32, "score": 199580.90946633316 }, { "content": "#[derive(WasmerEnv)]\n\nstruct BadExportArg {\n\n #[wasmer(export(this_is_not_a_real_argument = \"hello, world\"))]\n\n //~ Unrecognized 
argument in export options: expected `name` found `this_is_not_a_real_argument\n\n memory: LazyInit<Memory>,\n\n}\n\n\n", "file_path": "lib/derive/tests/compile-fail/bad-export-arg.rs", "rank": 33, "score": 198621.132760451 }, { "content": "#[derive(WasmerEnv)]\n\nstruct ExportNotWrappedInLazyInit {\n\n #[wasmer(export)]\n\n memory: Memory, //~ WasmerEnv derive expects all `export`s to be wrapped in `LazyInit`\n\n}\n\n\n", "file_path": "lib/derive/tests/compile-fail/no-lazy-init.rs", "rank": 34, "score": 198621.132760451 }, { "content": "/// Parses a mapdir from a string\n\npub fn parse_mapdir(entry: &str) -> Result<(String, PathBuf)> {\n\n // We try first splitting by `::`\n\n if let [alias, real_dir] = entry.split(\"::\").collect::<Vec<&str>>()[..] {\n\n retrieve_alias_pathbuf(alias, real_dir)\n\n }\n\n // And then we try splitting by `:` (for compatibility with previous API)\n\n else if let [alias, real_dir] = entry.split(':').collect::<Vec<&str>>()[..] {\n\n retrieve_alias_pathbuf(alias, real_dir)\n\n } else {\n\n bail!(\n\n \"Directory mappings must consist of two paths separate by a `::` or `:`. Found {}\",\n\n &entry\n\n )\n\n }\n\n}\n\n\n", "file_path": "lib/cli/src/utils.rs", "rank": 35, "score": 197538.4622995809 }, { "content": "/// The main function for the Wasmer CLI tool.\n\npub fn wasmer_main() {\n\n // We allow windows to print properly colors\n\n #[cfg(windows)]\n\n colored::control::set_virtual_terminal(true).unwrap();\n\n\n\n // We try to run wasmer with the normal arguments.\n\n // Eg. `wasmer <SUBCOMMAND>`\n\n // In case that fails, we fallback trying the Run subcommand directly.\n\n // Eg. 
`wasmer myfile.wasm --dir=.`\n\n //\n\n // In case we've been run as wasmer-binfmt-interpreter myfile.wasm args,\n\n // we assume that we're registered via binfmt_misc\n\n let args = std::env::args().collect::<Vec<_>>();\n\n let command = args.get(1);\n\n let options = {\n\n match command.unwrap_or(&\"\".to_string()).as_ref() {\n\n \"compile\" | \"config\" | \"help\" | \"inspect\" | \"validate\" => WasmerCLIOptions::from_args(),\n\n _ => {\n\n WasmerCLIOptions::from_iter_safe(args.iter()).unwrap_or_else(|e| {\n\n match e.kind {\n", "file_path": "lib/cli-compiler/src/cli.rs", "rank": 36, "score": 196954.06796889778 }, { "content": "fn main() {}\n", "file_path": "lib/derive/tests/compile-fail/bad-export-arg.rs", "rank": 37, "score": 196140.81191465555 }, { "content": "fn parse_function_name_subsection(\n\n mut naming_reader: NamingReader<'_>,\n\n) -> Option<HashMap<FunctionIndex, &str>> {\n\n let mut function_names = HashMap::new();\n\n for _ in 0..naming_reader.get_count() {\n\n let Naming { index, name } = naming_reader.read().ok()?;\n\n if index == std::u32::MAX {\n\n // We reserve `u32::MAX` for our own use.\n\n return None;\n\n }\n\n\n\n if function_names\n\n .insert(FunctionIndex::from_u32(index), name)\n\n .is_some()\n\n {\n\n // If the function index has been previously seen, then we\n\n // break out of the loop and early return `None`, because these\n\n // should be unique.\n\n return None;\n\n }\n\n }\n\n Some(function_names)\n\n}\n", "file_path": "lib/compiler/src/translator/sections.rs", "rank": 38, "score": 196121.3540611665 }, { "content": "/// Parses an environment variable.\n\npub fn parse_envvar(entry: &str) -> Result<(String, String)> {\n\n let entry = entry.trim();\n\n\n\n match entry.find('=') {\n\n None => bail!(\n\n \"Environment variable must be of the form `<name>=<value>`; found `{}`\",\n\n &entry\n\n ),\n\n\n\n Some(0) => bail!(\n\n \"Environment variable is not well formed, the `name` is missing in `<name>=<value>`; got `{}`\",\n\n 
&entry\n\n ),\n\n\n\n Some(position) if position == entry.len() - 1 => bail!(\n\n \"Environment variable is not well formed, the `value` is missing in `<name>=<value>`; got `{}`\",\n\n &entry\n\n ),\n\n\n\n Some(position) => Ok((entry[..position].into(), entry[position + 1..].into())),\n", "file_path": "lib/cli/src/utils.rs", "rank": 39, "score": 190510.21337394317 }, { "content": "/// OSX and BSD have completely different values, we must translate from emscripten's Linuxy\n\n/// value into one that we can pass to native syscalls\n\nfn translate_socket_name_flag(name: i32) -> i32 {\n\n match name {\n\n 2 => libc::SO_REUSEADDR,\n\n 3 => libc::SO_TYPE,\n\n 4 => libc::SO_ERROR,\n\n 5 => libc::SO_DONTROUTE,\n\n 6 => libc::SO_BROADCAST,\n\n 7 => libc::SO_SNDBUF,\n\n 8 => libc::SO_RCVBUF,\n\n 9 => libc::SO_KEEPALIVE,\n\n 10 => libc::SO_OOBINLINE,\n\n 13 => libc::SO_LINGER,\n\n 18 => libc::SO_RCVLOWAT,\n\n 19 => libc::SO_SNDLOWAT,\n\n 20 => libc::SO_RCVTIMEO,\n\n 21 => libc::SO_SNDTIMEO,\n\n // SO_DEBUG missing\n\n 30 => libc::SO_ACCEPTCONN,\n\n otherwise => otherwise,\n\n }\n\n}\n\n\n", "file_path": "lib/emscripten/src/syscalls/unix.rs", "rank": 40, "score": 188612.86721818036 }, { "content": "#[cfg(all(feature = \"compiler\", any(feature = \"universal\", feature = \"dylib\")))]\n\nfn get_default_compiler_config() -> Box<dyn CompilerConfig> {\n\n cfg_if! {\n\n if #[cfg(feature = \"cranelift\")] {\n\n Box::new(wasmer_compiler_cranelift::Cranelift::default())\n\n } else if #[cfg(feature = \"llvm\")] {\n\n Box::new(wasmer_compiler_llvm::LLVM::default())\n\n } else if #[cfg(feature = \"singlepass\")] {\n\n Box::new(wasmer_compiler_singlepass::Singlepass::default())\n\n } else {\n\n compile_error!(\"Please enable one of the compiler backends\")\n\n }\n\n }\n\n}\n\n\n\ncfg_if! 
{\n\n if #[cfg(all(feature = \"universal\", feature = \"compiler\"))] {\n\n /// Creates a new Universal engine with the default compiler.\n\n ///\n\n /// # Example\n\n ///\n", "file_path": "lib/c-api/src/wasm_c_api/engine.rs", "rank": 41, "score": 188560.10567682615 }, { "content": "/// Parses the Name section of the wasm module.\n\npub fn parse_name_section<'data>(\n\n mut names: NameSectionReader<'data>,\n\n environ: &mut ModuleEnvironment<'data>,\n\n) -> WasmResult<()> {\n\n while let Ok(subsection) = names.read() {\n\n match subsection {\n\n wasmparser::Name::Function(function_subsection) => {\n\n if let Some(function_names) = function_subsection\n\n .get_map()\n\n .ok()\n\n .and_then(parse_function_name_subsection)\n\n {\n\n for (index, name) in function_names {\n\n environ.declare_function_name(index, name)?;\n\n }\n\n }\n\n }\n\n wasmparser::Name::Module(module) => {\n\n if let Ok(name) = module.get_name() {\n\n environ.declare_module_name(name)?;\n", "file_path": "lib/compiler/src/translator/sections.rs", "rank": 42, "score": 188316.90815993023 }, { "content": "/// Whether or not Wasmer should print with color\n\npub fn wasmer_should_print_color() -> bool {\n\n env::var(\"WASMER_COLOR\")\n\n .ok()\n\n .and_then(|inner| inner.parse::<bool>().ok())\n\n .unwrap_or_else(|| atty::is(atty::Stream::Stdout))\n\n}\n\n\n", "file_path": "lib/cli-compiler/src/utils.rs", "rank": 43, "score": 188104.70907347256 }, { "content": "#[compiler_test(metering)]\n\nfn metering_fail(config: crate::Config) -> Result<()> {\n\n assert!(run_add_with_limit(config, 3).is_err());\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/compilers/metering.rs", "rank": 44, "score": 188020.00017720604 }, { "content": "#[compiler_test(imports)]\n\nfn multi_use_host_fn_manages_memory_correctly(config: crate::Config) -> Result<()> {\n\n let store = config.store();\n\n let module = get_module2(&store)?;\n\n\n\n #[allow(dead_code)]\n\n #[derive(Clone)]\n\n struct Env {\n\n memory: LazyInit<Memory>,\n\n 
}\n\n\n\n impl WasmerEnv for Env {\n\n fn init_with_instance(&mut self, instance: &Instance) -> Result<(), HostEnvInitError> {\n\n let memory = instance.exports.get_memory(\"memory\")?.clone();\n\n self.memory.initialize(memory);\n\n Ok(())\n\n }\n\n }\n\n\n\n let env: Env = Env {\n\n memory: LazyInit::default(),\n", "file_path": "tests/compilers/imports.rs", "rank": 45, "score": 187358.23251133028 }, { "content": "fn test_no_start_wat_path() -> String {\n\n format!(\"{}/{}\", ASSET_PATH, \"no_start.wat\")\n\n}\n\n\n", "file_path": "tests/integration/cli/tests/run.rs", "rank": 46, "score": 187236.45934636588 }, { "content": "fn wasi_test_wasm_path() -> String {\n\n format!(\"{}/{}\", C_ASSET_PATH, \"qjs.wasm\")\n\n}\n\n\n", "file_path": "tests/integration/cli/tests/run.rs", "rank": 47, "score": 187236.45934636588 }, { "content": "fn test_no_imports_wat_path() -> String {\n\n format!(\"{}/{}\", ASSET_PATH, \"fib.wat\")\n\n}\n\n\n", "file_path": "tests/integration/cli/tests/run.rs", "rank": 48, "score": 187236.45934636588 }, { "content": "/// This function trys to find an entry in mapdir\n\n/// translating paths into their correct value\n\npub fn get_cstr_path(ctx: &EmEnv, path: *const i8) -> Option<std::ffi::CString> {\n\n use std::collections::VecDeque;\n\n\n\n let path_str =\n\n unsafe { std::ffi::CStr::from_ptr(path as *const _).to_str().unwrap() }.to_string();\n\n let data = get_emscripten_data(ctx);\n\n let path = PathBuf::from(path_str);\n\n let mut prefix_added = false;\n\n let mut components = path.components().collect::<VecDeque<_>>();\n\n // TODO(mark): handle absolute/non-canonical/non-relative paths too (this\n\n // functionality should be shared among the abis)\n\n if components.len() == 1 {\n\n components.push_front(std::path::Component::CurDir);\n\n prefix_added = true;\n\n }\n\n let mut cumulative_path = PathBuf::new();\n\n for c in components.into_iter() {\n\n cumulative_path.push(c);\n\n if let Some(val) = data\n\n .mapped_dirs\n", "file_path": 
"lib/emscripten/src/utils.rs", "rank": 49, "score": 185182.0539550598 }, { "content": "#[compiler_test(traps)]\n\nfn test_trap_return(config: crate::Config) -> Result<()> {\n\n let store = config.store();\n\n let wat = r#\"\n\n (module\n\n (func $hello (import \"\" \"hello\"))\n\n (func (export \"run\") (call $hello))\n\n )\n\n \"#;\n\n\n\n let module = Module::new(&store, wat)?;\n\n let hello_type = FunctionType::new(vec![], vec![]);\n\n let hello_func = Function::new(&store, &hello_type, |_| Err(RuntimeError::new(\"test 123\")));\n\n\n\n let instance = Instance::new(\n\n &module,\n\n &imports! {\n\n \"\" => {\n\n \"hello\" => hello_func\n\n }\n\n },\n", "file_path": "tests/compilers/traps.rs", "rank": 50, "score": 184733.78692999016 }, { "content": "#[compiler_test(imports)]\n\nfn static_function_that_fails(config: crate::Config) -> Result<()> {\n\n let store = config.store();\n\n let wat = r#\"\n\n (import \"host\" \"0\" (func))\n\n\n\n (func $foo\n\n call 0\n\n )\n\n (start $foo)\n\n \"#;\n\n\n\n let module = Module::new(&store, &wat)?;\n\n\n\n let result = Instance::new(\n\n &module,\n\n &imports! 
{\n\n \"host\" => {\n\n \"0\" => Function::new_native(&store, || -> Result<Infallible, RuntimeError> {\n\n Err(RuntimeError::new(\"oops\"))\n\n }),\n", "file_path": "tests/compilers/imports.rs", "rank": 51, "score": 184725.5381257903 }, { "content": "/// Compute an `ir::ExternalName` for a given wasm function index.\n\npub fn get_function_name(func_index: FunctionIndex) -> ir::ExternalName {\n\n ir::ExternalName::user(0, func_index.as_u32())\n\n}\n\n\n", "file_path": "lib/compiler-cranelift/src/func_environ.rs", "rank": 52, "score": 178569.9796372464 }, { "content": "/// Get the cache dir\n\npub fn get_cache_dir() -> PathBuf {\n\n match env::var(\"WASMER_CACHE_DIR\") {\n\n Ok(dir) => {\n\n let mut path = PathBuf::from(dir);\n\n path.push(VERSION);\n\n path\n\n }\n\n Err(_) => {\n\n // We use a temporal directory for saving cache files\n\n let mut temp_dir = env::temp_dir();\n\n temp_dir.push(\"wasmer\");\n\n temp_dir.push(VERSION);\n\n temp_dir\n\n }\n\n }\n\n}\n", "file_path": "lib/cli-compiler/src/common.rs", "rank": 53, "score": 177955.78621777106 }, { "content": "/// Returns the tag as the first return value\n\n/// The data as the second return value\n\n/// and the amount of data to read from it as the third value\n\npub fn bytes_for_input_event(input_event: InputEvent) -> (u8, [u8; 8], usize) {\n\n let mut data = [0u8; 8];\n\n match input_event {\n\n InputEvent::KeyPress(k) => {\n\n data[0] = map_key_to_bytes(k);\n\n (KEY_PRESS, data, 1)\n\n }\n\n InputEvent::KeyRelease(k) => {\n\n data[0] = map_key_to_bytes(k);\n\n (KEY_RELEASE, data, 1)\n\n }\n\n InputEvent::MouseEvent(x, y, btn) => {\n\n let tag = match btn {\n\n MouseButton::Left => MOUSE_PRESS_LEFT,\n\n MouseButton::Right => MOUSE_PRESS_RIGHT,\n\n MouseButton::Middle => MOUSE_PRESS_MIDDLE,\n\n };\n\n let x_bytes = x.to_le_bytes();\n\n data[..4].clone_from_slice(&x_bytes[..4]);\n\n let y_bytes = y.to_le_bytes();\n", "file_path": "lib/wasi-experimental-io-devices/src/util.rs", "rank": 54, "score": 
177849.5672999089 }, { "content": "#[allow(dead_code)] // it's used in `env/windows/mod.rs`.\n\npub fn read_string_from_wasm(memory: &Memory, offset: u32) -> String {\n\n WasmPtr::<u8>::new(offset)\n\n .read_utf8_string_with_nul(memory)\n\n .unwrap()\n\n}\n\n\n", "file_path": "lib/emscripten/src/utils.rs", "rank": 55, "score": 176478.1939180061 }, { "content": "pub fn impl_wasmer_env(input: &DeriveInput) -> TokenStream {\n\n let struct_name = &input.ident;\n\n\n\n set_dummy(quote! {\n\n impl ::wasmer::WasmerEnv for #struct_name {\n\n fn init_with_instance(&mut self, instance: &::wasmer::Instance) -> ::core::result::Result<(), ::wasmer::HostEnvInitError> {\n\n Ok(())\n\n }\n\n }\n\n });\n\n\n\n match &input.data {\n\n Data::Struct(ds) => {\n\n impl_wasmer_env_for_struct(struct_name, ds, &input.generics, &input.attrs)\n\n }\n\n _ => todo!(),\n\n }\n\n /*match input.data {\n\n Struct(ds /*DataStruct {\n\n fields: syn::Fields::Named(ref fields),\n\n ..\n\n }*/) => ,\n\n Enum(ref e) => impl_wasmer_env_for_enum(struct_name, &e.variants, &input.attrs),\n\n _ => abort_call_site!(\"Clap only supports non-tuple structs and enums\"),\n\n }*/\n\n}\n\n\n", "file_path": "lib/derive/src/env/mod.rs", "rank": 56, "score": 174964.1886022973 }, { "content": "#[compiler_test(imports)]\n\nfn dynamic_function_with_env_wasmer_env_init_works(config: crate::Config) -> Result<()> {\n\n let store = config.store();\n\n let module = get_module2(&store)?;\n\n\n\n #[allow(dead_code)]\n\n #[derive(WasmerEnv, Clone)]\n\n struct Env {\n\n #[wasmer(export)]\n\n memory: LazyInit<Memory>,\n\n }\n\n\n\n let env: Env = Env {\n\n memory: LazyInit::default(),\n\n };\n\n let instance = Instance::new(\n\n &module,\n\n &imports! 
{\n\n \"host\" => {\n\n \"fn\" => Function::new_with_env(&store, FunctionType::new(vec![], vec![]), env, |env, _values| {\n\n assert!(env.memory_ref().is_some());\n\n Ok(vec![])\n\n }),\n\n },\n\n },\n\n )?;\n\n let f: TypedFunction<(), ()> = instance.exports.get_native_function(\"main\")?;\n\n f.call()?;\n\n Ok(())\n\n}\n\n\n", "file_path": "tests/compilers/imports.rs", "rank": 57, "score": 172620.3206657316 }, { "content": "/// Generate C source code from some `CStatements` into a String.\n\n// TODO: add config section\n\npub fn generate_c(statements: &[CStatement]) -> String {\n\n let mut out = String::new();\n\n for statement in statements {\n\n statement.generate_c(&mut out);\n\n }\n\n out\n\n}\n\n\n\n#[cfg(test)]\n\nmod test {\n\n use super::*;\n\n\n\n #[test]\n\n fn generate_types() {\n\n macro_rules! assert_c_type {\n\n ($ctype:expr, $expected:expr) => {\n\n let mut w = String::new();\n\n let ctype = $ctype;\n\n ctype.generate_c(&mut w);\n\n assert_eq!(w, $expected);\n", "file_path": "lib/cli/src/c_gen/mod.rs", "rank": 58, "score": 172429.91196806653 }, { "content": "fn get_wasmer_dir() -> anyhow::Result<PathBuf> {\n\n Ok(PathBuf::from(\n\n env::var(\"WASMER_DIR\")\n\n .or_else(|e| {\n\n option_env!(\"WASMER_INSTALL_PREFIX\")\n\n .map(str::to_string)\n\n .ok_or(e)\n\n })\n\n .context(\"Trying to read env var `WASMER_DIR`\")?,\n\n ))\n\n}\n\n\n", "file_path": "lib/cli/src/commands/create_exe.rs", "rank": 59, "score": 172024.68482259393 }, { "content": "fn transform_err(err: BinaryReaderError) -> String {\n\n err.message().into()\n\n}\n\n\n", "file_path": "lib/api/src/js/module_info_polyfill.rs", "rank": 60, "score": 170298.11636420543 }, { "content": "fn get_wasmer_include_directory() -> anyhow::Result<PathBuf> {\n\n let mut path = get_wasmer_dir()?;\n\n path.push(\"include\");\n\n Ok(path)\n\n}\n\n\n", "file_path": "lib/cli/src/commands/create_exe.rs", "rank": 61, "score": 170138.6849443922 }, { "content": "pub fn map_key_to_bytes(key: Key) -> u8 {\n\n 
match key {\n\n Key::Backspace => 8,\n\n Key::Tab => 9,\n\n Key::NumPadEnter | Key::Enter => 13,\n\n Key::LeftShift | Key::RightShift => 16,\n\n Key::LeftCtrl | Key::RightCtrl => 17,\n\n Key::LeftAlt | Key::RightAlt => 18,\n\n Key::Pause => 19,\n\n Key::CapsLock => 20,\n\n Key::Escape => 27,\n\n Key::Space => 32,\n\n Key::PageUp => 33,\n\n Key::PageDown => 34,\n\n Key::End => 35,\n\n Key::Home => 36,\n\n\n\n Key::Left => 37,\n\n Key::Up => 38,\n\n Key::Right => 39,\n", "file_path": "lib/wasi-experimental-io-devices/src/util.rs", "rank": 62, "score": 166552.4703397174 }, { "content": "fn generate_header(header_file_src: &[u8]) -> anyhow::Result<()> {\n\n let header_file_path = Path::new(\"my_wasm.h\");\n\n let mut header = std::fs::OpenOptions::new()\n\n .create(true)\n\n .truncate(true)\n\n .write(true)\n\n .open(&header_file_path)?;\n\n\n\n use std::io::Write;\n\n header.write_all(header_file_src)?;\n\n\n\n Ok(())\n\n}\n\n\n", "file_path": "lib/cli/src/commands/create_exe.rs", "rank": 63, "score": 166552.4703397174 }, { "content": "#[derive(Debug)]\n\nstruct Fusion {\n\n state: i32,\n\n}\n\n\n\nimpl ModuleMiddleware for FusionGen {\n\n fn generate_function_middleware(&self, _: LocalFunctionIndex) -> Box<dyn FunctionMiddleware> {\n\n Box::new(Fusion { state: 0 })\n\n }\n\n}\n\n\n\nimpl FunctionMiddleware for Fusion {\n\n fn feed<'a>(\n\n &mut self,\n\n operator: Operator<'a>,\n\n state: &mut MiddlewareReaderState<'a>,\n\n ) -> Result<(), MiddlewareError> {\n\n match (operator, self.state) {\n\n (Operator::I32Add, 0) => {\n\n self.state = 1;\n\n }\n", "file_path": "tests/compilers/middlewares.rs", "rank": 64, "score": 164614.82465442788 }, { "content": "// allows us to handle parens more cleanly\n\nstruct WasmerAttrInner(WasmerAttr);\n\n\n\nimpl Parse for WasmerAttrInner {\n\n fn parse(input: ParseStream<'_>) -> syn::Result<Self> {\n\n let ident: Ident = input.parse()?;\n\n let ident_str = ident.to_string();\n\n let span = ident.span();\n\n let out = match 
ident_str.as_str() {\n\n \"export\" => {\n\n let export_expr;\n\n let (name, optional, aliases) = if input.peek(token::Paren) {\n\n let _: token::Paren = parenthesized!(export_expr in input);\n\n\n\n let expr = export_expr.parse::<ExportExpr>()?;\n\n (expr.name, expr.optional, expr.aliases)\n\n } else {\n\n (None, false, vec![])\n\n };\n\n\n\n WasmerAttr::Export {\n", "file_path": "lib/derive/src/env/parse.rs", "rank": 65, "score": 163340.82897542353 }, { "content": "#[cfg(feature = \"cranelift\")]\n\nfn maybe_instantiate_cranelift(wasm_bytes: &[u8]) -> Result<Option<Instance>> {\n\n let mut compiler = Cranelift::default();\n\n compiler.canonicalize_nans(true);\n\n compiler.enable_verifier();\n\n let store = Store::new(&Universal::new(compiler).engine());\n\n let module = Module::new(&store, &wasm_bytes)?;\n\n let instance = Instance::new(&module, &imports! {})?;\n\n Ok(Some(instance))\n\n}\n\n\n", "file_path": "fuzz/fuzz_targets/equivalence_universal.rs", "rank": 66, "score": 163132.83507312438 }, { "content": "#[cfg(feature = \"llvm\")]\n\nfn maybe_instantiate_llvm(wasm_bytes: &[u8]) -> Result<Option<Instance>> {\n\n let mut compiler = LLVM::default();\n\n compiler.canonicalize_nans(true);\n\n compiler.enable_verifier();\n\n let store = Store::new(&Universal::new(compiler).engine());\n\n let module = Module::new(&store, &wasm_bytes)?;\n\n let instance = Instance::new(&module, &imports! 
{})?;\n\n Ok(Some(instance))\n\n}\n\n\n", "file_path": "fuzz/fuzz_targets/equivalence_universal.rs", "rank": 67, "score": 163132.83507312438 }, { "content": "pub fn dirent_to_le_bytes(ent: &__wasi_dirent_t) -> Vec<u8> {\n\n let out: Vec<u8> = std::iter::empty()\n\n .chain(ent.d_next.to_le_bytes())\n\n .chain(ent.d_ino.to_le_bytes())\n\n .chain(ent.d_namlen.to_le_bytes())\n\n .chain(u32::from(ent.d_type).to_le_bytes())\n\n .collect();\n\n\n\n assert_eq!(out.len(), mem::size_of::<__wasi_dirent_t>());\n\n out\n\n}\n\n\n\n#[cfg(test)]\n\nmod tests {\n\n use super::{__wasi_dirent_t, dirent_to_le_bytes};\n\n\n\n #[test]\n\n fn test_dirent_to_le_bytes() {\n\n let s = __wasi_dirent_t {\n\n d_next: 0x0123456789abcdef,\n", "file_path": "lib/wasi-types/src/directory.rs", "rank": 68, "score": 163132.83507312438 }, { "content": "#[cfg(feature = \"singlepass\")]\n\nfn maybe_instantiate_singlepass(wasm_bytes: &[u8]) -> Result<Option<Instance>> {\n\n let compiler = Singlepass::default();\n\n let store = Store::new(&Universal::new(compiler).engine());\n\n let module = Module::new(&store, &wasm_bytes);\n\n let module = match module {\n\n Ok(m) => m,\n\n Err(e) => {\n\n let error_message = format!(\"{}\", e);\n\n if error_message.contains(\"Validation error: invalid result arity: func type returns multiple values\") || error_message.contains(\"Validation error: blocks, loops, and ifs may only produce a resulttype when multi-value is not enabled\") || error_message.contains(\"multi-value returns not yet implemented\") {\n\n return Ok(None);\n\n }\n\n return Err(e.into());\n\n }\n\n };\n\n let instance = Instance::new(&module, &imports! {})?;\n\n Ok(Some(instance))\n\n}\n\n\n", "file_path": "fuzz/fuzz_targets/equivalence_universal.rs", "rank": 69, "score": 163132.83507312438 }, { "content": "pub fn run_basic_static_function(store: &Store, compiler_name: &str, c: &mut Criterion) {\n\n let module = Module::new(store, BASIC_WAT).unwrap();\n\n let import_object = imports! 
{\n\n \"env\" => {\n\n \"multiply\" => Function::new_native(store, |a: i32, b: i32| a * b),\n\n },\n\n };\n\n let instance = Instance::new(&module, &import_object).unwrap();\n\n let dyn_f: &Function = instance.exports.get(\"add\").unwrap();\n\n let f: TypedFunction<(i32, i32), i32> = dyn_f.native().unwrap();\n\n\n\n c.bench_function(&format!(\"basic static func {}\", compiler_name), |b| {\n\n b.iter(|| {\n\n let result = black_box(f.call(4, 6).unwrap());\n\n assert_eq!(result, 10);\n\n })\n\n });\n\n\n\n let dyn_f_many: &Function = instance.exports.get(\"add20\").unwrap();\n\n let f_many: TypedFunction<\n", "file_path": "benches/static_and_dynamic_functions.rs", "rank": 70, "score": 162123.32202017223 }, { "content": "pub fn run_basic_dynamic_function(store: &Store, compiler_name: &str, c: &mut Criterion) {\n\n let module = Module::new(store, BASIC_WAT).unwrap();\n\n let import_object = imports! {\n\n \"env\" => {\n\n \"multiply\" => Function::new_native(store, |a: i32, b: i32| a * b),\n\n },\n\n };\n\n let instance = Instance::new(&module, &import_object).unwrap();\n\n\n\n let dyn_f: &Function = instance.exports.get(\"add\").unwrap();\n\n c.bench_function(&format!(\"basic dynfunc {}\", compiler_name), |b| {\n\n b.iter(|| {\n\n let dyn_result = black_box(dyn_f.call(&[Val::I32(4), Val::I32(6)]).unwrap());\n\n assert_eq!(dyn_result[0], Val::I32(10));\n\n })\n\n });\n\n\n\n let dyn_f_many: &Function = instance.exports.get(\"add20\").unwrap();\n\n c.bench_function(\n\n &format!(\"basic dynfunc with many args {}\", compiler_name),\n", "file_path": "benches/static_and_dynamic_functions.rs", "rank": 71, "score": 162123.32202017223 }, { "content": "#[derive(Debug)]\n\nstruct FusionGen;\n\n\n", "file_path": "tests/compilers/middlewares.rs", "rank": 72, "score": 161871.3964660075 }, { "content": "#[derive(Debug)]\n\nstruct Add2Mul {\n\n value_off: i32,\n\n}\n\n\n\nimpl ModuleMiddleware for Add2MulGen {\n\n fn generate_function_middleware(&self, _: LocalFunctionIndex) -> 
Box<dyn FunctionMiddleware> {\n\n Box::new(Add2Mul {\n\n value_off: self.value_off,\n\n })\n\n }\n\n}\n\n\n\nimpl FunctionMiddleware for Add2Mul {\n\n fn feed<'a>(\n\n &mut self,\n\n operator: Operator<'a>,\n\n state: &mut MiddlewareReaderState<'a>,\n\n ) -> Result<(), MiddlewareError> {\n\n match operator {\n\n Operator::I32Add => {\n", "file_path": "tests/compilers/middlewares.rs", "rank": 73, "score": 161871.3964660075 }, { "content": "/// Compile the C code.\n\nfn run_c_compile(\n\n current_dir: &Path,\n\n path_to_c_src: &Path,\n\n output_name: &Path,\n\n) -> anyhow::Result<()> {\n\n #[cfg(not(windows))]\n\n let c_compiler = \"cc\";\n\n #[cfg(windows)]\n\n let c_compiler = \"clang++\";\n\n\n\n let output = Command::new(c_compiler)\n\n .current_dir(current_dir)\n\n .arg(\"-O2\")\n\n .arg(\"-c\")\n\n .arg(path_to_c_src)\n\n .arg(\"-I\")\n\n .arg(WASMER_INCLUDE_PATH)\n\n .arg(\"-o\")\n\n .arg(output_name)\n\n .output()?;\n", "file_path": "tests/integration/cli/tests/compile.rs", "rank": 74, "score": 160422.41216019256 }, { "content": "fn to_serialize_error(err: impl std::error::Error) -> SerializeError {\n\n SerializeError::Generic(format!(\"{}\", err))\n\n}\n\n\n\nimpl SerializableModule {\n\n /// Serialize a Module into bytes\n\n /// The bytes will have the following format:\n\n /// RKYV serialization (any length) + POS (8 bytes)\n\n pub fn serialize(&self) -> Result<Vec<u8>, SerializeError> {\n\n let mut serializer = AllocSerializer::<4096>::default();\n\n let pos = serializer\n\n .serialize_value(self)\n\n .map_err(to_serialize_error)? 
as u64;\n\n let mut serialized_data = serializer.into_serializer().into_inner();\n\n serialized_data.extend_from_slice(&pos.to_le_bytes());\n\n Ok(serialized_data.to_vec())\n\n }\n\n\n\n /// Deserialize a Module from a slice.\n\n /// The slice must have the following format:\n", "file_path": "lib/engine-universal/src/serialize.rs", "rank": 75, "score": 160049.92302868125 }, { "content": "fn to_serialize_error(err: impl std::error::Error) -> SerializeError {\n\n SerializeError::Generic(format!(\"{}\", err))\n\n}\n\n\n\nimpl SerializableModule {\n\n /// Serialize a Module into bytes\n\n /// The bytes will have the following format:\n\n /// RKYV serialization (any length) + POS (8 bytes)\n\n pub fn serialize(&self) -> Result<Vec<u8>, SerializeError> {\n\n let mut serializer = AllocSerializer::<4096>::default();\n\n let pos = serializer\n\n .serialize_value(self)\n\n .map_err(to_serialize_error)? as u64;\n\n let mut serialized_data = serializer.into_serializer().into_inner();\n\n serialized_data.extend_from_slice(&pos.to_le_bytes());\n\n Ok(serialized_data.to_vec())\n\n }\n\n\n\n /// Deserialize a Module from a slice.\n\n /// The slice must have the following format:\n", "file_path": "lib/universal-artifact/src/serialize.rs", "rank": 76, "score": 160049.92302868125 }, { "content": "// #[no_mangle]\n\n/// emscripten: _getenv // (name: *const char) -> *const c_char;\n\npub fn _getenv(ctx: &EmEnv, name: u32) -> u32 {\n\n debug!(\"emscripten::_getenv\");\n\n let memory = ctx.memory(0);\n\n let name_string = read_string_from_wasm(&memory, name);\n\n debug!(\"=> name({:?})\", name_string);\n\n let c_str = unsafe { getenv(name_string.as_ptr() as *const libc::c_char) };\n\n if c_str.is_null() {\n\n return 0;\n\n }\n\n unsafe { copy_cstr_into_wasm(ctx, c_str as *const c_char) }\n\n}\n\n\n", "file_path": "lib/emscripten/src/env/windows/mod.rs", "rank": 77, "score": 160034.36058265637 }, { "content": "// #[no_mangle]\n\n/// emscripten: _getenv // (name: *const char) -> *const 
c_char;\n\npub fn _getenv(ctx: &EmEnv, name: i32) -> u32 {\n\n debug!(\"emscripten::_getenv\");\n\n\n\n let name_addr = emscripten_memory_pointer!(ctx.memory(0), name) as *const c_char;\n\n\n\n debug!(\"=> name({:?})\", unsafe { CStr::from_ptr(name_addr) });\n\n\n\n let c_str = unsafe { getenv(name_addr) };\n\n if c_str.is_null() {\n\n return 0;\n\n }\n\n\n\n unsafe { copy_cstr_into_wasm(ctx, c_str) }\n\n}\n\n\n", "file_path": "lib/emscripten/src/env/unix/mod.rs", "rank": 78, "score": 160034.36058265637 }, { "content": "/// emscripten: _putenv // (name: *const char);\n\npub fn _putenv(ctx: &EmEnv, name: c_int) -> c_int {\n\n debug!(\"emscripten::_putenv\");\n\n let memory = ctx.memory(0);\n\n let name_addr = emscripten_memory_pointer!(&memory, name) as *const c_char;\n\n debug!(\"=> name({:?})\", unsafe {\n\n std::ffi::CStr::from_ptr(name_addr)\n\n });\n\n unsafe { putenv(name_addr) }\n\n}\n\n\n", "file_path": "lib/emscripten/src/env/windows/mod.rs", "rank": 79, "score": 160033.64552264256 }, { "content": "/// emscripten: _putenv // (name: *const char);\n\npub fn _putenv(ctx: &EmEnv, name: c_int) -> c_int {\n\n debug!(\"emscripten::_putenv\");\n\n\n\n let name_addr = emscripten_memory_pointer!(ctx.memory(0), name) as *const c_char;\n\n\n\n debug!(\"=> name({:?})\", unsafe { CStr::from_ptr(name_addr) });\n\n\n\n unsafe { putenv(name_addr as _) }\n\n}\n\n\n", "file_path": "lib/emscripten/src/env/unix/mod.rs", "rank": 80, "score": 160033.64552264256 }, { "content": "/// emscripten: _unsetenv // (name: *const char);\n\npub fn _unsetenv(ctx: &EmEnv, name: u32) -> c_int {\n\n debug!(\"emscripten::_unsetenv\");\n\n let memory = ctx.memory(0);\n\n let name = read_string_from_wasm(&memory, name);\n\n // no unsetenv on windows, so use putenv with an empty value\n\n let unsetenv_string = format!(\"{}=\", name);\n\n let unsetenv_cstring = CString::new(unsetenv_string).unwrap();\n\n let unsetenv_raw_ptr = unsetenv_cstring.as_ptr();\n\n debug!(\"=> name({:?})\", name);\n\n 
unsafe { putenv(unsetenv_raw_ptr) }\n\n}\n\n\n", "file_path": "lib/emscripten/src/env/windows/mod.rs", "rank": 81, "score": 160033.64552264256 }, { "content": "/// emscripten: _unsetenv // (name: *const char);\n\npub fn _unsetenv(ctx: &EmEnv, name: c_int) -> c_int {\n\n debug!(\"emscripten::_unsetenv\");\n\n\n\n let name_addr = emscripten_memory_pointer!(ctx.memory(0), name) as *const c_char;\n\n\n\n debug!(\"=> name({:?})\", unsafe { CStr::from_ptr(name_addr) });\n\n\n\n unsafe { unsetenv(name_addr) }\n\n}\n\n\n", "file_path": "lib/emscripten/src/env/unix/mod.rs", "rank": 82, "score": 160033.64552264256 }, { "content": "#[cfg(target_os = \"wasi\")]\n\nfn read(fd: u32, iovs: &[&mut [u8]]) -> u32 {\n\n let mut nread = 0;\n\n let mut processed_iovs = vec![];\n\n\n\n for iov in iovs {\n\n processed_iovs.push(WasiIovec {\n\n buf: iov.as_ptr() as usize as u32,\n\n buf_len: iov.len() as u32,\n\n })\n\n }\n\n\n\n unsafe {\n\n fd_read(\n\n fd,\n\n processed_iovs.as_ptr() as usize as u32,\n\n processed_iovs.len() as u32,\n\n &mut nread as *mut u32 as usize as u32,\n\n );\n\n }\n\n nread\n\n}\n\n\n", "file_path": "tests/wasi-wast/wasi/tests/fd_read.rs", "rank": 83, "score": 160025.0183144678 }, { "content": "pub fn _sysconf(_ctx: &EmEnv, name: c_int) -> i32 {\n\n debug!(\"emscripten::_sysconf {}\", name);\n\n // TODO: Implement like emscripten expects regarding memory/page size\n\n unsafe { sysconf(name) as i32 } // TODO review i64\n\n}\n\n\n", "file_path": "lib/emscripten/src/env/unix/mod.rs", "rank": 84, "score": 160020.57798431345 }, { "content": "pub fn _sysconf(_ctx: &EmEnv, name: c_int) -> c_long {\n\n debug!(\"emscripten::_sysconf {}\", name);\n\n #[cfg(not(feature = \"debug\"))]\n\n let _ = name;\n\n // stub because sysconf is not valid on windows\n\n 0\n\n}\n\n\n", "file_path": "lib/emscripten/src/env/windows/mod.rs", "rank": 85, "score": 160020.57798431345 }, { "content": "#[derive(Debug)]\n\nstruct Add2MulGen {\n\n value_off: i32,\n\n}\n\n\n", "file_path": 
"tests/compilers/middlewares.rs", "rank": 86, "score": 159254.8538110272 }, { "content": "/// Suggest function exports for the module\n\npub fn suggest_function_exports(module: &Module, query: &str) -> Vec<String> {\n\n let mut function_names = module\n\n .exports()\n\n .functions()\n\n .map(|extern_fn| {\n\n let name = extern_fn.name();\n\n name.to_string()\n\n })\n\n .collect::<Vec<_>>();\n\n function_names.sort_by_key(|name| damerau_levenshtein(name, query));\n\n function_names\n\n}\n", "file_path": "lib/cli/src/suggestions.rs", "rank": 87, "score": 157157.73561439742 }, { "content": "#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\nstruct AssertReturn {\n\n return_value: i64,\n\n}\n\n\n\nimpl<'a> Parse<'a> for AssertReturn {\n\n fn parse(parser: Parser<'a>) -> parser::Result<Self> {\n\n parser.parse::<wasi_kw::assert_return>()?;\n\n let return_value = parser.parens(|p| {\n\n p.parse::<wasi_kw::fake_i64_const>()?;\n\n p.parse::<i64>()\n\n })?;\n\n Ok(Self { return_value })\n\n }\n\n}\n\n\n", "file_path": "tests/lib/wast/src/wasi_wast.rs", "rank": 88, "score": 157048.80285964106 }, { "content": "#[derive(Debug, Clone)]\n\nstruct OutputCapturerer {\n\n output: Arc<Mutex<mpsc::Sender<Vec<u8>>>>,\n\n}\n\n\n\nimpl OutputCapturerer {\n\n fn new() -> (Self, mpsc::Receiver<Vec<u8>>) {\n\n let (tx, rx) = mpsc::channel();\n\n (\n\n Self {\n\n output: Arc::new(Mutex::new(tx)),\n\n },\n\n rx,\n\n )\n\n }\n\n}\n\n\n\nimpl Read for OutputCapturerer {\n\n fn read(&mut self, _buf: &mut [u8]) -> io::Result<usize> {\n\n Err(io::Error::new(\n\n io::ErrorKind::Other,\n", "file_path": "tests/lib/wast/src/wasi_wast.rs", "rank": 89, "score": 157047.3247608473 }, { "content": "#[allow(clippy::type_complexity)]\n\nstruct TrapHandlerContext {\n\n inner: *const u8,\n\n handle_trap: fn(\n\n *const u8,\n\n usize,\n\n usize,\n\n Option<usize>,\n\n Option<TrapCode>,\n\n &mut dyn FnMut(TrapHandlerRegs),\n\n ) -> bool,\n\n custom_trap: *const dyn TrapHandler,\n\n}\n", "file_path": 
"lib/vm/src/trap/traphandlers.rs", "rank": 90, "score": 156909.08167135718 }, { "content": "#[derive(Copy, Clone, Debug)]\n\nstruct FloatValue {\n\n /// Do we need to canonicalize the value before its bit pattern is next observed? If so, how?\n\n canonicalization: Option<CanonicalizeType>,\n\n\n\n /// Corresponding depth in the main value stack.\n\n depth: usize,\n\n}\n\n\n\nimpl FloatValue {\n\n fn new(depth: usize) -> Self {\n\n FloatValue {\n\n canonicalization: None,\n\n depth,\n\n }\n\n }\n\n\n\n fn cncl_f32(depth: usize) -> Self {\n\n FloatValue {\n\n canonicalization: Some(CanonicalizeType::F32),\n\n depth,\n", "file_path": "lib/compiler-singlepass/src/codegen.rs", "rank": 91, "score": 156756.592835487 }, { "content": "struct FuncTypeImpl : ExternTypeImpl {\n\n ownvec<ValType> params;\n\n ownvec<ValType> results;\n\n\n\n FuncTypeImpl(ownvec<ValType>& params, ownvec<ValType>& results) :\n\n ExternTypeImpl(ExternKind::FUNC),\n\n params(std::move(params)), results(std::move(results))\n\n {\n\n stats.make(Stats::FUNCTYPE, this);\n\n }\n\n\n\n ~FuncTypeImpl() {\n\n stats.free(Stats::FUNCTYPE, this);\n\n }\n\n};\n\n\n\ntemplate<> struct implement<FuncType> { using type = FuncTypeImpl; };\n\n\n\n\n\nFuncType::~FuncType() {}\n", "file_path": "lib/c-api/tests/wasm-c-api/src/wasm-v8.cc", "rank": 92, "score": 156204.54573552197 }, { "content": "struct GlobalTypeImpl : ExternTypeImpl {\n\n own<ValType> content;\n\n Mutability mutability;\n\n\n\n GlobalTypeImpl(own<ValType>& content, Mutability mutability) :\n\n ExternTypeImpl(ExternKind::GLOBAL),\n\n content(std::move(content)), mutability(mutability)\n\n {\n\n stats.make(Stats::GLOBALTYPE, this);\n\n }\n\n\n\n ~GlobalTypeImpl() {\n\n stats.free(Stats::GLOBALTYPE, this);\n\n }\n\n};\n\n\n\ntemplate<> struct implement<GlobalType> { using type = GlobalTypeImpl; };\n\n\n\n\n\nGlobalType::~GlobalType() {}\n", "file_path": "lib/c-api/tests/wasm-c-api/src/wasm-v8.cc", "rank": 93, "score": 156204.54573552197 }, { "content": 
"struct MemoryTypeImpl : ExternTypeImpl {\n\n Limits limits;\n\n\n\n MemoryTypeImpl(Limits limits) :\n\n ExternTypeImpl(ExternKind::MEMORY), limits(limits)\n\n {\n\n stats.make(Stats::MEMORYTYPE, this);\n\n }\n\n\n\n ~MemoryTypeImpl() {\n\n stats.free(Stats::MEMORYTYPE, this);\n\n }\n\n};\n\n\n\ntemplate<> struct implement<MemoryType> { using type = MemoryTypeImpl; };\n\n\n\n\n\nMemoryType::~MemoryType() {}\n\n\n\nauto MemoryType::make(Limits limits) -> own<MemoryType> {\n", "file_path": "lib/c-api/tests/wasm-c-api/src/wasm-v8.cc", "rank": 94, "score": 156204.54573552197 }, { "content": "struct TableTypeImpl : ExternTypeImpl {\n\n own<ValType> element;\n\n Limits limits;\n\n\n\n TableTypeImpl(own<ValType>& element, Limits limits) :\n\n ExternTypeImpl(ExternKind::TABLE), element(std::move(element)), limits(limits)\n\n {\n\n stats.make(Stats::TABLETYPE, this);\n\n }\n\n\n\n ~TableTypeImpl() {\n\n stats.free(Stats::TABLETYPE, this);\n\n }\n\n};\n\n\n\ntemplate<> struct implement<TableType> { using type = TableTypeImpl; };\n\n\n\n\n\nTableType::~TableType() {}\n\n\n", "file_path": "lib/c-api/tests/wasm-c-api/src/wasm-v8.cc", "rank": 95, "score": 156204.54573552197 }, { "content": "struct SpecialLabelSet {\n\n integer_division_by_zero: Label,\n\n integer_overflow: Label,\n\n heap_access_oob: Label,\n\n table_access_oob: Label,\n\n indirect_call_null: Label,\n\n bad_signature: Label,\n\n}\n\n\n\n/// Metadata about a floating-point value.\n", "file_path": "lib/compiler-singlepass/src/codegen.rs", "rank": 97, "score": 154368.77037199502 }, { "content": "#[derive(Debug, Clone, PartialEq, Eq, Hash)]\n\nstruct AssertStdout<'a> {\n\n expected: &'a str,\n\n}\n\n\n\nimpl<'a> Parse<'a> for AssertStdout<'a> {\n\n fn parse(parser: Parser<'a>) -> parser::Result<Self> {\n\n parser.parse::<wasi_kw::assert_stdout>()?;\n\n Ok(Self {\n\n expected: parser.parse()?,\n\n })\n\n }\n\n}\n\n\n", "file_path": "tests/lib/wast/src/wasi_wast.rs", "rank": 98, "score": 154171.34823294153 }, { 
"content": "struct Writer<'a> {\n\n buf: &'a mut [u8],\n\n offset: usize,\n\n}\n\n\n\nimpl<'a> Writer<'a> {\n\n pub fn new(buf: &'a mut [u8]) -> Self {\n\n Self { buf, offset: 0 }\n\n }\n\n\n\n fn write_u8(&mut self, v: u8) {\n\n self.buf[self.offset] = v;\n\n self.offset += 1;\n\n }\n\n\n\n fn write_u16_le(&mut self, v: u16) {\n\n self.buf[self.offset..(self.offset + 2)].copy_from_slice(&v.to_le_bytes());\n\n self.offset += 2;\n\n }\n\n\n", "file_path": "lib/compiler-singlepass/src/unwind_winx64.rs", "rank": 99, "score": 153879.1382087875 } ]
Rust
src/scd30/mod.rs
joemclo/knurling-sessions-cabon-sensor
893ffccb58166f273c02f3951bdce793f2fbd5fd
use crc_all::Crc; use embedded_hal::blocking::i2c::{Read, Write}; pub struct SensorData { pub co2: f32, pub temperature: f32, pub humidity: f32, } const DEFAULT_ADDRESS: u8 = 0x61; enum Command { StartContinuousMeasurement = 0x0010, StopContinuousMeasurement = 0x0104, MeasurementInterval = 0x4600, GetDataReadyStatus = 0x0202, ReadMeasurement = 0x0300, ASC = 0x5306, TemperatureOffset = 0x5403, ReadFirmwareVersion = 0xd100, SoftReset = 0xd304, } pub struct SCD30<T>(T); impl<T, E> SCD30<T> where T: Read<Error = E> + Write<Error = E>, { pub fn init(i2c2: T) -> Self { SCD30(i2c2) } pub fn read_firmware_version(&mut self) -> Result<[u8; 2], E> { let mut rd_buffer = [0u8; 2]; self.0.write( DEFAULT_ADDRESS, &(Command::ReadFirmwareVersion as u16).to_be_bytes(), )?; self.0.read(DEFAULT_ADDRESS, &mut rd_buffer)?; let major = u8::from_be(rd_buffer[0]); let minor = u8::from_be(rd_buffer[1]); Ok([major, minor]) } pub fn soft_reset(&mut self) -> Result<(), E> { self.0 .write(DEFAULT_ADDRESS, &(Command::SoftReset as u16).to_be_bytes())?; Ok(()) } fn get_crc(&mut self) -> Crc<u8> { let crc = Crc::<u8>::new(0x31, 8, 0xFF, 0x00, false); crc } pub fn set_temperature_offset(&mut self, temperature_offset: u16) -> Result<(), E> { let temperature_offset_bytes: &[u8; 2] = &temperature_offset.to_be_bytes(); let command: [u8; 2] = (Command::TemperatureOffset as u16).to_be_bytes(); let mut command: [u8; 5] = [ command[0], command[1], temperature_offset_bytes[0], temperature_offset_bytes[1], 0x00, ]; let mut crc = self.get_crc(); crc.update(temperature_offset_bytes); command[4] = crc.finish(); self.0.write(DEFAULT_ADDRESS, &command)?; Ok(()) } pub fn read_temperature_offset(&mut self) -> Result<u16, E> { let mut rd_buffer = [0u8; 3]; self.0.write( DEFAULT_ADDRESS, &(Command::TemperatureOffset as u16).to_be_bytes(), )?; self.0.read(DEFAULT_ADDRESS, &mut rd_buffer)?; Ok(u16::from_be_bytes([rd_buffer[0], rd_buffer[1]])) } pub fn start_continuous_measurement(&mut self, pressure: &u16) -> 
Result<(), E> { let argument_bytes = &pressure.to_be_bytes(); let mut crc = self.get_crc(); crc.update(argument_bytes); let command = (Command::StartContinuousMeasurement as u16).to_be_bytes(); let command: [u8; 5] = [ command[0], command[1], argument_bytes[0], argument_bytes[1], crc.finish(), ]; self.0.write(DEFAULT_ADDRESS, &command)?; Ok(()) } pub fn stop_continuous_measurement(&mut self) -> Result<(), E> { self.0.write( DEFAULT_ADDRESS, &(Command::StopContinuousMeasurement as u16).to_be_bytes(), )?; Ok(()) } pub fn set_measurement_interval(&mut self, interval: u16) -> Result<(), E> { let argument_bytes = &interval.to_be_bytes(); let mut crc = self.get_crc(); crc.update(argument_bytes); let command = (Command::MeasurementInterval as u16).to_be_bytes(); let command: [u8; 5] = [ command[0], command[1], argument_bytes[0], argument_bytes[1], crc.finish(), ]; self.0.write(DEFAULT_ADDRESS, &command)?; Ok(()) } pub fn get_measurement_interval(&mut self) -> Result<u16, E> { let mut rd_buffer = [0u8; 3]; self.0.write( DEFAULT_ADDRESS, &(Command::MeasurementInterval as u16).to_be_bytes(), )?; self.0.read(DEFAULT_ADDRESS, &mut rd_buffer)?; Ok(u16::from_be_bytes([rd_buffer[0], rd_buffer[1]])) } pub fn data_ready(&mut self) -> Result<bool, E> { let mut rd_buffer = [0u8; 3]; self.0.write( DEFAULT_ADDRESS, &(Command::GetDataReadyStatus as u16).to_be_bytes(), )?; self.0.read(DEFAULT_ADDRESS, &mut rd_buffer)?; Ok(u16::from_be_bytes([rd_buffer[0], rd_buffer[1]]) == 1) } pub fn read_measurement(&mut self) -> Result<SensorData, E> { let mut rd_buffer = [0u8; 18]; self.0.write( DEFAULT_ADDRESS, &(Command::ReadMeasurement as u16).to_be_bytes(), )?; self.0.read(DEFAULT_ADDRESS, &mut rd_buffer)?; let sensor_data = SensorData { co2: f32::from_be_bytes([rd_buffer[0], rd_buffer[1], rd_buffer[3], rd_buffer[4]]), temperature: f32::from_be_bytes([ rd_buffer[6], rd_buffer[7], rd_buffer[9], rd_buffer[10], ]), humidity: f32::from_be_bytes([ rd_buffer[12], rd_buffer[13], rd_buffer[15], 
rd_buffer[16], ]), }; Ok(sensor_data) } pub fn activate_auto_self_calibration(&mut self) -> Result<bool, E> { let argument_bytes: [u8; 2] = [0x00, 0x01]; let mut crc = self.get_crc(); crc.update(&argument_bytes); let command = (Command::ASC as u16).to_be_bytes(); let command: [u8; 5] = [ command[0], command[1], argument_bytes[0], argument_bytes[1], crc.finish(), ]; self.0.write(DEFAULT_ADDRESS, &command)?; self.0 .write(DEFAULT_ADDRESS, &(Command::ASC as u16).to_be_bytes())?; let mut rd_buffer = [0u8; 3]; self.0.read(DEFAULT_ADDRESS, &mut rd_buffer)?; Ok(u16::from_be_bytes([rd_buffer[0], rd_buffer[1]]) == 1) } }
use crc_all::Crc; use embedded_hal::blocking::i2c::{Read, Write}; pub struct SensorData { pub co2: f32, pub temperature: f32, pub humidity: f32, } const DEFAULT_ADDRESS: u8 = 0x61; enum Command { StartContinuousMeasurement = 0x0010, StopContinuousMeasurement = 0x0104, MeasurementInterval = 0x4600, GetDataReadyStatus = 0x0202, ReadMeasurement = 0x0300, ASC = 0x5306, TemperatureOffset = 0x5403, ReadFirmwareVersion = 0xd100, SoftReset = 0xd304, } pub struct SCD30<T>(T); impl<T, E> SCD30<T> where T: Read<Error = E> + Write<Error = E>, { pub fn init(i2c2: T) -> Self { SCD30(i2c2) } pub fn read_firmware_version(&mut self) -> Result<[u8; 2], E> { let mut rd_buffer = [0u8; 2]; self.0.write( DEFAULT_ADDRESS, &(Command::ReadFirmwareVersion as u16).to_be_bytes(), )?; self.0.read(DEFAULT_ADDRESS, &mut rd_buffer)?; let major = u8::from_be(rd_buffer[0]); let minor = u8::from_be(rd_buffer[1]); Ok([major, minor]) } pub fn soft_reset(&mut self) -> Result<(), E> { self.0 .write(DEFAULT_ADDRESS, &(Command::SoftReset as u16).to_be_bytes())?; Ok(()) } fn get_crc(&mut self) -> Crc<u8> { let crc = Crc::<u8>::new(0x31, 8, 0xFF, 0x00, false); crc } pub fn set_temperature_offset(&mut self, temperature_offset: u16) -> Result<(), E> { let temperature_offset_bytes: &[u8; 2] = &temperature_offset.to_be_bytes(); let command: [u8; 2] = (Command::TemperatureOffset as u16).to_be_bytes(); let mut command: [u8; 5] = [ command[0], command[1], temperature_offset_bytes[0], temperature_offset_bytes[1], 0x00, ]; let mut crc = self.get_crc(); crc.update(temperature_offset_bytes); command[4] = crc.finish(); self.0.write(DEFAULT_ADDRESS, &command)?; Ok(()) } pub fn read_temperature_offset(&mut self) -> Result<u16, E> { let mut rd_buffer = [0u8; 3]; self.0.write( DEFAULT_ADDRESS, &(Command::TemperatureOffset as u16).to_be_bytes(), )?; self.0.read(DEFAULT_ADDRESS, &mut rd_buffer)?; Ok(u16::from_be_bytes([rd_buffer[0], rd_buffer[1]])) } pub fn start_continuous_measurement(&mut self, pressure: &u16) -> 
Result<(), E> { let argument_bytes = &pressure.to_be_bytes(); let mut crc = self.get_crc(); crc.update(argument_bytes); let command = (Command::StartContinuousMeasurement as u16).to_be_bytes(); let command: [u8; 5] = [ command[0], command[1], argument_bytes[0], argument_bytes[1], crc.finish(), ]; self.0.write(DEFAULT_ADDRESS, &command)?; Ok(()) } pub fn stop_continuous_measurement(&mut self) -> Result<(), E> { self.0.write( DEFAULT_ADDRESS, &(Command::StopContinuousMeasurement as u16).to_be_bytes(), )?; Ok(()) } pub fn set_measurement_interval(&mut self, interval: u16) -> Result<(), E> { let argument_bytes = &interval.to_be_bytes(); let mut crc = self.get_crc(); crc.update(argument_bytes); let command = (Command::MeasurementInterval as u16).to_be_bytes(); let command: [u8; 5] = [ command[0], command[1], argument_bytes[0], argument_bytes[1], crc.finish(), ]; self.0.write(DEFAULT_ADDRESS, &command)?; Ok(()) } pub fn get_measurement_interval(&mut self) -> Result<u16, E> { let mut rd_buffer = [0u8; 3]; self.0.write( DEFAULT_ADDRESS, &(Command::MeasurementInterval as u16).to_be_bytes(), )?; self.0.read(DEFAULT_ADDRESS, &mut rd_buffer)?; Ok(u16::from_be_bytes([rd_buffer[0], rd_buffer[1]])) } pub fn data_ready(&mut self) -> Result<bool, E> { let mut rd_buffer = [0u8; 3]; self.0.write( DEFAULT_ADDRESS, &(Command::GetDataReadyStatus as u16).to_be_bytes(), )?; self.0.read(DEFAULT_ADDRESS, &mut rd_buffer)?; Ok(u16::from_be_bytes([rd_buffer[0], rd_buffer[1]]) == 1) } pub fn read_measurement(&mut self) -> Result<SensorData, E> { let mut rd_buffer = [0u8; 18]; self.0.write( DEFAULT_ADDRESS, &(Command::ReadMeasurement as u16).to_be_bytes(), )?; self.0.read(DEFAULT_ADDRESS, &mut rd_buffer)?; let sensor_data
_bytes([ rd_buffer[6], rd_buffer[7], rd_buffer[9], rd_buffer[10], ]), humidity: f32::from_be_bytes([ rd_buffer[12], rd_buffer[13], rd_buffer[15], rd_buffer[16], ]), }; Ok(sensor_data) } pub fn activate_auto_self_calibration(&mut self) -> Result<bool, E> { let argument_bytes: [u8; 2] = [0x00, 0x01]; let mut crc = self.get_crc(); crc.update(&argument_bytes); let command = (Command::ASC as u16).to_be_bytes(); let command: [u8; 5] = [ command[0], command[1], argument_bytes[0], argument_bytes[1], crc.finish(), ]; self.0.write(DEFAULT_ADDRESS, &command)?; self.0 .write(DEFAULT_ADDRESS, &(Command::ASC as u16).to_be_bytes())?; let mut rd_buffer = [0u8; 3]; self.0.read(DEFAULT_ADDRESS, &mut rd_buffer)?; Ok(u16::from_be_bytes([rd_buffer[0], rd_buffer[1]]) == 1) } }
= SensorData { co2: f32::from_be_bytes([rd_buffer[0], rd_buffer[1], rd_buffer[3], rd_buffer[4]]), temperature: f32::from_be
function_block-random_span
[ { "content": "pub fn draw_titles(mut display: Display4in2) -> Display4in2 {\n\n draw_large_text(&mut display, \"Air Quality\", (20, 30));\n\n\n\n draw_mid_text(&mut display, \"Carbon Dioxide:\", (20, 90));\n\n draw_mid_text(&mut display, \"Temperature:\", (20, 130));\n\n draw_mid_text(&mut display, \"Humidity:\", (20, 170));\n\n\n\n display\n\n}\n\n\n", "file_path": "src/display_helper/mod.rs", "rank": 1, "score": 65874.61258739703 }, { "content": "/// Terminates the application and makes `probe-run` exit with exit-code = 0\n\npub fn exit() -> ! {\n\n loop {\n\n cortex_m::asm::bkpt();\n\n }\n\n}\n\n\n\npub mod alert;\n\npub mod buzzer;\n\npub mod display_helper;\n\npub mod dk_button;\n\npub mod number_representations;\n\npub mod rgb_led;\n\npub mod scd30;\n", "file_path": "src/lib.rs", "rank": 2, "score": 59085.880737900676 }, { "content": "pub fn clear_numbers(\n\n mut display: Display4in2,\n\n top_left: (i32, i32),\n\n bottom_right: (i32, i32),\n\n) -> Display4in2 {\n\n Rectangle::new(\n\n Point::new(top_left.0, top_left.1),\n\n Point::new(bottom_right.0, bottom_right.1),\n\n )\n\n .into_styled(PrimitiveStyle::with_fill(BinaryColor::Off))\n\n .draw(&mut display)\n\n .unwrap();\n\n\n\n display\n\n}\n", "file_path": "src/display_helper/mod.rs", "rank": 3, "score": 54104.836492789604 }, { "content": "pub fn draw_numbers(\n\n value: f32,\n\n unit: &str,\n\n position: (i32, i32),\n\n mut display: Display4in2,\n\n) -> Display4in2 {\n\n let mut buf = ArrayString::<[_; 12]>::new();\n\n\n\n write!(&mut buf, \"{:.2} {}\", value, unit).expect(\"Failed to write to buffer\");\n\n\n\n egtext!(\n\n text = &buf,\n\n top_left = position,\n\n style = text_style!(font = Font12x16, text_color = BinaryColor::On,)\n\n )\n\n .draw(&mut display)\n\n .unwrap();\n\n\n\n display\n\n}\n\n\n", "file_path": "src/display_helper/mod.rs", "rank": 4, "score": 54104.836492789604 }, { "content": "fn draw_large_text(display: &mut Display4in2, text: &str, position: (i32, i32)) -> () {\n\n 
draw_text(display, text, position, Font24x32);\n\n}\n\n\n", "file_path": "src/display_helper/mod.rs", "rank": 5, "score": 36813.52421580943 }, { "content": "fn draw_mid_text(display: &mut Display4in2, text: &str, position: (i32, i32)) -> () {\n\n draw_text(display, text, position, Font12x16);\n\n}\n\n\n", "file_path": "src/display_helper/mod.rs", "rank": 6, "score": 36813.52421580943 }, { "content": "fn draw_text<F>(display: &mut Display4in2, text: &str, position: (i32, i32), font: F) -> ()\n\nwhere\n\n F: Font + Clone + Copy,\n\n{\n\n egtext!(\n\n text = text,\n\n top_left = position,\n\n style = text_style!(font = font, text_color = BinaryColor::On,)\n\n )\n\n .draw(display)\n\n .unwrap();\n\n}\n\n\n", "file_path": "src/display_helper/mod.rs", "rank": 7, "score": 33547.70459670664 }, { "content": "#[defmt::panic_handler]\n\nfn panic() -> ! {\n\n cortex_m::asm::udf()\n\n}\n\n\n\nstatic COUNT: AtomicUsize = AtomicUsize::new(0);\n\ndefmt::timestamp!(\"{=usize}\", {\n\n // NOTE(no-CAS) `timestamps` runs with interrupts disabled\n\n let n = COUNT.load(Ordering::Relaxed);\n\n COUNT.store(n + 1, Ordering::Relaxed);\n\n n\n\n});\n\n\n", "file_path": "src/lib.rs", "rank": 8, "score": 29772.639294915945 }, { "content": "pub enum Unit {\n\n Fahrenheit,\n\n Celsius,\n\n Kelvin,\n\n}\n\n\n\nimpl Unit {\n\n pub fn convert_temperature(&self, temperature: &f32) -> f32 {\n\n match self {\n\n Unit::Fahrenheit => (temperature * 9.0 / 5.0) + 32.0,\n\n Unit::Kelvin => temperature + 273.15,\n\n Unit::Celsius => *temperature,\n\n }\n\n }\n\n}\n", "file_path": "src/number_representations/mod.rs", "rank": 20, "score": 6.928093175392723 }, { "content": "use nrf52840_hal::{pac::TIMER1, timer::OneShot, Timer};\n\n\n\nuse crate::{buzzer::Buzzer, rgb_led::LEDColour};\n\n\n\npub struct CO2alert {\n\n warning_level_1: f32,\n\n warning_level_2: f32,\n\n limit_level: f32,\n\n buzzer_count: u16,\n\n}\n\n\n\nimpl CO2alert {\n\n pub fn init(warning_level_1: f32, warning_level_2: f32, limit_level: 
f32) -> CO2alert {\n\n CO2alert {\n\n warning_level_1,\n\n warning_level_2,\n\n limit_level,\n\n buzzer_count: 0,\n\n }\n\n }\n", "file_path": "src/alert/mod.rs", "rank": 21, "score": 6.403129569813006 }, { "content": "\n\n pub fn check_level(\n\n &mut self,\n\n current_level: &f32,\n\n buzzer: &mut Buzzer,\n\n led: &mut LEDColour,\n\n mut timer: &mut Timer<TIMER1, OneShot>,\n\n ) {\n\n if *current_level > self.limit_level {\n\n led.red();\n\n if self.buzzer_count < 5 {\n\n buzzer.buzz(&mut timer);\n\n self.buzzer_count += 1;\n\n }\n\n } else if *current_level > self.warning_level_2 {\n\n led.yellow();\n\n } else if *current_level > self.warning_level_1 {\n\n led.blue();\n\n } else {\n\n led.green();\n\n self.buzzer_count = 0;\n\n }\n\n }\n\n}\n", "file_path": "src/alert/mod.rs", "rank": 22, "score": 4.406524049472658 }, { "content": "use nrf52840_hal::{\n\n gpio::{Input, Pin, PullUp},\n\n prelude::InputPin,\n\n};\n\n\n\npub struct Button {\n\n pin: Pin<Input<PullUp>>,\n\n was_pressed: bool,\n\n}\n\n\n\nimpl Button {\n\n pub fn new<Mode>(pin: Pin<Mode>) -> Self {\n\n Button {\n\n pin: pin.into_pullup_input(),\n\n was_pressed: false,\n\n }\n\n }\n\n\n\n pub fn is_pressed(&self) -> bool {\n\n self.pin.is_low().unwrap()\n", "file_path": "src/dk_button/mod.rs", "rank": 23, "score": 4.395309569465164 }, { "content": "\n\n pub fn green(&mut self) {\n\n self.r.set_high().unwrap();\n\n self.g.set_low().unwrap();\n\n self.b.set_high().unwrap();\n\n }\n\n\n\n pub fn yellow(&mut self) {\n\n self.r.set_low().unwrap();\n\n self.b.set_high().unwrap();\n\n self.g.set_low().unwrap();\n\n }\n\n\n\n pub fn white(&mut self) {\n\n self.r.set_low().unwrap();\n\n self.g.set_low().unwrap();\n\n self.b.set_low().unwrap();\n\n }\n\n\n\n pub fn blink(&mut self, timer: &mut Timer<TIMER1, OneShot>) {\n", "file_path": "src/rgb_led/mod.rs", "rank": 24, "score": 4.168848193567509 }, { "content": "use nrf52840_hal::{\n\n gpio::{Level, Output, Pin, PushPull},\n\n pac::TIMER1,\n\n prelude::*,\n\n 
timer::OneShot,\n\n Timer,\n\n};\n\n\n\npub struct Buzzer {\n\n pin: Pin<Output<PushPull>>,\n\n}\n\n\n\nimpl Buzzer {\n\n pub fn init<Mode>(pin: Pin<Mode>) -> Buzzer {\n\n Buzzer {\n\n pin: pin.into_push_pull_output(Level::Low),\n\n }\n\n }\n\n\n\n pub fn high(&mut self) {\n", "file_path": "src/buzzer/mod.rs", "rank": 25, "score": 4.0771006320299845 }, { "content": " }\n\n }\n\n\n\n pub fn off(&mut self) {\n\n self.r.set_high().unwrap();\n\n self.b.set_high().unwrap();\n\n self.g.set_high().unwrap();\n\n }\n\n\n\n pub fn red(&mut self) {\n\n self.r.set_low().unwrap();\n\n self.g.set_high().unwrap();\n\n self.b.set_high().unwrap();\n\n }\n\n\n\n pub fn blue(&mut self) {\n\n self.r.set_high().unwrap();\n\n self.g.set_high().unwrap();\n\n self.b.set_low().unwrap();\n\n }\n", "file_path": "src/rgb_led/mod.rs", "rank": 26, "score": 3.9298211688806255 }, { "content": " self.pin.set_high().unwrap();\n\n }\n\n\n\n pub fn low(&mut self) {\n\n self.pin.set_low().unwrap();\n\n }\n\n\n\n pub fn buzz(&mut self, timer: &mut Timer<TIMER1, OneShot>) {\n\n for _ in 1..250 {\n\n self.low();\n\n timer.delay_ms(3_u32);\n\n\n\n self.high();\n\n timer.delay_ms(3_u32);\n\n }\n\n }\n\n}\n", "file_path": "src/buzzer/mod.rs", "rank": 27, "score": 3.8250566272336086 }, { "content": "use arrayvec::ArrayString;\n\nuse core::fmt::Write;\n\nuse embedded_graphics::{\n\n egtext,\n\n fonts::{Font, Font12x16, Font24x32},\n\n geometry::Point,\n\n pixelcolor::BinaryColor,\n\n prelude::*,\n\n primitives::Rectangle,\n\n style::PrimitiveStyle,\n\n text_style,\n\n};\n\nuse epd_waveshare::epd4in2::*;\n\n\n", "file_path": "src/display_helper/mod.rs", "rank": 28, "score": 3.6727513201601143 }, { "content": " }\n\n\n\n pub fn check_rising_edge(&mut self) -> bool {\n\n let mut rising_edge = false;\n\n\n\n let is_pressed = self.is_pressed();\n\n\n\n if self.was_pressed && !is_pressed {\n\n rising_edge = true;\n\n }\n\n self.was_pressed = is_pressed;\n\n\n\n rising_edge\n\n }\n\n}\n", "file_path": 
"src/dk_button/mod.rs", "rank": 29, "score": 3.4356282818126758 }, { "content": "#![no_std]\n\n#![no_main]\n\n\n\nuse carbon_sensor as _; // memory layout + panic handler\n\n\n\n// See https://crates.io/crates/defmt-test/0.1.0 for more documentation (e.g. about the 'state'\n\n// feature)\n\n#[defmt_test::tests]\n\nmod tests {\n\n use defmt::{assert, assert_eq};\n\n\n\n #[test]\n\n fn assert_true() {\n\n assert!(true)\n\n }\n\n\n\n #[test]\n\n fn assert_eq() {\n\n assert_eq!(24, 42, \"TODO: write actual tests\")\n\n }\n\n}\n", "file_path": "testsuite/tests/test.rs", "rank": 30, "score": 2.886631484444622 }, { "content": "\n\n let result = if current_blue {\n\n self.b.set_high()\n\n } else {\n\n self.b.set_low()\n\n };\n\n result.unwrap();\n\n }\n\n}\n", "file_path": "src/rgb_led/mod.rs", "rank": 31, "score": 2.492394321534147 }, { "content": " let current_red = self.r.is_set_high().unwrap();\n\n let current_green = self.g.is_set_high().unwrap();\n\n let current_blue = self.b.is_set_high().unwrap();\n\n\n\n self.white();\n\n timer.delay_ms(100_u32);\n\n\n\n let result = if current_red {\n\n self.r.set_high()\n\n } else {\n\n self.r.set_low()\n\n };\n\n result.unwrap();\n\n\n\n let result = if current_green {\n\n self.g.set_high()\n\n } else {\n\n self.g.set_low()\n\n };\n\n result.unwrap();\n", "file_path": "src/rgb_led/mod.rs", "rank": 32, "score": 2.3420080628963307 }, { "content": "#![no_std]\n\n\n\nuse core::sync::atomic::{AtomicUsize, Ordering};\n\n\n\nuse defmt_rtt as _; // global logger\n\nuse nrf52840_hal as _; // memory layout\n\n\n\nuse panic_probe as _;\n\n\n\n// same panicking *behavior* as `panic-probe` but doesn't print a panic message\n\n// this prevents the panic message being printed *twice* when `defmt::panic` is invoked\n\n#[defmt::panic_handler]\n", "file_path": "src/lib.rs", "rank": 33, "score": 2.2233821625843326 }, { "content": "use nrf52840_hal::{\n\n gpio::{Level, Output, Pin, PushPull},\n\n pac::TIMER1,\n\n prelude::*,\n\n 
timer::OneShot,\n\n Timer,\n\n};\n\n\n\npub struct LEDColour {\n\n r: Pin<Output<PushPull>>,\n\n b: Pin<Output<PushPull>>,\n\n g: Pin<Output<PushPull>>,\n\n}\n\n\n\nimpl LEDColour {\n\n pub fn init<Mode>(led_red: Pin<Mode>, led_blue: Pin<Mode>, led_green: Pin<Mode>) -> LEDColour {\n\n LEDColour {\n\n r: led_red.into_push_pull_output(Level::High),\n\n b: led_blue.into_push_pull_output(Level::High),\n\n g: led_green.into_push_pull_output(Level::High),\n", "file_path": "src/rgb_led/mod.rs", "rank": 34, "score": 2.0463314673250563 }, { "content": "#![no_std]\n\n#![cfg_attr(test, no_main)]\n\n\n\nuse carbon_sensor as _; // memory layout + panic handler\n\n\n\n#[defmt_test::tests]\n\nmod tests {}\n", "file_path": "testsuite/src/lib.rs", "rank": 35, "score": 1.6834083853013146 }, { "content": "3. Switch defmt dependencies to git: uncomment the last part of the root `Cargo.toml` and enter the hash reported by `probe-run --version`:\n\n\n\n``` diff\n\n-# [patch.crates-io]\n\n-# defmt = { git = \"https://github.com/knurling-rs/defmt\", rev = \"use defmt version reported by `probe-run --version`\" }\n\n-# defmt-rtt = { git = \"https://github.com/knurling-rs/defmt\", rev = \"use defmt version reported by `probe-run --version`\" }\n\n-# defmt-test = { git = \"https://github.com/knurling-rs/defmt\", rev = \"use defmt version reported by `probe-run --version`\" }\n\n-# panic-probe = { git = \"https://github.com/knurling-rs/defmt\", rev = \"use defmt version reported by `probe-run --version`\" }\n\n+[patch.crates-io]\n\n+defmt = { git = \"https://github.com/knurling-rs/defmt\", rev = \"60c6447f8ecbc4ff023378ba6905bcd0de1e679f\" }\n\n+defmt-rtt = { git = \"https://github.com/knurling-rs/defmt\", rev = \"60c6447f8ecbc4ff023378ba6905bcd0de1e679f\" }\n\n+defmt-test = { git = \"https://github.com/knurling-rs/defmt\", rev = \"60c6447f8ecbc4ff023378ba6905bcd0de1e679f\" }\n\n+panic-probe = { git = \"https://github.com/knurling-rs/defmt\", rev = \"60c6447f8ecbc4ff023378ba6905bcd0de1e679f\" 
}\n\n```\n\n\n\nYou are now using the git version of `defmt`!\n\n\n\n**NOTE** there may have been breaking changes between the crates.io version and the git version; you'll need to fix those in the source code.\n\n\n", "file_path": "README.md", "rank": 36, "score": 1.6112555254010137 }, { "content": "#### 3. Adjust the compilation target\n\n\n\nIn `.cargo/config.toml`, pick the right compilation target for your board.\n\n\n\n``` diff\n\n # .cargo/config.toml\n\n [build]\n\n-target = \"thumbv6m-none-eabi\" # Cortex-M0 and Cortex-M0+\n\n-# target = \"thumbv7m-none-eabi\" # Cortex-M3\n\n-# target = \"thumbv7em-none-eabi\" # Cortex-M4 and Cortex-M7 (no FPU)\n\n-# target = \"thumbv7em-none-eabihf\" # Cortex-M4F and Cortex-M7F (with FPU)\n\n+target = \"thumbv7em-none-eabihf\" # Cortex-M4F (with FPU)\n\n```\n\n\n\nAdd the target with `rustup`.\n\n\n\n``` console\n\n$ rustup target add thumbv7em-none-eabihf\n\n```\n\n\n\n#### 4. Add a HAL as a dependency\n\n\n\nIn `Cargo.toml`, list the Hardware Abstraction Layer (HAL) for your board as a dependency.\n\n\n\nFor the nRF52840 you'll want to use the [`nrf52840-hal`].\n\n\n\n[`nrf52840-hal`]: https://crates.io/crates/nrf52840-hal\n\n\n\n``` diff\n\n # Cargo.toml\n\n [dependencies]\n\n-# some-hal = \"1.2.3\"\n\n+nrf52840-hal = \"0.12.0\"\n\n```\n\n\n\n#### 5. Import your HAL\n\n\n\nNow that you have selected a HAL, fix the HAL import in `src/lib.rs`\n\n\n\n``` diff\n\n // my-app/src/lib.rs\n\n-// use some_hal as _; // memory layout\n\n+use nrf52840_hal as _; // memory layout\n\n```\n\n\n\n#### (6. Get a linker script)\n\n\n\nSome HAL crates require that you manually copy over a file called `memory.x` from the HAL to the root of your project. For nrf52840-hal, this is done automatically so no action is needed. 
For other HAL crates, you can get it from your local Cargo folder, the default location is under:\n\n\n\n```\n\n~/.cargo/registry/src/\n\n```\n\n\n\nNot all HALs provide a `memory.x` file, you may need to write it yourself. Check the documentation for the HAL you are using.\n\n\n\n\n\n#### 7. Run!\n\n\n\nYou are now all set to `cargo-run` your first `defmt`-powered application!\n\nThere are some examples in the `src/bin` directory.\n\n\n\nStart by `cargo run`-ning `my-app/src/bin/hello.rs`:\n\n\n\n``` console\n\n$ # `rb` is an alias for `run --bin`\n\n$ cargo rb hello\n\n Finished dev [optimized + debuginfo] target(s) in 0.03s\n\nflashing program ..\n\nDONE\n\nresetting device\n\n0.000000 INFO Hello, world!\n\n(..)\n\n\n\n$ echo $?\n\n0\n\n```\n\n\n", "file_path": "README.md", "rank": 37, "score": 1.5127975759435865 }, { "content": "#### (8. Set `rust-analyzer.linkedProjects`)\n\n\n\nIf you are using [rust-analyzer] with VS Code for IDE-like features you can add following configuration to your `.vscode/settings.json` to make it work transparently across workspaces. Find the details of this option in the [RA docs].\n\n\n\n```json\n\n{\n\n \"rust-analyzer.linkedProjects\": [\n\n \"Cargo.toml\",\n\n \"firmware/Cargo.toml\",\n\n ]\n\n} \n\n```\n\n\n\n[RA docs]: https://rust-analyzer.github.io/manual.html#configuration\n\n[rust-analyzer]: https://rust-analyzer.github.io/\n\n\n\n## Trying out the git version of defmt\n\n\n\nThis template is configured to use the latest crates.io release (the \"stable\" release) of the `defmt` framework.\n\nTo use the git version (the \"development\" version) of `defmt` follow these steps:\n\n\n\n1. Install the *git* version of `probe-run`\n\n\n\n``` console\n\n$ cargo install --git https://github.com/knurling-rs/probe-run --branch main\n\n```\n\n\n\n2. 
Check which defmt version `probe-run` supports\n\n\n\n``` console\n\n$ probe-run --version\n\n0.2.0 (aa585f2 2021-02-22)\n\nsupported defmt version: 60c6447f8ecbc4ff023378ba6905bcd0de1e679f\n\n```\n\n\n\nIn the example output, the supported version is `60c6447f8ecbc4ff023378ba6905bcd0de1e679f`\n\n\n", "file_path": "README.md", "rank": 38, "score": 1.2315096850732465 }, { "content": "## Support\n\n\n\n`app-template` is part of the [Knurling] project, [Ferrous Systems]' effort at\n\nimproving tooling used to develop for embedded systems.\n\n\n\nIf you think that our work is useful, consider sponsoring it via [GitHub\n\nSponsors].\n\n\n\n## License\n\n\n\nLicensed under either of\n\n\n\n- Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or\n\n http://www.apache.org/licenses/LICENSE-2.0)\n\n\n\n- MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)\n\n\n\nat your option.\n\n\n\n### Contribution\n\n\n\nUnless you explicitly state otherwise, any contribution intentionally submitted\n\nfor inclusion in the work by you, as defined in the Apache-2.0 license, shall be\n\nlicensed as above, without any additional terms or conditions.\n\n\n\n[Knurling]: https://knurling.ferrous-systems.com\n\n[Ferrous Systems]: https://ferrous-systems.com/\n\n[GitHub Sponsors]: https://github.com/sponsors/knurling-rs\n", "file_path": "README.md", "rank": 39, "score": 1.1879296482510018 }, { "content": "# `app-template`\n\n\n\n> Quickly set up a [`probe-run`] + [`defmt`] + [`flip-link`] embedded project\n\n\n\n[`probe-run`]: https://crates.io/crates/probe-run\n\n[`defmt`]: https://github.com/knurling-rs/defmt\n\n[`flip-link`]: https://github.com/knurling-rs/flip-link\n\n\n\n## Dependencies\n\n\n\n#### 1. `flip-link`:\n\n\n\n```console\n\n$ cargo install flip-link\n\n```\n\n\n\n#### 2. `probe-run`:\n\n\n\n``` console\n\n$ # make sure to install v0.2.0 or later\n\n$ cargo install probe-run\n\n```\n\n\n\n#### 3. 
[`cargo-generate`]:\n\n\n\n``` console\n\n$ cargo install cargo-generate\n\n```\n\n\n\n[`cargo-generate`]: https://crates.io/crates/cargo-generate\n\n\n\n> *Note:* You can also just clone this repository instead of using `cargo-generate`, but this involves additional manual adjustments.\n\n\n\n## Setup\n\n\n\n#### 1. Initialize the project template\n\n\n\n``` console\n\n$ cargo generate \\\n\n --git https://github.com/knurling-rs/app-template \\\n\n --branch main \\\n\n --name my-app\n\n```\n\n\n\nIf you look into your new `my-app` folder, you'll find that there are a few `TODO`s in the files marking the properties you need to set.\n\n\n\nLet's walk through them together now.\n\n\n\n#### 2. Set `probe-run` chip\n\n\n\nPick a chip from `probe-run --list-chips` and enter it into `.cargo/config.toml`.\n\n\n\nIf, for example, you have a nRF52840 Development Kit from one of [our workshops], replace `{{chip}}` with `nRF52840_xxAA`.\n\n\n\n[our workshops]: https://github.com/ferrous-systems/embedded-trainings-2020\n\n\n\n``` diff\n\n # .cargo/config.toml\n\n [target.'cfg(all(target_arch = \"arm\", target_os = \"none\"))']\n\n-runner = \"probe-run --chip {{chip}}\"\n\n+runner = \"probe-run --chip nRF52840_xxAA\"\n\n```\n\n\n", "file_path": "README.md", "rank": 40, "score": 0.9703751587762626 } ]
Rust
src/block_renderer.rs
AnthonyTornetta/bevy_testing
9f8a8e6bda66b2ac6b8caea72dded2e4740bdab3
use crate::blocks::block; use bevy::prelude::*; use bevy::render::mesh::Indices; use bevy::render::render_resource::PrimitiveTopology; use crate::base_renderable; use crate::base_renderable::CanCreateSubMesh; use crate::blocks::block::Side; pub const U_WIDTH: f32 = 0.5; pub const V_HEIGHT: f32 = 0.5; const DEFAULT_FRONT_INFO: [([f32; 3], [f32; 3], [f32; 2]); 4] = [ ([-0.5, -0.5, 0.5], [0., 0., 1.0], [0., 1.0]), ([0.5, -0.5, 0.5], [0., 0., 1.0], [1.0, 1.0]), ([0.5, 0.5, 0.5], [0., 0., 1.0], [1.0, 0.0]), ([-0.5, 0.5, 0.5], [0., 0., 1.0], [0., 0.0]), ]; const DEFAULT_FRONT_INDICES: [u32; 6] = [0, 1, 2, 2, 3, 0]; const DEFAULT_BACK_INFO: [([f32; 3], [f32; 3], [f32; 2]); 4] = [ ([-0.5, 0.5, -0.5], [0., 0., -1.0], [0., 1.0]), ([0.5, 0.5, -0.5], [0., 0., -1.0], [1.0, 1.0]), ([0.5, -0.5, -0.5], [0., 0., -1.0], [1.0, 0.0]), ([-0.5, -0.5, -0.5], [0., 0., -1.0], [0., 0.0]), ]; const DEFAULT_BACK_INDICES: [u32; 6] = [4, 5, 6, 6, 7, 4]; const DEFAULT_RIGHT_INFO: [([f32; 3], [f32; 3], [f32; 2]); 4] = [ ([0.5, -0.5, -0.5], [1.0, 0., 0.], [1.0, 1.0]), ([0.5, 0.5, -0.5], [1.0, 0., 0.], [1.0, 0.0]), ([0.5, 0.5, 0.5], [1.0, 0., 0.], [0.0, 0.0]), ([0.5, -0.5, 0.5], [1.0, 0., 0.], [0.0, 1.0]), ]; const DEFAULT_RIGHT_INDICES: [u32; 6] = [8, 9, 10, 10, 11, 8]; const DEFAULT_LEFT_INFO: [([f32; 3], [f32; 3], [f32; 2]); 4] = [ ([-0.5, -0.5, 0.5], [-1.0, 0., 0.], [1.0, 1.0]), ([-0.5, 0.5, 0.5], [-1.0, 0., 0.], [1.0, 0.0]), ([-0.5, 0.5, -0.5], [-1.0, 0., 0.], [0.0, 0.0]), ([-0.5, -0.5, -0.5], [-1.0, 0., 0.], [0.0, 1.0]), ]; const DEFAULT_LEFT_INDICES: [u32; 6] = [12, 13, 14, 14, 15, 12]; const DEFAULT_TOP_INFO: [([f32; 3], [f32; 3], [f32; 2]); 4] = [ ([0.5, 0.5, -0.5], [0., 1.0, 0.], [1.0, 0.]), ([-0.5, 0.5, -0.5], [0., 1.0, 0.], [0., 0.]), ([-0.5, 0.5, 0.5], [0., 1.0, 0.], [0., 1.0]), ([0.5, 0.5, 0.5], [0., 1.0, 0.], [1.0, 1.0]), ]; const DEFAULT_TOP_INDICES: [u32; 6] = [16, 17, 18, 18, 19, 16]; const DEFAULT_BOTTOM_INFO: [([f32; 3], [f32; 3], [f32; 2]); 4] = [ ([0.5, -0.5, 0.5], [0., -1.0, 
0.], [1.0, 0.]), ([-0.5, -0.5, 0.5], [0., -1.0, 0.], [0., 0.]), ([-0.5, -0.5, -0.5], [0., -1.0, 0.], [0., 1.0]), ([0.5, -0.5, -0.5], [0., -1.0, 0.], [1.0, 1.0]), ]; const DEFAULT_BOTTOM_INDICES: [u32; 6] = [20, 21, 22, 22, 23, 20]; #[inline] fn apply_offset<T: HasUVs>( default_info: [([f32; 3], [f32; 3], [f32; 2]); 4], offset: &Vec3, has_uvs: &T, side: Side, ) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { let mut res = Vec::with_capacity(default_info.len()); for mut item in default_info { item.0[0] += offset.x; item.0[1] += offset.y; item.0[2] += offset.z; item.2[0] = has_uvs.u_width(side) * item.2[0] + has_uvs.u_min(side); item.2[1] = has_uvs.v_height(side) * item.2[1] + has_uvs.v_min(side); res.push(item); } res } pub trait HasUVs { fn u_min(&self, side: block::Side) -> f32; fn u_width(&self, _side: block::Side) -> f32 { U_WIDTH } fn v_min(&self, side: block::Side) -> f32; fn v_height(&self, _side: block::Side) -> f32 { V_HEIGHT } } pub struct DefaultBlockMeshCreator<T: HasUVs> { uv_chooser: T, } impl<T: HasUVs> DefaultBlockMeshCreator<T> { pub fn new(uv_chooser: T) -> Self { DefaultBlockMeshCreator { uv_chooser } } } impl<T: HasUVs> base_renderable::CanCreateSubMesh for DefaultBlockMeshCreator<T> { fn right_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { apply_offset(DEFAULT_RIGHT_INFO, offset, &self.uv_chooser, Side::RIGHT) } fn right_indices(&self) -> Vec<u32> { Vec::from(DEFAULT_FRONT_INDICES) } fn left_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { apply_offset(DEFAULT_LEFT_INFO, offset, &self.uv_chooser, Side::LEFT) } fn left_indices(&self) -> Vec<u32> { Vec::from(DEFAULT_FRONT_INDICES) } fn top_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { apply_offset(DEFAULT_TOP_INFO, offset, &self.uv_chooser, Side::TOP) } fn top_indices(&self) -> Vec<u32> { Vec::from(DEFAULT_FRONT_INDICES) } fn bottom_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { 
apply_offset(DEFAULT_BOTTOM_INFO, offset, &self.uv_chooser, Side::BOTTOM) } fn bottom_indices(&self) -> Vec<u32> { Vec::from(DEFAULT_FRONT_INDICES) } fn front_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { apply_offset(DEFAULT_FRONT_INFO, offset, &self.uv_chooser, Side::FRONT) } fn front_indices(&self) -> Vec<u32> { Vec::from(DEFAULT_FRONT_INDICES) } fn back_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { apply_offset(DEFAULT_BACK_INFO, offset, &self.uv_chooser, Side::BACK) } fn back_indices(&self) -> Vec<u32> { Vec::from(DEFAULT_FRONT_INDICES) } } pub struct EmptyMeshCreator; impl EmptyMeshCreator { pub fn new() -> Self { EmptyMeshCreator {} } } impl base_renderable::CanCreateSubMesh for EmptyMeshCreator { fn right_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { Vec::new() } fn right_indices(&self) -> Vec<u32> { Vec::new() } fn left_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { Vec::new() } fn left_indices(&self) -> Vec<u32> { Vec::new() } fn top_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { Vec::new() } fn top_indices(&self) -> Vec<u32> { Vec::new() } fn bottom_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { Vec::new() } fn bottom_indices(&self) -> Vec<u32> { Vec::new() } fn front_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { Vec::new() } fn front_indices(&self) -> Vec<u32> { Vec::new() } fn back_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { Vec::new() } fn back_indices(&self) -> Vec<u32> { Vec::new() } }
use crate::blocks::block; use bevy::prelude::*; use bevy::render::mesh::Indices; use bevy::render::render_resource::PrimitiveTopology; use crate::base_renderable; use crate::base_renderable::CanCreateSubMesh; use crate::blocks::block::Side; pub const U_WIDTH: f32 = 0.5; pub const V_HEIGHT: f32 = 0.5; const DEFAULT_FRONT_INFO: [([f32; 3], [f32; 3], [f32; 2]); 4] = [ ([-0.5, -0.5, 0.5], [0., 0., 1.0], [0., 1.0]), ([0.5, -0.5, 0.5], [0., 0., 1.0], [1.0, 1.0]), ([0.5, 0.5, 0.5], [0., 0., 1.0], [1.0, 0.0]), ([-0.5, 0.5, 0.5], [0., 0., 1.0], [0., 0.0]), ]; const DEFAULT_FRONT_INDICES: [u32; 6] = [0, 1, 2, 2, 3, 0]; const DEFAULT_BACK_INFO: [([f32; 3], [f32; 3], [f32; 2]); 4] = [ ([-0.5, 0.5, -0.5], [0., 0., -1.0], [0., 1.0]), ([0.5, 0.5, -0.5], [0., 0., -1.0], [1.0, 1.0]), ([0.5, -0.5, -0.5], [0., 0., -1.0], [1.0, 0.0]), ([-0.5, -0.5, -0.5], [0., 0., -1.0], [0., 0.0]), ]; const DEFAULT_BACK_INDICES: [u32; 6] = [4, 5, 6, 6, 7, 4]; const DEFAULT_RIGHT_INFO: [([f32; 3], [f32; 3], [f32; 2]); 4] = [ ([0.5, -0.5, -0.5], [1.0, 0., 0.], [1.0, 1.0]), ([0.5, 0.5, -0.5], [1.0, 0., 0.], [1.0, 0.0]), ([0.5, 0.5, 0.5], [1.0, 0., 0.], [0.0, 0.0]), ([0.5, -0.5, 0.5], [1.0, 0., 0.], [0.0, 1.0]), ]; const DEFAULT_RIGHT_INDICES: [u32; 6] = [8, 9, 10, 10, 11, 8]; const DEFAULT_LEFT_INFO: [([f32; 3], [f32; 3], [f32; 2]); 4] = [ ([-0.5, -0.5, 0.5], [-1.0, 0., 0.], [1.0, 1.0]), ([-0.5, 0.5, 0.5], [-1.0, 0., 0.], [1.0, 0.0]), ([-0.5, 0.5, -0.5], [-1.0, 0., 0.], [0.0, 0.0]), ([-0.5, -0.5, -0.5], [-1.0, 0., 0.], [0.0, 1.0]), ]; const DEFAULT_LEFT_INDICES: [u32; 6] = [12, 13, 14, 14, 15, 12]; const DEFAULT_TOP_INFO: [([f32; 3], [f32; 3], [f32; 2]); 4] = [ ([0.5, 0.5, -0.5], [0., 1.0, 0.], [1.0, 0.]), ([-0.5, 0.5, -0.5], [0., 1.0, 0.], [0., 0.]), ([-0.5, 0.5, 0.5], [0., 1.0, 0.], [0., 1.0]), ([0.5, 0.5, 0.5], [0., 1.0, 0.], [1.0, 1.0]), ]; const DEFAULT_TOP_INDICES: [u32; 6] = [16, 17, 18, 18, 19, 16]; const DEFAULT_BOTTOM_INFO: [([f32; 3], [f32; 3], [f32; 2]); 4] = [ ([0.5, -0.5, 0.5], [0., -1.0, 
0.], [1.0, 0.]), ([-0.5, -0.5, 0.5], [0., -1.0, 0.], [0., 0.]), ([-0.5, -0.5, -0.5], [0., -1.0, 0.], [0., 1.0]), ([0.5, -0.5, -0.5], [0., -1.0, 0.], [1.0, 1.0]), ]; const DEFAULT_BOTTOM_INDICES: [u32; 6] = [20, 21, 22, 22, 23, 20]; #[inline] fn apply_offset<T: HasUVs>( default_i
n(side); item.2[1] = has_uvs.v_height(side) * item.2[1] + has_uvs.v_min(side); res.push(item); } res } pub trait HasUVs { fn u_min(&self, side: block::Side) -> f32; fn u_width(&self, _side: block::Side) -> f32 { U_WIDTH } fn v_min(&self, side: block::Side) -> f32; fn v_height(&self, _side: block::Side) -> f32 { V_HEIGHT } } pub struct DefaultBlockMeshCreator<T: HasUVs> { uv_chooser: T, } impl<T: HasUVs> DefaultBlockMeshCreator<T> { pub fn new(uv_chooser: T) -> Self { DefaultBlockMeshCreator { uv_chooser } } } impl<T: HasUVs> base_renderable::CanCreateSubMesh for DefaultBlockMeshCreator<T> { fn right_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { apply_offset(DEFAULT_RIGHT_INFO, offset, &self.uv_chooser, Side::RIGHT) } fn right_indices(&self) -> Vec<u32> { Vec::from(DEFAULT_FRONT_INDICES) } fn left_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { apply_offset(DEFAULT_LEFT_INFO, offset, &self.uv_chooser, Side::LEFT) } fn left_indices(&self) -> Vec<u32> { Vec::from(DEFAULT_FRONT_INDICES) } fn top_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { apply_offset(DEFAULT_TOP_INFO, offset, &self.uv_chooser, Side::TOP) } fn top_indices(&self) -> Vec<u32> { Vec::from(DEFAULT_FRONT_INDICES) } fn bottom_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { apply_offset(DEFAULT_BOTTOM_INFO, offset, &self.uv_chooser, Side::BOTTOM) } fn bottom_indices(&self) -> Vec<u32> { Vec::from(DEFAULT_FRONT_INDICES) } fn front_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { apply_offset(DEFAULT_FRONT_INFO, offset, &self.uv_chooser, Side::FRONT) } fn front_indices(&self) -> Vec<u32> { Vec::from(DEFAULT_FRONT_INDICES) } fn back_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { apply_offset(DEFAULT_BACK_INFO, offset, &self.uv_chooser, Side::BACK) } fn back_indices(&self) -> Vec<u32> { Vec::from(DEFAULT_FRONT_INDICES) } } pub struct EmptyMeshCreator; impl 
EmptyMeshCreator { pub fn new() -> Self { EmptyMeshCreator {} } } impl base_renderable::CanCreateSubMesh for EmptyMeshCreator { fn right_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { Vec::new() } fn right_indices(&self) -> Vec<u32> { Vec::new() } fn left_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { Vec::new() } fn left_indices(&self) -> Vec<u32> { Vec::new() } fn top_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { Vec::new() } fn top_indices(&self) -> Vec<u32> { Vec::new() } fn bottom_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { Vec::new() } fn bottom_indices(&self) -> Vec<u32> { Vec::new() } fn front_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { Vec::new() } fn front_indices(&self) -> Vec<u32> { Vec::new() } fn back_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { Vec::new() } fn back_indices(&self) -> Vec<u32> { Vec::new() } }
nfo: [([f32; 3], [f32; 3], [f32; 2]); 4], offset: &Vec3, has_uvs: &T, side: Side, ) -> Vec<([f32; 3], [f32; 3], [f32; 2])> { let mut res = Vec::with_capacity(default_info.len()); for mut item in default_info { item.0[0] += offset.x; item.0[1] += offset.y; item.0[2] += offset.z; item.2[0] = has_uvs.u_width(side) * item.2[0] + has_uvs.u_mi
function_block-random_span
[ { "content": "pub fn camera_movement_system(\n\n mut query: Query<(&Camera, &mut Transform)>,\n\n mut ev_motion: EventReader<MouseMotion>,\n\n keys: Res<Input<KeyCode>>,\n\n)\n\n{\n\n let (_cam, mut transform) = query.single_mut();\n\n\n\n let speed = (keys.pressed(KeyCode::LShift) as i32 * 5 + 1) as f32 * 0.01;\n\n\n\n for x in ev_motion.iter()\n\n {\n\n let sens: f32 = 0.001;\n\n\n\n let temp: Quat = Quat::from_euler(\n\n EulerRot::XYZ,\n\n -x.delta.y as f32 * sens,\n\n -x.delta.x as f32 * sens,\n\n 0.0,\n\n );\n", "file_path": "src/main.rs", "rank": 0, "score": 70309.03945865194 }, { "content": "fn add_collider(start_x: f32, start_y: f32, start_z: f32, count: u16, colliders: &mut Vec<(Isometry<Real>, SharedShape)>)\n\n{\n\n let length_size = LENGTH * HEIGHT;\n\n let height_size = HEIGHT;\n\n\n\n // make collider\n\n let length = count / (length_size);\n\n let height = (count - length * length_size) / height_size;\n\n let width = count - height_size * height - length_size * length;\n\n\n\n let hw = (width + 1) as f32 / 2.0;\n\n let hh = (height + 1) as f32 / 2.0;\n\n let hl = (length + 1) as f32 / 2.0;\n\n\n\n colliders.push(\n\n (Isometry::translation(start_x + hw, start_y + hh, start_z + hl),\n\n ColliderShape::cuboid(hw, hh, hl)));\n\n}\n\n\n", "file_path": "src/chunk_mesh_updater.rs", "rank": 1, "score": 57181.39987292178 }, { "content": "pub fn from_id(id: BlockID) -> &'static Block\n\n{\n\n &BLOCKS[id as usize]\n\n}\n", "file_path": "src/blocks/mod.rs", "rank": 2, "score": 55877.06914565175 }, { "content": "/// sets up a scene with textured entities\n\nfn setup(\n\n mut commands: Commands,\n\n asset_server: Res<AssetServer>,\n\n mut meshes: ResMut<Assets<Mesh>>,\n\n mut materials: ResMut<Assets<StandardMaterial>>,\n\n)\n\n{\n\n let texture_handle = asset_server.load(\"block.png\");\n\n\n\n let material_handle = materials.add(StandardMaterial {\n\n base_color_texture: Some(texture_handle.clone()),\n\n alpha_mode: AlphaMode::Blend,\n\n unlit: true,\n\n 
..Default::default()\n\n });\n\n\n\n for z in -4..0\n\n {\n\n for x in 0..4\n\n {\n", "file_path": "src/main.rs", "rank": 3, "score": 37656.8622808423 }, { "content": "fn main()\n\n{\n\n let mut app = App::new();\n\n app.add_plugins(DefaultPlugins)\n\n .add_startup_system(setup)\n\n // .add_system(print_heights)\n\n .add_system(camera_movement_system)\n\n .add_plugin(RapierPhysicsPlugin::<NoUserData>::default())\n\n .insert_resource(RapierConfiguration\n\n {\n\n gravity: vector![0.0,0.0,0.0],\n\n ..Default::default()\n\n })\n\n .add_plugin(WorldInspectorPlugin::new())\n\n .add_plugin(chunk_mesh_updater::ChunkMeshUpdaterPlugin)\n\n .add_plugin(chunk_generator::ChunkGeneratorPlugin)\n\n .run();\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 4, "score": 37656.8622808423 }, { "content": "pub trait CanCreateMesh\n\n{\n\n fn create_mesh(&self) -> Mesh;\n\n}\n", "file_path": "src/base_renderable.rs", "rank": 6, "score": 36260.81136601274 }, { "content": "fn apply_info(\n\n cur_index: i32,\n\n mesh_data: &Vec<([f32; 3], [f32; 3], [f32; 2])>,\n\n indices: &Vec<u32>,\n\n positions: &mut Vec<[f32; 3]>,\n\n normals: &mut Vec<[f32; 3]>,\n\n uvs: &mut Vec<[f32; 2]>,\n\n indices_congreg: &mut Vec<u32>,\n\n) -> i32\n\n{\n\n let mut next_max_index = cur_index;\n\n\n\n for index in indices\n\n {\n\n indices_congreg.push((*index as i32 + cur_index + 1) as u32);\n\n next_max_index = std::cmp::max(*index as i32 + cur_index + 1, next_max_index);\n\n }\n\n\n\n for (position, normal, uv) in mesh_data\n\n {\n", "file_path": "src/chunk_renderer.rs", "rank": 7, "score": 35527.32131546457 }, { "content": "pub trait CanCreateSubMesh\n\n{\n\n fn right_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])>;\n\n fn right_indices(&self) -> Vec<u32>;\n\n\n\n fn left_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])>;\n\n fn left_indices(&self) -> Vec<u32>;\n\n\n\n fn top_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])>;\n\n fn 
top_indices(&self) -> Vec<u32>;\n\n\n\n fn bottom_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])>;\n\n fn bottom_indices(&self) -> Vec<u32>;\n\n\n\n fn front_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])>;\n\n fn front_indices(&self) -> Vec<u32>;\n\n\n\n fn back_mesh_data(&self, offset: &Vec3) -> Vec<([f32; 3], [f32; 3], [f32; 2])>;\n\n fn back_indices(&self) -> Vec<u32>;\n\n}\n\n\n", "file_path": "src/base_renderable.rs", "rank": 8, "score": 35453.12416295276 }, { "content": "fn check_chunks(\n\n mut commands: Commands,\n\n mut meshes: ResMut<Assets<Mesh>>,\n\n mut query: Query<(Entity, &Chunk, &Handle<Mesh>, Option<&Aabb>, With<Dirty>)>,\n\n)\n\n{\n\n for (entity, chunk, handle, aabb, _) in query.iter_mut()\n\n {\n\n let mesh = chunk.create_mesh();\n\n\n\n let _ = meshes.set(handle, mesh);\n\n\n\n let mut ent = commands.entity(entity);\n\n\n\n if !aabb.is_none()\n\n {\n\n ent.remove::<Aabb>();\n\n\n\n ent.insert(chunk.create_mesh().compute_aabb().unwrap());\n\n }\n", "file_path": "src/chunk_mesh_updater.rs", "rank": 9, "score": 34602.30195862862 }, { "content": "fn chunk_generation_system(\n\n mut commands: Commands,\n\n mut query: Query<(Entity, &mut Chunk, With<NeedsGenerated>)>,\n\n)\n\n{\n\n for (ent, mut chunk, _) in query.iter_mut()\n\n {\n\n generate(&mut chunk, ent, &mut commands);\n\n }\n\n}\n", "file_path": "src/generation/chunk_generator.rs", "rank": 10, "score": 33754.82913470107 }, { "content": "fn add_people(mut commands: Commands)\n\n{\n\n commands\n\n .spawn()\n\n .insert(Person)\n\n .insert(Name(\"Joe\".to_string()));\n\n commands\n\n .spawn()\n\n .insert(Person)\n\n .insert(Name(\"Mike\".to_string()));\n\n commands\n\n .spawn()\n\n .insert(Person)\n\n .insert(Name(\"Chad\".to_string()));\n\n}\n\n\n", "file_path": "src/epic_plugin.rs", "rank": 12, "score": 29391.981272077217 }, { "content": "#[inline]\n\nfn flatten(x: u16, y: u16, z: u16) -> usize\n\n{\n\n (x + WIDTH * (y + HEIGHT * z)) as 
usize\n\n}\n\n\n\nimpl Chunk\n\n{\n\n pub fn new(x: i32, y: i32, z: i32) -> Self\n\n {\n\n Chunk {\n\n blocks: [blocks::BLOCK_AIR.id; (WIDTH * HEIGHT * LENGTH) as usize],\n\n neighbors: [None, None, None, None, None, None], // does nothing yet\n\n x,\n\n y,\n\n z,\n\n }\n\n }\n\n\n\n pub fn set_neighbor(&mut self, neighbor_position: usize, neighbor: Option<Box<Chunk>>)\n\n {\n", "file_path": "src/chunk.rs", "rank": 13, "score": 27481.04349631616 }, { "content": "fn create_colliders(chunk: &Chunk) -> Vec<(Isometry<Real>, SharedShape)>\n\n{\n\n let mut colliders: Vec<(Isometry<Real>, SharedShape)> = Vec::new();\n\n\n\n let mut start_x: f32 = 0.0;\n\n let mut start_y: f32 = 0.0;\n\n let mut start_z: f32 = 0.0;\n\n\n\n let mut count: u16 = 0;\n\n\n\n for z in 0..LENGTH\n\n {\n\n for y in 0..HEIGHT\n\n {\n\n for x in 0..WIDTH\n\n {\n\n if chunk.has_block_u16(x, y, z)\n\n {\n\n if count == 0\n\n {\n", "file_path": "src/chunk_mesh_updater.rs", "rank": 14, "score": 24124.460278046143 }, { "content": "fn generate(chunk: &mut Chunk, entity: Entity, commands: &mut Commands)\n\n{\n\n commands.entity(entity).remove::<NeedsGenerated>();\n\n\n\n let perlin = Perlin::new();\n\n\n\n for z in 0..LENGTH\n\n {\n\n for x in 0..WIDTH\n\n {\n\n let max_y = HEIGHT as i32 - 3\n\n + f64::round(\n\n 3.0 * perlin.get([\n\n (x as i32 + chunk.x) as f64 * 0.1,\n\n (z as i32 + chunk.z) as f64 * 0.1,\n\n ]),\n\n ) as i32;\n\n\n\n for y in 0..max_y\n\n {\n", "file_path": "src/generation/chunk_generator.rs", "rank": 15, "score": 23109.72959686053 }, { "content": "fn greet_people(time: Res<Time>, mut timer: ResMut<GreetTimer>, query: Query<&Name, With<Person>>)\n\n{\n\n if timer.0.tick(time.delta()).just_finished()\n\n {\n\n for name in query.iter()\n\n {\n\n println!(\"Hello, {}\", name.0);\n\n }\n\n }\n\n}\n\n\n\nimpl Plugin for HelloPlugin\n\n{\n\n fn build(&self, app: &mut App)\n\n {\n\n app.insert_resource(GreetTimer(Timer::from_seconds(2.0, true)))\n\n .add_startup_system(add_people)\n\n 
.add_system(greet_people);\n\n // .add_plugin(EditorPlugin);\n\n }\n\n}\n", "file_path": "src/epic_plugin.rs", "rank": 16, "score": 19438.745536563292 }, { "content": "use crate::blocks;\n\nuse crate::blocks::block;\n\nuse crate::blocks::BLOCK_AIR;\n\nuse bevy::prelude::*;\n\nuse std::mem;\n\n\n\npub const WIDTH: u16 = 16;\n\npub const HEIGHT: u16 = 16;\n\npub const LENGTH: u16 = 16;\n\n\n\n#[derive(Component)]\n\npub struct Dirty;\n\n\n\n#[derive(Component)]\n\npub struct NeedsGenerated;\n\n\n\n#[derive(Component)]\n\npub struct Chunk\n\n{\n\n blocks: [block::BlockID; (WIDTH * HEIGHT * LENGTH) as usize],\n\n neighbors: [Option<Box<Chunk>>; 6], // does nothing yet\n\n\n\n pub x: i32,\n\n pub y: i32,\n\n pub z: i32,\n\n}\n\n\n\n#[inline]\n", "file_path": "src/chunk.rs", "rank": 19, "score": 6.121209246141406 }, { "content": "use crate::blocks::block::Block;\n\nuse crate::blocks::{BLOCK_DIRT, BLOCK_GRASS, BLOCK_STONE};\n\nuse crate::chunk::{HEIGHT, LENGTH, WIDTH};\n\nuse crate::{Chunk, Commands, Entity, NeedsGenerated};\n\nuse bevy::prelude::*;\n\nuse noise::{NoiseFn, Perlin};\n\n\n\npub struct ChunkGeneratorPlugin;\n\n\n\nimpl Plugin for ChunkGeneratorPlugin\n\n{\n\n fn build(&self, app: &mut App)\n\n {\n\n app.add_system(chunk_generation_system);\n\n }\n\n}\n\n\n", "file_path": "src/generation/chunk_generator.rs", "rank": 23, "score": 4.828730291347133 }, { "content": " x < WIDTH && y < HEIGHT && z < LENGTH\n\n }\n\n\n\n #[inline]\n\n pub fn within_blocks_i16(&self, x: i16, y: i16, z: i16) -> bool\n\n {\n\n x >= 0 && x < WIDTH as i16 && y >= 0 && y < HEIGHT as i16 && z >= 0 && z < LENGTH as i16\n\n }\n\n\n\n #[inline]\n\n pub fn block_at(&self, x: u16, y: u16, z: u16) -> &'static block::Block\n\n {\n\n blocks::from_id(self.blocks[flatten(x, y, z)])\n\n }\n\n\n\n #[inline]\n\n pub fn set_block(\n\n &mut self,\n\n x: u16,\n\n y: u16,\n", "file_path": "src/chunk.rs", "rank": 24, "score": 4.6409343444368645 }, { "content": "use 
bevy::prelude::*;\n\n\n\n#[derive(Component)]\n\npub struct Structure {}\n", "file_path": "src/structure/structure.rs", "rank": 26, "score": 4.553109715591489 }, { "content": "use bevy::prelude::*;\n\n\n\npub struct HelloPlugin;\n\n\n\n#[derive(Component)]\n", "file_path": "src/epic_plugin.rs", "rank": 28, "score": 4.444845547981652 }, { "content": " self.neighbors[neighbor_position] = neighbor;\n\n }\n\n\n\n #[inline]\n\n pub fn has_block_u16(&self, x: u16, y: u16, z: u16) -> bool\n\n {\n\n self.within_blocks_u16(x, y, z)\n\n && self.blocks[flatten(x as u16, y as u16, z as u16)] != BLOCK_AIR.id\n\n }\n\n\n\n #[inline]\n\n pub fn has_block_i16(&self, x: i16, y: i16, z: i16) -> bool\n\n {\n\n self.within_blocks_i16(x, y, z)\n\n && self.blocks[flatten(x as u16, y as u16, z as u16)] != BLOCK_AIR.id\n\n }\n\n\n\n #[inline]\n\n pub fn within_blocks_u16(&self, x: u16, y: u16, z: u16) -> bool\n\n {\n", "file_path": "src/chunk.rs", "rank": 31, "score": 4.201991388998101 }, { "content": "use crate::base_renderable::CanCreateMesh;\n\nuse crate::chunk::Dirty;\n\nuse crate::{Chunk, WIDTH, HEIGHT, LENGTH};\n\nuse bevy::ecs::component::{ComponentId, ComponentInfo};\n\nuse bevy::prelude::*;\n\nuse bevy::render::primitives::Aabb;\n\nuse bevy_rapier3d::prelude::*;\n\n\n\npub struct ChunkMeshUpdaterPlugin;\n\n\n", "file_path": "src/chunk_mesh_updater.rs", "rank": 32, "score": 4.065233335302656 }, { "content": "use crate::block_renderer::EmptyMeshCreator;\n\nuse crate::blocks::block::Block;\n\n\n\npub struct AirBlockCreator;\n\n\n\nimpl AirBlockCreator\n\n{\n\n pub fn create(id: u16) -> Block\n\n {\n\n Block {\n\n id,\n\n mesh_creator: Box::new(EmptyMeshCreator::new()),\n\n }\n\n }\n\n}\n", "file_path": "src/blocks/air_block.rs", "rank": 33, "score": 3.9249607463490905 }, { "content": "use crate::block_renderer;\n\nuse crate::block_renderer::DefaultBlockMeshCreator;\n\nuse crate::blocks::block::{Block, Side};\n\n\n\npub struct StoneBlockCreator;\n\n\n\nimpl StoneBlockCreator\n\n{\n\n 
pub fn create(id: u16) -> Block\n\n {\n\n Block {\n\n id,\n\n mesh_creator: Box::new(DefaultBlockMeshCreator::new(StoneUVs {})),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/blocks/stone_block.rs", "rank": 34, "score": 3.916209104295676 }, { "content": "use crate::block_renderer;\n\nuse crate::block_renderer::DefaultBlockMeshCreator;\n\nuse crate::blocks::block::{Block, Side};\n\n\n\npub struct DirtBlockCreator;\n\n\n\nimpl DirtBlockCreator\n\n{\n\n pub fn create(id: u16) -> Block\n\n {\n\n Block {\n\n id,\n\n mesh_creator: Box::new(DefaultBlockMeshCreator::new(DirtUVs {})),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/blocks/dirt_block.rs", "rank": 35, "score": 3.916209104295676 }, { "content": "use crate::block_renderer;\n\nuse crate::block_renderer::DefaultBlockMeshCreator;\n\nuse crate::blocks::block::{Block, Side};\n\n\n\npub struct GrassBlockCreator;\n\n\n\nimpl GrassBlockCreator\n\n{\n\n pub fn create(id: u16) -> Block\n\n {\n\n Block {\n\n id,\n\n mesh_creator: Box::new(DefaultBlockMeshCreator::new(GrassBlockUVs {})),\n\n }\n\n }\n\n}\n\n\n", "file_path": "src/blocks/grass_block.rs", "rank": 36, "score": 3.883400737981785 }, { "content": "use crate::base_renderable::CanCreateSubMesh;\n\n\n\npub type BlockID = u16;\n\n\n\npub struct Block\n\n{\n\n pub id: BlockID,\n\n\n\n pub mesh_creator: Box<dyn CanCreateSubMesh + Send + Sync + 'static>,\n\n}\n\n\n\nimpl PartialEq for Block\n\n{\n\n fn eq(&self, other: &Self) -> bool\n\n {\n\n self.id == other.id\n\n }\n\n}\n\n\n\n#[derive(Clone, Copy)]\n", "file_path": "src/blocks/block.rs", "rank": 37, "score": 3.622926792717706 }, { "content": "use crate::blocks::air_block::AirBlockCreator;\n\nuse crate::blocks::block::{Block, BlockID};\n\nuse crate::blocks::dirt_block::DirtBlockCreator;\n\nuse crate::blocks::grass_block::GrassBlockCreator;\n\nuse crate::blocks::stone_block::StoneBlockCreator;\n\n\n\nmod air_block;\n\npub mod block;\n\nmod dirt_block;\n\nmod grass_block;\n\nmod stone_block;\n\n//\n\n// static mut 
MASTER_BLOCK_ID: BlockID = 0;\n\n//\n\n// // will only ever be called on the same thread. Therefore, it is assumed that it is completely safe\n\n// fn next_id() -> BlockID\n\n// {\n\n// unsafe {\n\n// let res = MASTER_BLOCK_ID;\n\n//\n", "file_path": "src/blocks/mod.rs", "rank": 39, "score": 3.015382777541781 }, { "content": " start_x = x as f32 - WIDTH as f32 / 2.0;\n\n start_y = y as f32 - HEIGHT as f32 / 2.0;\n\n start_z = z as f32 - LENGTH as f32 / 2.0;\n\n }\n\n count += 1;\n\n }\n\n else if count != 0\n\n {\n\n add_collider(start_x, start_y, start_z, count, &mut colliders);\n\n\n\n count = 0;\n\n }\n\n }\n\n }\n\n }\n\n\n\n if count != 0\n\n {\n\n add_collider(start_x, start_y, start_z, count, &mut colliders);\n\n }\n\n\n\n colliders\n\n}\n\n\n", "file_path": "src/chunk_mesh_updater.rs", "rank": 40, "score": 2.887758747562651 }, { "content": "use crate::base_renderable::{CanCreateMesh, CanCreateSubMesh};\n\nuse crate::chunk;\n\nuse bevy::prelude::*;\n\nuse bevy::render::mesh::Indices;\n\nuse bevy::render::render_resource::PrimitiveTopology;\n\n\n", "file_path": "src/chunk_renderer.rs", "rank": 41, "score": 2.839414982406128 }, { "content": "#[macro_use]\n\nextern crate lazy_static;\n\n\n\nmod blocks;\n\nmod chunk;\n\nmod structure;\n\n\n\nmod base_renderable;\n\nmod block_renderer;\n\nmod chunk_renderer;\n\n\n\nmod chunk_mesh_updater;\n\nmod epic_plugin;\n\nmod generation;\n\n\n\nuse crate::base_renderable::CanCreateMesh;\n\nuse crate::chunk::{Chunk, HEIGHT, LENGTH, NeedsGenerated, WIDTH};\n\nuse crate::generation::chunk_generator;\n\nuse crate::structure::structure::Structure;\n\nuse bevy::ecs::component::ComponentInfo;\n\nuse bevy::input::mouse::MouseMotion;\n\nuse bevy::prelude::*;\n\nuse bevy_inspector_egui::WorldInspectorPlugin;\n\nuse std::ops::Mul;\n\nuse bevy_rapier3d::prelude::*;\n\nuse rand::random;\n\nuse crate::KeyCode::D;\n\nuse crate::nalgebra::Isometry3;\n\n\n", "file_path": "src/main.rs", "rank": 42, "score": 2.8234708576213423 }, { "content": 
"use bevy::prelude::*;\n\n\n", "file_path": "src/base_renderable.rs", "rank": 43, "score": 2.5722067451043866 }, { "content": " let chunk = Chunk::new(x * 16, 0, z * 16);\n\n\n\n let xf: f32 = x as f32 * 16.0;\n\n let yf: f32 = 0.0;\n\n let zf: f32 = z as f32 * 16.0;\n\n\n\n commands\n\n .spawn_bundle(PbrBundle {\n\n mesh: meshes.add(chunk.create_mesh()),\n\n material: material_handle.clone(),\n\n transform: Transform {\n\n translation: Vec3::new(xf, yf, zf),\n\n ..Default::default()\n\n },\n\n ..Default::default()\n\n })\n\n .insert(chunk)\n\n .insert(NeedsGenerated {})\n\n .insert_bundle(RigidBodyBundle {\n\n position: [xf, yf, zf].into(),\n", "file_path": "src/main.rs", "rank": 44, "score": 2.497212607967411 }, { "content": "pub mod structure;\n", "file_path": "src/structure/mod.rs", "rank": 45, "score": 2.472393074718868 }, { "content": "pub mod chunk_generator;\n", "file_path": "src/generation/mod.rs", "rank": 46, "score": 2.4074250815145763 }, { "content": " for z in 0..chunk::LENGTH\n\n {\n\n for y in 0..chunk::HEIGHT\n\n {\n\n for x in 0..chunk::WIDTH\n\n {\n\n if !self.has_block_u16(x, y, z)\n\n {\n\n continue;\n\n }\n\n\n\n let here = &self.block_at(x, y, z).mesh_creator;\n\n let location = Vec3::new(x as f32, y as f32, z as f32);\n\n\n\n if !self.has_block_i16(x as i16 - 1, y as i16, z as i16)\n\n {\n\n let indices = here.left_indices();\n\n let mesh_data = here.left_mesh_data(&location);\n\n\n\n cur_index = apply_info(\n", "file_path": "src/chunk_renderer.rs", "rank": 47, "score": 2.335211463386047 }, { "content": "// MASTER_BLOCK_ID += 1;\n\n//\n\n// res\n\n// }\n\n// }\n\n\n\nlazy_static! 
{\n\n pub static ref BLOCK_AIR: Block = AirBlockCreator::create(0);\n\n pub static ref BLOCK_STONE: Block = StoneBlockCreator::create(1);\n\n pub static ref BLOCK_GRASS: Block = GrassBlockCreator::create(2);\n\n pub static ref BLOCK_DIRT: Block = DirtBlockCreator::create(3);\n\n pub static ref BLOCKS: [&'static block::Block; 4] =\n\n [&BLOCK_AIR, &BLOCK_STONE, &BLOCK_GRASS, &BLOCK_DIRT];\n\n}\n\n\n", "file_path": "src/blocks/mod.rs", "rank": 48, "score": 2.292209690448357 }, { "content": " velocity: RigidBodyVelocity {\n\n linvel: [rand::random::<f32>() * 6.0 - 3.0,\n\n rand::random::<f32>() * 6.0 - 3.0,\n\n rand::random::<f32>() * 6.0 - 3.0].into(),\n\n angvel: [0.0, 0.0, 0.0].into()\n\n }.into(),\n\n ..Default::default()\n\n })\n\n .insert(ColliderPositionSync::Discrete); // Updates Bevy's transform w/ rapier's transform\n\n }\n\n }\n\n\n\n commands.spawn_bundle(PointLightBundle {\n\n point_light: PointLight {\n\n intensity: 10240.0,\n\n shadows_enabled: true,\n\n color: Color::Rgba {\n\n red: 1.0,\n\n blue: 1.0,\n\n green: 1.0,\n", "file_path": "src/main.rs", "rank": 49, "score": 2.1427230941300923 }, { "content": " positions.push(*position);\n\n normals.push(*normal);\n\n uvs.push(*uv);\n\n }\n\n\n\n next_max_index\n\n}\n\n\n\nimpl CanCreateMesh for chunk::Chunk\n\n{\n\n fn create_mesh(&self) -> Mesh\n\n {\n\n let mut positions: Vec<[f32; 3]> = Vec::new();\n\n let mut normals: Vec<[f32; 3]> = Vec::new();\n\n let mut uvs: Vec<[f32; 2]> = Vec::new();\n\n\n\n let mut indices_congreg = Vec::new();\n\n\n\n let mut cur_index: i32 = -1;\n\n\n", "file_path": "src/chunk_renderer.rs", "rank": 50, "score": 2.126785990922107 }, { "content": "\n\n transform.rotation = temp.mul(transform.rotation).normalize();\n\n }\n\n\n\n transform.translation.x +=\n\n (keys.pressed(KeyCode::D) as i32 - keys.pressed(KeyCode::A) as i32) as f32 * speed;\n\n transform.translation.y +=\n\n (keys.pressed(KeyCode::E) as i32 - keys.pressed(KeyCode::Q) as i32) as f32 * speed;\n\n 
transform.translation.z +=\n\n (keys.pressed(KeyCode::S) as i32 - keys.pressed(KeyCode::W) as i32) as f32 * speed;\n\n}\n\n\n", "file_path": "src/main.rs", "rank": 51, "score": 2.0359292274577943 }, { "content": "pub enum Side\n\n{\n\n LEFT,\n\n RIGHT,\n\n TOP,\n\n BOTTOM,\n\n FRONT,\n\n BACK,\n\n}\n\n\n\nimpl Block\n\n{\n\n pub fn new(id: u16, mesh_creator: Box<dyn CanCreateSubMesh + Send + Sync + 'static>) -> Self\n\n {\n\n Block { id, mesh_creator }\n\n }\n\n}\n", "file_path": "src/blocks/block.rs", "rank": 52, "score": 2.030221161123851 }, { "content": " cur_index,\n\n &mesh_data,\n\n &indices,\n\n &mut positions,\n\n &mut normals,\n\n &mut uvs,\n\n &mut indices_congreg,\n\n );\n\n }\n\n }\n\n }\n\n }\n\n\n\n let indices = Indices::U32(indices_congreg);\n\n\n\n let mut mesh = Mesh::new(PrimitiveTopology::TriangleList);\n\n mesh.set_attribute(Mesh::ATTRIBUTE_POSITION, positions);\n\n mesh.set_attribute(Mesh::ATTRIBUTE_NORMAL, normals);\n\n mesh.set_attribute(Mesh::ATTRIBUTE_UV_0, uvs);\n\n mesh.set_indices(Some(indices));\n\n mesh\n\n }\n\n}\n", "file_path": "src/chunk_renderer.rs", "rank": 53, "score": 1.125043308305091 } ]